fix(users/Profpatsch/whatcd-resolver): reduce json data from db
Previously we transferred the full json data for each torrent from the db instead of just the 2 or 3 fields we need.

Adds some more helpers for parsing database values, and some better logging events & traces.

Change-Id: I5db386c4ea247febf5f9fc3815da2e7f11286d41
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12140
Tested-by: BuildkiteCI
Reviewed-by: Profpatsch <mail@profpatsch.de>
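As a rough illustration of how the new decoder helpers are meant to compose (a minimal sketch, not code from this commit: the `Dec`/`Field` qualified aliases and `Field.decimalNatural` are the ones used in the diff below, but the decoder name here is hypothetical and the surrounding query plumbing is omitted):

-- The query now projects single fields out of the json blob, e.g.
--   tg.full_json_result->>'groupName' AS group_name,
--   tg.full_json_result->>'groupYear' AS group_year
-- `->>` always yields text, so a numeric field like groupYear comes back
-- as text and is re-parsed with a FieldParser on top of the text decoder.
torrentGroupDecoder :: Dec.Decoder TorrentGroupJson
torrentGroupDecoder = do
  groupName <- Dec.text
  groupYear <- Dec.textParse Field.decimalNatural
  pure $ TorrentGroupJson {..}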
This commit is contained in:
parent 13d79e04d8
commit f9703a9af5

4 changed files with 104 additions and 20 deletions
@@ -8,6 +8,8 @@ import Data.Typeable (Typeable)
 import Database.PostgreSQL.Simple (Binary (fromBinary))
 import Database.PostgreSQL.Simple.FromField qualified as PG
 import Database.PostgreSQL.Simple.FromRow qualified as PG
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
 import Json qualified
 import Label
 import PossehlAnalyticsPrelude
@@ -24,12 +26,65 @@ bytea = fromField @(Binary ByteString) <&> (.fromBinary)
 byteaMay :: Decoder (Maybe ByteString)
 byteaMay = fromField @(Maybe (Binary ByteString)) <&> fmap (.fromBinary)

+-- | Parse a `text` field.
+text :: Decoder Text
+text = fromField @Text
+
+-- | Parse a nullable `text` field.
+textMay :: Decoder (Maybe Text)
+textMay = fromField @(Maybe Text)
+
+-- | Parse a `text` field, and then use a 'FieldParser' to convert the result further.
+textParse :: (Typeable to) => FieldParser Text to -> Decoder to
+textParse = parse @Text
+
+-- | Parse a nullable `text` field, and then use a 'FieldParser' to convert the result further.
+textParseMay :: (Typeable to) => FieldParser Text to -> Decoder (Maybe to)
+textParseMay = parseMay @Text
+
+-- | Parse a type implementing 'FromField', and then use a 'FieldParser' to convert the result further.
+parse ::
+  forall from to.
+  ( PG.FromField from,
+    Typeable to
+  ) =>
+  FieldParser from to ->
+  Decoder to
+parse parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @from field bytes
+  case Field.runFieldParser parser val of
+    Left err ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & prettyError & textToString)
+    Right a -> pure a
+
+-- | Parse a nullable type implementing 'FromField', and then use a 'FieldParser' to convert the result further.
+parseMay ::
+  forall from to.
+  ( PG.FromField from,
+    Typeable to
+  ) =>
+  FieldParser from to ->
+  Decoder (Maybe to)
+parseMay parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @(Maybe from) field bytes
+  case Field.runFieldParser parser <$> val of
+    Nothing -> pure Nothing
+    Just (Left err) ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & prettyError & textToString)
+    Just (Right a) -> pure (Just a)
+
 -- | Turn any type that implements 'PG.fromField' into a 'Decoder'. Use type applications to prevent accidental conversions:
 --
 -- @
 -- fromField @Text :: Decoder Text
 -- @
-fromField :: PG.FromField a => Decoder a
+fromField :: (PG.FromField a) => Decoder a
 fromField = Decoder $ PG.fieldWith PG.fromField

 -- | Turn any type that implements 'PG.fromField' into a 'Decoder' and wrap the result into the given 'Label'. Use type applications to prevent accidental conversions:
@@ -37,7 +92,7 @@ fromField = Decoder $ PG.fieldWith PG.fromField
 -- @
 -- fromField @"myField" @Text :: Decoder (Label "myField" Text)
 -- @
-fromFieldLabel :: forall lbl a. PG.FromField a => Decoder (Label lbl a)
+fromFieldLabel :: forall lbl a. (PG.FromField a) => Decoder (Label lbl a)
 fromFieldLabel = label @lbl <$> fromField

 -- | Parse fields out of a json value returned from the database.
@@ -55,7 +110,7 @@ fromFieldLabel = label @lbl <$> fromField
 --
 -- Also note: `->>` will coerce the json value to @text@, regardless of the content.
 -- So the JSON object @{"foo": {}}"@ would be returned as the text: @"{\"foo\": {}}"@.
-json :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder a
+json :: (Typeable a) => Json.ParseT ErrorTree Identity a -> Decoder a
 json parser = Decoder $ PG.fieldWith $ \field bytes -> do
   val <- PG.fromField @Json.Value field bytes
   case Json.parseValue parser val of
@@ -81,7 +136,7 @@ json parser = Decoder $ PG.fieldWith $ \field bytes -> do
 --
 -- Also note: `->>` will coerce the json value to @text@, regardless of the content.
 -- So the JSON object @{"foo": {}}"@ would be returned as the text: @"{\"foo\": {}}"@.
-jsonMay :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder (Maybe a)
+jsonMay :: (Typeable a) => Json.ParseT ErrorTree Identity a -> Decoder (Maybe a)
 jsonMay parser = Decoder $ PG.fieldWith $ \field bytes -> do
   val <- PG.fromField @(Maybe Json.Value) field bytes
   case Json.parseValue parser <$> val of
@@ -74,6 +74,16 @@ addAttribute span key a = Otel.addAttribute span ("_." <> key) a
 addAttributes :: (MonadIO m) => Otel.Span -> HashMap Text Otel.Attribute -> m ()
 addAttributes span attrs = Otel.addAttributes span $ attrs & HashMap.mapKeys ("_." <>)

+addEventSimple :: (MonadIO m) => Otel.Span -> Text -> m ()
+addEventSimple span name =
+  Otel.addEvent
+    span
+    Otel.NewEvent
+      { Otel.newEventName = name,
+        Otel.newEventTimestamp = Nothing,
+        Otel.newEventAttributes = mempty
+      }
+
 -- | Create an otel attribute from a json encoder
 jsonAttribute :: Enc -> Otel.Attribute
 jsonAttribute e = e & Enc.encToTextPretty & Otel.toAttribute
@@ -364,14 +364,13 @@ data TorrentData transmissionInfo = TorrentData
     torrentId :: Int,
     seedingWeight :: Int,
     artists :: [T2 "artistId" Int "artistName" Text],
-    torrentJson :: Json.Value,
     torrentGroupJson :: TorrentGroupJson,
     torrentStatus :: TorrentStatus transmissionInfo
   }

 data TorrentGroupJson = TorrentGroupJson
   { groupName :: Text,
-    groupYear :: Int
+    groupYear :: Natural
   }

 data TorrentStatus transmissionInfo
@@ -420,8 +419,9 @@ getBestTorrents opts = do
       tg.group_id,
       t.torrent_id,
       t.seeding_weight,
-      t.full_json_result AS torrent_json,
-      tg.full_json_result AS torrent_group_json,
+      t.full_json_result->'artists' AS artists,
+      tg.full_json_result->>'groupName' AS group_name,
+      tg.full_json_result->>'groupYear' AS group_year,
       t.torrent_file IS NOT NULL AS has_torrent_file,
       t.transmission_torrent_hash
     FROM filtered_torrents f
@@ -442,19 +442,15 @@ getBestTorrents opts = do
       groupId <- Dec.fromField @Int
       torrentId <- Dec.fromField @Int
       seedingWeight <- Dec.fromField @Int
-      (torrentJson, artists) <- Dec.json $ do
-        val <- Json.asValue
-        artists <- Json.keyOrDefault "artists" [] $ Json.eachInArray $ do
+      artists <- Dec.json $
+        Json.eachInArray $ do
           id_ <- Json.keyLabel @"artistId" "id" (Json.asIntegral @_ @Int)
           name <- Json.keyLabel @"artistName" "name" Json.asText
           pure $ T2 id_ name
-        pure (val, artists)
-      torrentGroupJson <-
-        ( Dec.json $ do
-            groupName <- Json.key "groupName" Json.asText
-            groupYear <- Json.key "groupYear" (Json.asIntegral @_ @Int)
-            pure $ TorrentGroupJson {..}
-        )
+      torrentGroupJson <- do
+        groupName <- Dec.text
+        groupYear <- Dec.textParse Field.decimalNatural
+        pure $ TorrentGroupJson {..}
       hasTorrentFile <- Dec.fromField @Bool
       transmissionTorrentHash <-
         Dec.fromField @(Maybe Text)
@@ -388,6 +388,7 @@ runHandlers defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
         )
         ( \span -> do
             res <- act span <&> (\h -> T2 (label @"html" h) (label @"extraHeaders" []))
+            addEventSimple span "Got Html result, rendering…"
             liftIO $ respond (resp res)
         )
     let htmlResp res = Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
@@ -444,6 +445,24 @@ singleQueryArgument field inner =
     )
     >>> Parse.fieldParser inner

+singleQueryArgumentMay :: Text -> FieldParser ByteString to -> Parse Http.Query (Maybe to)
+singleQueryArgumentMay field inner =
+  Parse.mkParsePushContext
+    field
+    ( \(ctx, qry) -> case qry
+        & mapMaybe
+          ( \(k, v) ->
+              if k == (field & textToBytesUtf8)
+                then Just v
+                else Nothing
+          ) of
+        [] -> Right Nothing
+        [Nothing] -> Left [fmt|Expected one query argument with a value, but "{field}" was a query flag|]
+        [Just one] -> Right (Just one)
+        more -> Left [fmt|More than one value for query argument "{field}": {show more}, at {ctx & Parse.showContext}|]
+    )
+    >>> Parse.maybe (Parse.fieldParser inner)
+
 -- | Make sure we can parse the given Text into an URI.
 textToURI :: Parse Text URI
 textToURI =
@@ -518,6 +537,9 @@ getBestTorrentsTable dat = do
   fresh <- getBestTorrentsData dat
   pure $ mkBestTorrentsTable fresh

+doIfJust :: (Applicative f) => (a -> f ()) -> Maybe a -> f ()
+doIfJust = traverse_
+
 getBestTorrentsData ::
   ( MonadTransmission m,
     MonadThrow m,
@@ -527,7 +549,8 @@ getBestTorrentsData ::
   ) =>
   Maybe (Label "artistRedactedId" Natural) ->
   Transaction m [TorrentData (Label "percentDone" Percentage)]
-getBestTorrentsData artistFilter = do
+getBestTorrentsData artistFilter = inSpan' "get torrents table data" $ \span -> do
+  artistFilter & doIfJust (\a -> addAttribute span "artist-filter.redacted-id" (a.artistRedactedId & showToText & Otel.toAttribute))
   bestStale :: [TorrentData ()] <- getBestTorrents GetBestTorrentsFilter {onlyArtist = artistFilter, onlyDownloaded = False}
   actual <-
     getAndUpdateTransmissionTorrentsStatus
@@ -596,7 +619,7 @@ mkBestTorrentsTable fresh = do
                 {Html.toHtml @Text b.torrentGroupJson.groupName}
               </a>
             </td>
-            <td>{Html.toHtml @Int b.torrentGroupJson.groupYear}</td>
+            <td>{Html.toHtml @Natural b.torrentGroupJson.groupYear}</td>
             <td>{Html.toHtml @Int b.seedingWeight}</td>
             <td><details hx-trigger="toggle once" hx-post="snips/redacted/torrentDataJson" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}></details></td>
           </tr>