
Commit aef0e12

Merge pull request #35 from unisoncomputing/cp/json-caching
Generic JSON Caching
2 parents 679a03d + ea9ffb9 commit aef0e12

6 files changed: +199 −50 lines


share-api.cabal

Lines changed: 1 addition & 0 deletions
@@ -92,6 +92,7 @@ library
     Share.UserProfile
     Share.Utils.API
     Share.Utils.Caching
+    Share.Utils.Caching.JSON
     Share.Utils.Logging
     Share.Utils.Logging.Types
     Share.Utils.Postgres

sql/2025-02-10_json-cache.sql

Lines changed: 16 additions & 0 deletions
-- Add a table for persistent caching of anything which can be serialized to JSON
-- Things in here should be considered immutable and tied to a specific key.
CREATE TABLE json_cache (
  -- The user this cache entry belongs to.
  -- Can be NULL for non-sandboxed entries.
  codebase_user_id UUID NULL REFERENCES users(id) ON DELETE CASCADE,
  -- Which category of data this cache entry belongs to.
  topic TEXT NOT NULL CHECK (topic <> ''),
  -- The key for this cache entry.
  key TEXT NOT NULL CHECK (key <> ''),
  value JSONB NOT NULL,
  -- When this cache entry was created.
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE UNIQUE INDEX json_cache_key ON json_cache (topic, codebase_user_id, key) NULLS NOT DISTINCT;
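
Because the unique index is declared NULLS NOT DISTINCT, two non-sandboxed rows (codebase_user_id IS NULL) with the same topic and key conflict with each other, so the upsert in putJSONCacheEntry (further down in this PR) replaces global entries instead of accumulating duplicates. A minimal sketch of that behaviour, using hypothetical topic/key/value literals that are not part of this migration:

-- Hypothetical values, for illustration only.
INSERT INTO json_cache (topic, key, codebase_user_id, value)
VALUES ('exampleTopic', 'rootCausalId=42,name=map', NULL, '{"cached": true}'::jsonb)
ON CONFLICT (topic, key, codebase_user_id)
DO UPDATE SET value = EXCLUDED.value;

-- Re-running the statement with a different value updates the existing row;
-- without NULLS NOT DISTINCT, the NULL codebase_user_id rows would never
-- conflict and a second row would be inserted instead.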

src/Share/Utils/Caching/JSON.hs

Lines changed: 104 additions & 0 deletions
-- Multipurpose caching for things which can serialize to JSON.
--
-- Note that this will often involve deserializing, then reserializing the value
-- if it's going to be dumped to a response, which may be slightly less efficient than other
-- methods, but grants a lot of flexibility and simplicity as a general approach.
module Share.Utils.Caching.JSON (CacheKey (..), usingJSONCache) where

import Data.Aeson (FromJSON, ToJSON (..))
import Data.Aeson qualified as Aeson
import Data.ByteString.Lazy qualified as BL
import Data.Functor
import Data.Text qualified as T
import Data.Text.Encoding qualified as Text
import Servant.Server qualified as Servant
import Share.IDs
import Share.Postgres qualified as PG
import Share.Postgres.IDs
import Share.Prelude
import Share.Utils.Logging qualified as Logging
import Share.Web.Errors

data CacheKey = CacheKey
  { cacheTopic :: Text,
    -- Ordered key-value pairs to make up a cache key.
    key :: [(Text, Text)],
    -- The causal id which this cache entry is derived from.
    -- Leave as 'Nothing' if the cache entry is not derived from a causal id.
    rootCausalId :: Maybe CausalId,
    -- 'Nothing' is its own global sandbox, and should only be used for
    -- things which are not user-specific.
    sandbox :: Maybe UserId
  }
  deriving (Show)

encodeKey :: CacheKey -> Text
encodeKey (CacheKey {key, rootCausalId}) =
  let keyWithCausal = maybe key (\(CausalId rci) -> ("rootCausalId", tShow @Int32 rci) : key) rootCausalId
   in keyWithCausal
        <&> (\(k, v) -> k <> "=" <> v)
        & T.intercalate ","

usingJSONCache ::
  (ToJSON v, FromJSON v, PG.QueryM m) =>
  CacheKey ->
  -- How to build the value if it's not in the cache.
  m v ->
  m v
usingJSONCache ck action = do
  getJSONCacheEntry ck >>= \case
    Just v -> pure v
    Nothing -> do
      v <- action
      putJSONCacheEntry ck v
      pure v

data JSONCacheError
  = JSONCacheDecodingError CacheKey Text
  deriving (Show)

instance ToServerError JSONCacheError where
  toServerError (JSONCacheDecodingError ck err) =
    (ErrorID "json-cache:decoding-error", Servant.err500 {Servant.errBody = BL.fromStrict $ Text.encodeUtf8 $ "Error decoding JSON cache entry: " <> tShow ck <> " - " <> err})

instance Logging.Loggable JSONCacheError where
  toLog (JSONCacheDecodingError ck err) =
    Logging.textLog ("Error decoding JSON cache entry: " <> encodeKey ck <> ", " <> tShow ck <> ", Error: " <> err)
      & Logging.withSeverity Logging.Error
      & Logging.withTag ("cacheTopic", cacheTopic ck)
      & Logging.withTag ("sandbox", tShow $ sandbox ck)
      & Logging.withTag ("rootCausalId", tShow $ rootCausalId ck)

getJSONCacheEntry :: (FromJSON v, PG.QueryM m) => CacheKey -> m (Maybe v)
getJSONCacheEntry ck@(CacheKey {cacheTopic, sandbox}) = do
  let cacheKey = encodeKey ck
  r <-
    PG.query1Col @Text
      [PG.sql|
      SELECT jc.value
      FROM json_cache jc
      WHERE topic = #{cacheTopic}
        AND key = #{cacheKey}
        AND codebase_user_id = #{sandbox}
      LIMIT 1
      |]
  case r of
    Nothing -> pure Nothing
    Just valText ->
      case Aeson.eitherDecode (BL.fromStrict . Text.encodeUtf8 $ valText) of
        Left _err -> do
          -- reportError $ JSONCacheDecodingError ck (T.pack err)
          pure Nothing
        Right v -> pure $ Just v

putJSONCacheEntry :: (ToJSON v, PG.QueryM m) => CacheKey -> v -> m ()
putJSONCacheEntry ck@(CacheKey {cacheTopic, sandbox}) v = do
  let keyText = encodeKey ck
  let valText = Text.decodeUtf8 . BL.toStrict $ Aeson.encode v
  PG.execute_
    [PG.sql|
      INSERT INTO json_cache (topic, key, codebase_user_id, value)
      VALUES (#{cacheTopic}, #{keyText}, #{sandbox}, #{valText}::jsonb)
      ON CONFLICT (topic, key, codebase_user_id)
      DO UPDATE SET value = EXCLUDED.value
    |]
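
A minimal usage sketch, under stated assumptions: the module name, the result type, and fetchContributorHandles below are hypothetical stand-ins; only CacheKey, usingJSONCache, and the PG.QueryM constraint come from this PR. The first call under a given key runs the action and stores its JSON encoding; later calls with the same key (and the same root causal, since that is folded into the key) return the cached row instead of recomputing.

{-# LANGUAGE OverloadedStrings #-}

-- Hypothetical caller module illustrating how an expensive query is wrapped in the cache.
module Share.Example.CachedHandles (cachedContributorHandles) where

import Data.Text (Text)
import Share.IDs (UserId)
import Share.Postgres qualified as PG
import Share.Postgres.IDs (CausalId)
import Share.Utils.Caching.JSON (CacheKey (..), usingJSONCache)

-- Hypothetical expensive computation whose result has To/FromJSON instances.
fetchContributorHandles :: (PG.QueryM m) => CausalId -> m [Text]
fetchContributorHandles _rootCausalId = pure ["alice", "bob"] -- stand-in body

cachedContributorHandles :: (PG.QueryM m) => UserId -> CausalId -> m [Text]
cachedContributorHandles codebaseOwnerUserId rootCausalId =
  usingJSONCache
    CacheKey
      { cacheTopic = "contributorHandles", -- groups entries of the same kind
        key = [("kind", "handles")], -- ordered pairs, encoded as "kind=handles"
        rootCausalId = Just rootCausalId, -- a new causal yields a new key, so entries never go stale
        sandbox = Just codebaseOwnerUserId -- scope the entry to this codebase owner
      }
    (fetchContributorHandles rootCausalId)

Since writes go through an upsert, recomputing under an existing key simply overwrites the row; callers are expected to fold anything that affects the result (here, the root causal id) into the key itself rather than relying on invalidation.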

src/Unison/Server/Share/Definitions.hs

Lines changed: 58 additions & 44 deletions
@@ -25,8 +25,10 @@ import Share.Postgres.Causal.Queries qualified as CausalQ
 import Share.Postgres.IDs (CausalId)
 import Share.Postgres.NameLookups.Ops qualified as NameLookupOps
 import Share.Prelude
+import Share.Utils.Caching.JSON qualified as Caching
 import Unison.Codebase.Editor.DisplayObject (DisplayObject)
 import Unison.Codebase.Path (Path)
+import Unison.Codebase.Path qualified as Path
 import Unison.ConstructorReference qualified as ConstructorReference
 import Unison.DataDeclaration qualified as DD
 import Unison.DataDeclaration.Dependencies qualified as DD
@@ -79,51 +81,63 @@ definitionForHQName ::
   HQ.HashQualified Name ->
   Codebase.CodebaseM e DefinitionDisplayResults
 definitionForHQName perspective rootCausalId renderWidth suffixifyBindings rt perspectiveQuery = do
-  rootBranchNamespaceHashId <- CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
-  (namesPerspective, query) <- NameLookupOps.relocateToNameRoot perspective perspectiveQuery rootBranchNamespaceHashId
-  Debug.debugM Debug.Server "definitionForHQName: (namesPerspective, query)" (namesPerspective, query)
-  -- Bias towards both relative and absolute path to queries,
-  -- This allows us to still bias towards definitions outside our namesRoot but within the
-  -- same tree;
-  -- e.g. if the query is `map` and we're in `base.trunk.List`,
-  -- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
-  -- `trunk` over those in other releases.
-  -- ppe which returns names fully qualified to the current namesRoot, not to the codebase root.
-  let biases = maybeToList $ HQ.toName query
-  let ppedBuilder deps = (PPED.biasTo biases) <$> lift (PPEPostgres.ppedForReferences namesPerspective deps)
-  let nameSearch = PGNameSearch.nameSearchForPerspective namesPerspective
-  dr@(Backend.DefinitionResults terms types misses) <- mkDefinitionsForQuery nameSearch [query]
-  Debug.debugM Debug.Server "definitionForHQName: found definitions" dr
-  let width = mayDefaultWidth renderWidth
-  let docResults :: Name -> Codebase.CodebaseM e [(HashQualifiedName, UnisonHash, Doc.Doc)]
-      docResults name = do
-        Debug.debugM Debug.Server "definitionForHQName: looking up docs for name" name
-        docRefs <- Docs.docsForDefinitionName nameSearch name
-        Debug.debugM Debug.Server "definitionForHQName: Found these docs" docRefs
-        renderDocRefs ppedBuilder width rt docRefs
+  codebaseOwnerUserId <- asks Codebase.codebaseOwner
+  let cacheKey =
+        Caching.CacheKey
+          { cacheTopic = "definitionForHQName",
+            key = [("perspective", Path.toText perspective), ("suffixify", tShow $ suffixified (suffixifyBindings)), ("hqName", HQ.toText perspectiveQuery), ("width", tShow renderWidth)],
+            rootCausalId = Just rootCausalId,
+            sandbox = Just codebaseOwnerUserId
+          }
+  Caching.usingJSONCache cacheKey go
+  where
+    go :: Codebase.CodebaseM e DefinitionDisplayResults
+    go = do
+      rootBranchNamespaceHashId <- CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
+      (namesPerspective, query) <- NameLookupOps.relocateToNameRoot perspective perspectiveQuery rootBranchNamespaceHashId
+      Debug.debugM Debug.Server "definitionForHQName: (namesPerspective, query)" (namesPerspective, query)
+      -- Bias towards both relative and absolute path to queries,
+      -- This allows us to still bias towards definitions outside our namesRoot but within the
+      -- same tree;
+      -- e.g. if the query is `map` and we're in `base.trunk.List`,
+      -- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
+      -- `trunk` over those in other releases.
      -- ppe which returns names fully qualified to the current namesRoot, not to the codebase root.
+      let biases = maybeToList $ HQ.toName query
+      let ppedBuilder deps = (PPED.biasTo biases) <$> lift (PPEPostgres.ppedForReferences namesPerspective deps)
+      let nameSearch = PGNameSearch.nameSearchForPerspective namesPerspective
+      dr@(Backend.DefinitionResults terms types misses) <- mkDefinitionsForQuery nameSearch [query]
+      Debug.debugM Debug.Server "definitionForHQName: found definitions" dr
+      let width = mayDefaultWidth renderWidth
+      let docResults :: Name -> Codebase.CodebaseM e [(HashQualifiedName, UnisonHash, Doc.Doc)]
+          docResults name = do
+            Debug.debugM Debug.Server "definitionForHQName: looking up docs for name" name
+            docRefs <- Docs.docsForDefinitionName nameSearch name
+            Debug.debugM Debug.Server "definitionForHQName: Found these docs" docRefs
+            renderDocRefs ppedBuilder width rt docRefs
 
-  let drDeps = Backend.definitionResultsDependencies dr
-  termAndTypePPED <- ppedBuilder drDeps
-  let fqnTermAndTypePPE = PPED.unsuffixifiedPPE termAndTypePPED
-  typeDefinitions <-
-    ifor (Backend.typesToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) types) \ref tp -> do
-      let hqTypeName = PPE.typeNameOrHashOnly fqnTermAndTypePPE ref
-      docs <- maybe (pure []) docResults (HQ.toName hqTypeName)
-      lift $ Backend.mkTypeDefinition termAndTypePPED width ref docs tp
-  termDefinitions <-
-    ifor (Backend.termsToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) terms) \reference trm -> do
-      let referent = Referent.Ref reference
-      let hqTermName = PPE.termNameOrHashOnly fqnTermAndTypePPE referent
-      docs <- maybe (pure []) docResults (HQ.toName hqTermName)
-      Backend.mkTermDefinition termAndTypePPED width reference docs trm
-  let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
-      renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
-      renderedMisses = fmap HQ.toText misses
-  pure $
-    DefinitionDisplayResults
-      renderedDisplayTerms
-      renderedDisplayTypes
-      renderedMisses
+      let drDeps = Backend.definitionResultsDependencies dr
+      termAndTypePPED <- ppedBuilder drDeps
+      let fqnTermAndTypePPE = PPED.unsuffixifiedPPE termAndTypePPED
+      typeDefinitions <-
+        ifor (Backend.typesToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) types) \ref tp -> do
+          let hqTypeName = PPE.typeNameOrHashOnly fqnTermAndTypePPE ref
+          docs <- maybe (pure []) docResults (HQ.toName hqTypeName)
+          lift $ Backend.mkTypeDefinition termAndTypePPED width ref docs tp
+      termDefinitions <-
+        ifor (Backend.termsToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) terms) \reference trm -> do
+          let referent = Referent.Ref reference
+          let hqTermName = PPE.termNameOrHashOnly fqnTermAndTypePPE referent
+          docs <- maybe (pure []) docResults (HQ.toName hqTermName)
+          Backend.mkTermDefinition termAndTypePPED width reference docs trm
+      let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
+          renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
+          renderedMisses = fmap HQ.toText misses
+      pure $
+        DefinitionDisplayResults
+          renderedDisplayTerms
+          renderedDisplayTypes
+          renderedMisses
 
 renderDocRefs ::
   PPEDBuilder (Codebase.CodebaseM e) ->

src/Unison/Server/Share/RenderDoc.hs

Lines changed: 19 additions & 5 deletions
@@ -12,19 +12,23 @@ module Unison.Server.Share.RenderDoc where
 
 import Data.Set qualified as Set
 import Share.Backend qualified as Backend
+import Share.Codebase qualified as Codebase
 import Share.Codebase.Types (CodebaseM, CodebaseRuntime)
 import Share.Postgres.Causal.Queries qualified as CausalQ
 import Share.Postgres.IDs (CausalId)
 import Share.Postgres.NameLookups.Ops qualified as NLOps
 import Share.Postgres.NameLookups.Types (PathSegments (..))
 import Share.Prelude
+import Share.Utils.Caching.JSON qualified as Caching
 import U.Codebase.Causal qualified as V2Causal
 import Unison.Codebase.Path qualified as Path
 import Unison.LabeledDependency qualified as LD
 import Unison.NameSegment.Internal (NameSegment (..))
 import Unison.PrettyPrintEnvDecl.Postgres qualified as PostgresPPE
+import Unison.Reference qualified as Reference
 import Unison.Server.Doc (Doc)
 import Unison.Server.Doc qualified as Doc
+import Unison.ShortHash qualified as SH
 import Unison.Util.Pretty (Width)
 
 -- | Find, eval, and render the first doc we find with any of the provided names within the given namespace
@@ -42,9 +46,19 @@ findAndRenderDoc docNames runtime namespacePath rootCausalId _mayWidth = runMayb
   rootNamespaceHashId <- lift $ CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
   namespaceCausal <- MaybeT $ CausalQ.loadCausalNamespaceAtPath rootCausalId namespacePath
   shallowBranchAtNamespace <- lift $ V2Causal.value namespaceCausal
-  namesPerspective <- NLOps.namesPerspectiveForRootAndPath rootNamespaceHashId (coerce $ Path.toList namespacePath)
   docRef <- MaybeT . pure $ Backend.findDocInBranch docNames shallowBranchAtNamespace
-  eDoc <- lift $ Backend.evalDocRef runtime docRef
-  let docDeps = Doc.dependencies eDoc <> Set.singleton (LD.TermReference docRef)
-  docPPE <- PostgresPPE.ppedForReferences namesPerspective docDeps
-  pure $ Doc.renderDoc docPPE eDoc
+  codebaseOwnerUserId <- asks Codebase.codebaseOwner
+  let cacheKey =
+        Caching.CacheKey
+          { cacheTopic = "findAndRenderDoc",
+            key = [("namespacePath", tShow namespacePath), ("docRef", SH.toText $ Reference.toShortHash docRef)],
+            rootCausalId = Just rootCausalId,
+            sandbox = Just codebaseOwnerUserId
+          }
+
+  lift $ Caching.usingJSONCache cacheKey do
+    namesPerspective <- NLOps.namesPerspectiveForRootAndPath rootNamespaceHashId (coerce $ Path.toList namespacePath)
+    eDoc <- Backend.evalDocRef runtime docRef
+    let docDeps = Doc.dependencies eDoc <> Set.singleton (LD.TermReference docRef)
+    docPPE <- PostgresPPE.ppedForReferences namesPerspective docDeps
+    pure $ Doc.renderDoc docPPE eDoc
