diff --git a/share-api.cabal b/share-api.cabal
index 778d1a37..6356c4f4 100644
--- a/share-api.cabal
+++ b/share-api.cabal
@@ -92,6 +92,7 @@ library
       Share.UserProfile
       Share.Utils.API
       Share.Utils.Caching
+      Share.Utils.Caching.JSON
       Share.Utils.Logging
       Share.Utils.Logging.Types
       Share.Utils.Postgres
diff --git a/sql/2025-02-10_json-cache.sql b/sql/2025-02-10_json-cache.sql
new file mode 100644
index 00000000..28d2cdda
--- /dev/null
+++ b/sql/2025-02-10_json-cache.sql
@@ -0,0 +1,16 @@
+-- Add a table for persistent caching of anything which can be serialized to JSON
+-- Things in here should be considered immutable and tied to a specific key.
+CREATE TABLE json_cache (
+  -- The user this cache entry belongs to.
+  -- Can be NULL for non-sandboxed entries.
+  codebase_user_id UUID NULL REFERENCES users(id) ON DELETE CASCADE,
+  -- Which category of data this cache entry belongs to.
+  topic TEXT NOT NULL CHECK (topic <> ''),
+  -- The key for this cache entry.
+  key TEXT NOT NULL CHECK (key <> ''),
+  value JSONB NOT NULL,
+  -- When this cache entry was created.
+  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+CREATE UNIQUE INDEX json_cache_key ON json_cache (topic, codebase_user_id, key) NULLS NOT DISTINCT;
diff --git a/src/Share/Utils/Caching/JSON.hs b/src/Share/Utils/Caching/JSON.hs
new file mode 100644
index 00000000..f71d0d63
--- /dev/null
+++ b/src/Share/Utils/Caching/JSON.hs
@@ -0,0 +1,104 @@
+-- Multipurpose caching for things which can serialize to JSON.
+--
+-- Note that this will often involve deserializing, then reserializing the value
+-- if it's going to be dumped to a response, which may be slightly less efficient than other
+-- methods, but grants a lot of flexibility and simplicity as a general approach.
+module Share.Utils.Caching.JSON (CacheKey (..), usingJSONCache) where
+
+import Data.Aeson (FromJSON, ToJSON (..))
+import Data.Aeson qualified as Aeson
+import Data.ByteString.Lazy qualified as BL
+import Data.Functor
+import Data.Text qualified as T
+import Data.Text.Encoding qualified as Text
+import Servant.Server qualified as Servant
+import Share.IDs
+import Share.Postgres qualified as PG
+import Share.Postgres.IDs
+import Share.Prelude
+import Share.Utils.Logging qualified as Logging
+import Share.Web.Errors
+
+data CacheKey = CacheKey
+  { cacheTopic :: Text,
+    -- Ordered key-value pairs to make up a cache key.
+    key :: [(Text, Text)],
+    -- The causal id which this cache entry is derived from.
+    -- Leave as 'Nothing' if the cache entry is not derived from a causal id.
+    rootCausalId :: Maybe CausalId,
+    -- 'Nothing' is its own global sandbox, and should only be used for
+    -- things which are not user-specific.
+    sandbox :: Maybe UserId
+  }
+  deriving (Show)
+
+encodeKey :: CacheKey -> Text
+encodeKey (CacheKey {key, rootCausalId}) =
+  let keyWithCausal = maybe key (\(CausalId rci) -> ("rootCausalId", tShow @Int32 rci) : key) rootCausalId
+   in keyWithCausal
+        <&> (\(k, v) -> k <> "=" <> v)
+        & T.intercalate ","
+
+usingJSONCache ::
+  (ToJSON v, FromJSON v, PG.QueryM m) =>
+  CacheKey ->
+  -- How to build the value if it's not in the cache.
+  m v ->
+  m v
+usingJSONCache ck action = do
+  getJSONCacheEntry ck >>= \case
+    Just v -> pure v
+    Nothing -> do
+      v <- action
+      putJSONCacheEntry ck v
+      pure v
+
+data JSONCacheError
+  = JSONCacheDecodingError CacheKey Text
+  deriving (Show)
+
+instance ToServerError JSONCacheError where
+  toServerError (JSONCacheDecodingError ck err) =
+    (ErrorID "json-cache:decoding-error", Servant.err500 {Servant.errBody = BL.fromStrict $ Text.encodeUtf8 $ "Error decoding JSON cache entry: " <> tShow ck <> " - " <> err})
+
+instance Logging.Loggable JSONCacheError where
+  toLog (JSONCacheDecodingError ck err) =
+    Logging.textLog ("Error decoding JSON cache entry: " <> encodeKey ck <> ", " <> tShow ck <> ", Error: " <> err)
+      & Logging.withSeverity Logging.Error
+      & Logging.withTag ("cacheTopic", cacheTopic ck)
+      & Logging.withTag ("sandbox", tShow $ sandbox ck)
+      & Logging.withTag ("rootCausalId", tShow $ rootCausalId ck)
+
+getJSONCacheEntry :: (FromJSON v, PG.QueryM m) => CacheKey -> m (Maybe v)
+getJSONCacheEntry ck@(CacheKey {cacheTopic, sandbox}) = do
+  let cacheKey = encodeKey ck
+  r <-
+    PG.query1Col @Text
+      [PG.sql|
+        SELECT jc.value
+        FROM json_cache jc
+        WHERE topic = #{cacheTopic}
+          AND key = #{cacheKey}
+          AND codebase_user_id = #{sandbox}
+        LIMIT 1
+      |]
+  case r of
+    Nothing -> pure Nothing
+    Just valText ->
+      case Aeson.eitherDecode (BL.fromStrict . Text.encodeUtf8 $ valText) of
+        Left _err -> do
+          -- reportError $ JSONCacheDecodingError ck (T.pack err)
+          pure Nothing
+        Right v -> pure $ Just v
+
+putJSONCacheEntry :: (ToJSON v, PG.QueryM m) => CacheKey -> v -> m ()
+putJSONCacheEntry ck@(CacheKey {cacheTopic, sandbox}) v = do
+  let keyText = encodeKey ck
+  let valText = Text.decodeUtf8 . BL.toStrict $ Aeson.encode v
+  PG.execute_
+    [PG.sql|
+      INSERT INTO json_cache (topic, key, codebase_user_id, value)
+      VALUES (#{cacheTopic}, #{keyText}, #{sandbox}, #{valText}::jsonb)
+      ON CONFLICT (topic, key, codebase_user_id)
+      DO UPDATE SET value = EXCLUDED.value
+    |]
diff --git a/src/Unison/Server/Share/Definitions.hs b/src/Unison/Server/Share/Definitions.hs
index cc1fc048..64a934bb 100644
--- a/src/Unison/Server/Share/Definitions.hs
+++ b/src/Unison/Server/Share/Definitions.hs
@@ -25,8 +25,10 @@ import Share.Postgres.Causal.Queries qualified as CausalQ
 import Share.Postgres.IDs (CausalId)
 import Share.Postgres.NameLookups.Ops qualified as NameLookupOps
 import Share.Prelude
+import Share.Utils.Caching.JSON qualified as Caching
 import Unison.Codebase.Editor.DisplayObject (DisplayObject)
 import Unison.Codebase.Path (Path)
+import Unison.Codebase.Path qualified as Path
 import Unison.ConstructorReference qualified as ConstructorReference
 import Unison.DataDeclaration qualified as DD
 import Unison.DataDeclaration.Dependencies qualified as DD
@@ -79,51 +81,63 @@ definitionForHQName ::
   HQ.HashQualified Name ->
   Codebase.CodebaseM e DefinitionDisplayResults
 definitionForHQName perspective rootCausalId renderWidth suffixifyBindings rt perspectiveQuery = do
-  rootBranchNamespaceHashId <- CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
-  (namesPerspective, query) <- NameLookupOps.relocateToNameRoot perspective perspectiveQuery rootBranchNamespaceHashId
-  Debug.debugM Debug.Server "definitionForHQName: (namesPerspective, query)" (namesPerspective, query)
-  -- Bias towards both relative and absolute path to queries,
-  -- This allows us to still bias towards definitions outside our namesRoot but within the
-  -- same tree;
-  -- e.g. if the query is `map` and we're in `base.trunk.List`,
-  -- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
-  -- `trunk` over those in other releases.
-  -- ppe which returns names fully qualified to the current namesRoot, not to the codebase root.
-  let biases = maybeToList $ HQ.toName query
-  let ppedBuilder deps = (PPED.biasTo biases) <$> lift (PPEPostgres.ppedForReferences namesPerspective deps)
-  let nameSearch = PGNameSearch.nameSearchForPerspective namesPerspective
-  dr@(Backend.DefinitionResults terms types misses) <- mkDefinitionsForQuery nameSearch [query]
-  Debug.debugM Debug.Server "definitionForHQName: found definitions" dr
-  let width = mayDefaultWidth renderWidth
-  let docResults :: Name -> Codebase.CodebaseM e [(HashQualifiedName, UnisonHash, Doc.Doc)]
-      docResults name = do
-        Debug.debugM Debug.Server "definitionForHQName: looking up docs for name" name
-        docRefs <- Docs.docsForDefinitionName nameSearch name
-        Debug.debugM Debug.Server "definitionForHQName: Found these docs" docRefs
-        renderDocRefs ppedBuilder width rt docRefs
+  codebaseOwnerUserId <- asks Codebase.codebaseOwner
+  let cacheKey =
+        Caching.CacheKey
+          { cacheTopic = "definitionForHQName",
+            key = [("perspective", Path.toText perspective), ("suffixify", tShow $ suffixified (suffixifyBindings)), ("hqName", HQ.toText perspectiveQuery), ("width", tShow renderWidth)],
+            rootCausalId = Just rootCausalId,
+            sandbox = Just codebaseOwnerUserId
+          }
+  Caching.usingJSONCache cacheKey go
+  where
+    go :: Codebase.CodebaseM e DefinitionDisplayResults
+    go = do
+      rootBranchNamespaceHashId <- CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
+      (namesPerspective, query) <- NameLookupOps.relocateToNameRoot perspective perspectiveQuery rootBranchNamespaceHashId
+      Debug.debugM Debug.Server "definitionForHQName: (namesPerspective, query)" (namesPerspective, query)
+      -- Bias towards both relative and absolute path to queries,
+      -- This allows us to still bias towards definitions outside our namesRoot but within the
+      -- same tree;
+      -- e.g. if the query is `map` and we're in `base.trunk.List`,
+      -- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
+      -- `trunk` over those in other releases.
+      -- ppe which returns names fully qualified to the current namesRoot, not to the codebase root.
+      let biases = maybeToList $ HQ.toName query
+      let ppedBuilder deps = (PPED.biasTo biases) <$> lift (PPEPostgres.ppedForReferences namesPerspective deps)
+      let nameSearch = PGNameSearch.nameSearchForPerspective namesPerspective
+      dr@(Backend.DefinitionResults terms types misses) <- mkDefinitionsForQuery nameSearch [query]
+      Debug.debugM Debug.Server "definitionForHQName: found definitions" dr
+      let width = mayDefaultWidth renderWidth
+      let docResults :: Name -> Codebase.CodebaseM e [(HashQualifiedName, UnisonHash, Doc.Doc)]
+          docResults name = do
+            Debug.debugM Debug.Server "definitionForHQName: looking up docs for name" name
+            docRefs <- Docs.docsForDefinitionName nameSearch name
+            Debug.debugM Debug.Server "definitionForHQName: Found these docs" docRefs
+            renderDocRefs ppedBuilder width rt docRefs
 
-  let drDeps = Backend.definitionResultsDependencies dr
-  termAndTypePPED <- ppedBuilder drDeps
-  let fqnTermAndTypePPE = PPED.unsuffixifiedPPE termAndTypePPED
-  typeDefinitions <-
-    ifor (Backend.typesToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) types) \ref tp -> do
-      let hqTypeName = PPE.typeNameOrHashOnly fqnTermAndTypePPE ref
-      docs <- maybe (pure []) docResults (HQ.toName hqTypeName)
-      lift $ Backend.mkTypeDefinition termAndTypePPED width ref docs tp
-  termDefinitions <-
-    ifor (Backend.termsToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) terms) \reference trm -> do
-      let referent = Referent.Ref reference
-      let hqTermName = PPE.termNameOrHashOnly fqnTermAndTypePPE referent
-      docs <- maybe (pure []) docResults (HQ.toName hqTermName)
-      Backend.mkTermDefinition termAndTypePPED width reference docs trm
-  let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
-      renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
-      renderedMisses = fmap HQ.toText misses
-  pure $
-    DefinitionDisplayResults
-      renderedDisplayTerms
-      renderedDisplayTypes
-      renderedMisses
+      let drDeps = Backend.definitionResultsDependencies dr
+      termAndTypePPED <- ppedBuilder drDeps
+      let fqnTermAndTypePPE = PPED.unsuffixifiedPPE termAndTypePPED
+      typeDefinitions <-
+        ifor (Backend.typesToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) types) \ref tp -> do
+          let hqTypeName = PPE.typeNameOrHashOnly fqnTermAndTypePPE ref
+          docs <- maybe (pure []) docResults (HQ.toName hqTypeName)
+          lift $ Backend.mkTypeDefinition termAndTypePPED width ref docs tp
+      termDefinitions <-
+        ifor (Backend.termsToSyntaxOf suffixifyBindings width termAndTypePPED (Map.asList_ . traversed) terms) \reference trm -> do
+          let referent = Referent.Ref reference
+          let hqTermName = PPE.termNameOrHashOnly fqnTermAndTypePPE referent
+          docs <- maybe (pure []) docResults (HQ.toName hqTermName)
+          Backend.mkTermDefinition termAndTypePPED width reference docs trm
+      let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
+          renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
+          renderedMisses = fmap HQ.toText misses
+      pure $
+        DefinitionDisplayResults
+          renderedDisplayTerms
+          renderedDisplayTypes
+          renderedMisses
 
 renderDocRefs ::
   PPEDBuilder (Codebase.CodebaseM e) ->
diff --git a/src/Unison/Server/Share/RenderDoc.hs b/src/Unison/Server/Share/RenderDoc.hs
index b25616b2..8aaebcda 100644
--- a/src/Unison/Server/Share/RenderDoc.hs
+++ b/src/Unison/Server/Share/RenderDoc.hs
@@ -12,19 +12,23 @@ module Unison.Server.Share.RenderDoc where
 
 import Data.Set qualified as Set
 import Share.Backend qualified as Backend
+import Share.Codebase qualified as Codebase
 import Share.Codebase.Types (CodebaseM, CodebaseRuntime)
 import Share.Postgres.Causal.Queries qualified as CausalQ
 import Share.Postgres.IDs (CausalId)
 import Share.Postgres.NameLookups.Ops qualified as NLOps
 import Share.Postgres.NameLookups.Types (PathSegments (..))
 import Share.Prelude
+import Share.Utils.Caching.JSON qualified as Caching
 import U.Codebase.Causal qualified as V2Causal
 import Unison.Codebase.Path qualified as Path
 import Unison.LabeledDependency qualified as LD
 import Unison.NameSegment.Internal (NameSegment (..))
 import Unison.PrettyPrintEnvDecl.Postgres qualified as PostgresPPE
+import Unison.Reference qualified as Reference
 import Unison.Server.Doc (Doc)
 import Unison.Server.Doc qualified as Doc
+import Unison.ShortHash qualified as SH
 import Unison.Util.Pretty (Width)
 
 -- | Find, eval, and render the first doc we find with any of the provided names within the given namespace
@@ -42,9 +46,19 @@ findAndRenderDoc docNames runtime namespacePath rootCausalId _mayWidth = runMayb
   rootNamespaceHashId <- lift $ CausalQ.expectNamespaceIdsByCausalIdsOf id rootCausalId
   namespaceCausal <- MaybeT $ CausalQ.loadCausalNamespaceAtPath rootCausalId namespacePath
   shallowBranchAtNamespace <- lift $ V2Causal.value namespaceCausal
-  namesPerspective <- NLOps.namesPerspectiveForRootAndPath rootNamespaceHashId (coerce $ Path.toList namespacePath)
   docRef <- MaybeT . pure $ Backend.findDocInBranch docNames shallowBranchAtNamespace
-  eDoc <- lift $ Backend.evalDocRef runtime docRef
-  let docDeps = Doc.dependencies eDoc <> Set.singleton (LD.TermReference docRef)
-  docPPE <- PostgresPPE.ppedForReferences namesPerspective docDeps
-  pure $ Doc.renderDoc docPPE eDoc
+  codebaseOwnerUserId <- asks Codebase.codebaseOwner
+  let cacheKey =
+        Caching.CacheKey
+          { cacheTopic = "findAndRenderDoc",
+            key = [("namespacePath", tShow namespacePath), ("docRef", SH.toText $ Reference.toShortHash docRef)],
+            rootCausalId = Just rootCausalId,
+            sandbox = Just codebaseOwnerUserId
+          }
+
+  lift $ Caching.usingJSONCache cacheKey do
+    namesPerspective <- NLOps.namesPerspectiveForRootAndPath rootNamespaceHashId (coerce $ Path.toList namespacePath)
+    eDoc <- Backend.evalDocRef runtime docRef
+    let docDeps = Doc.dependencies eDoc <> Set.singleton (LD.TermReference docRef)
+    docPPE <- PostgresPPE.ppedForReferences namesPerspective docDeps
+    pure $ Doc.renderDoc docPPE eDoc
diff --git a/unison b/unison
index 49432a6c..8d287d63 160000
--- a/unison
+++ b/unison
@@ -1 +1 @@
-Subproject commit 49432a6c3463326c3a0fadaea2794a66fdaabde9
+Subproject commit 8d287d63ca70700cd7a86b4e3a40dfdc7034b726
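
-- Usage sketch (not part of the patch above). Both call sites in this patch
-- follow the same pattern: build a Caching.CacheKey from a topic, ordered
-- key-value pairs, the root causal the result is derived from, and the
-- codebase owner as the sandbox, then wrap the expensive computation with
-- Caching.usingJSONCache. `summarizeNamespace` and `NamespaceSummary` below
-- are hypothetical stand-ins for any ToJSON/FromJSON result computed in
-- CodebaseM; everything else mirrors the call sites above.
cachedNamespaceSummary :: CausalId -> Path.Path -> Codebase.CodebaseM e NamespaceSummary
cachedNamespaceSummary rootCausalId path = do
  codebaseOwnerUserId <- asks Codebase.codebaseOwner
  let cacheKey =
        Caching.CacheKey
          { cacheTopic = "namespaceSummary",
            -- Ordered key-value pairs; any rootCausalId is folded into the key by encodeKey.
            key = [("path", Path.toText path)],
            rootCausalId = Just rootCausalId,
            sandbox = Just codebaseOwnerUserId
          }
  -- On a hit the cached JSON is decoded and returned; on a miss (or a decode
  -- failure) the action runs and its result is upserted into json_cache,
  -- keyed by the (topic, codebase_user_id, key) unique index.
  Caching.usingJSONCache cacheKey (summarizeNamespace rootCausalId path)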