diff --git a/bench/locli/CHANGELOG.md b/bench/locli/CHANGELOG.md index 6bfaea32800..3b3fbebe0c2 100644 --- a/bench/locli/CHANGELOG.md +++ b/bench/locli/CHANGELOG.md @@ -1,5 +1,14 @@ # Revision history for locli +## 2.0 -- Dec 2024 + +* New database (DB) persistence backend for log objects using serverless SQLite DBs +* Refactor current file persistence backend into its own module +* New CLI commands `prepare-db` and `unlog-db` to create and read from DB persistence backend respectively +* New sum type `LogObjectSource` to represent input from different backends (file or DB) +* Tweak GC to mitigate high RAM requirements (for perf cluster analyses only) +* New executable `locli-quick` which aims to be a development testbed for (upcoming) DB-backed quick queries + ## 1.36 -- Nov 2024 * Add `CHANGELOG.md` for `locli` diff --git a/bench/locli/app/locli-quick.hs b/bench/locli/app/locli-quick.hs new file mode 100644 index 00000000000..1a849cf25c0 --- /dev/null +++ b/bench/locli/app/locli-quick.hs @@ -0,0 +1,57 @@ +import Cardano.Api (SlotNo (..)) + +import Cardano.Unlog.BackendDB +import Cardano.Unlog.LogObject (LOBody (..), LogObject (..)) +import Cardano.Unlog.LogObjectDB +import Cardano.Util + +import Prelude hiding (log) + +import Data.Bifunctor (second) +import Data.List.Split (chop) +import Data.Maybe +import System.Environment (getArgs) + +import Database.Sqlite.Easy hiding (Text) + + +main :: IO () +main = do + getArgs >>= \case + [] -> putStrLn "please specify DB file" + db : _ -> runDB $ fromString db + +-- sample case: +-- we want to know the txns in mempool for each slot + +runDB :: ConnectionString -> IO () +runDB dbName = do + (summary, res2) <- + withTimingInfo "withDb/selectMempoolTxs" $ + withDb dbName $ + (,) <$> getSummary <*> run selectMempoolTxs + + let logObjects = map (sqlToLogObject summary) res2 + + -- TODO: needs a reducer + mapM_ (print . second safeLast) (bySlotDomain logObjects) + where + safeLast [] = [] + safeLast xs = [last xs] + +bySlotDomain :: [LogObject] -> [(SlotNo, [LogObject])] +bySlotDomain logObjs = + case dropWhile (isNothing . newSlot) logObjs of + [] -> [] + xs -> chop go xs + where + newSlot LogObject{loBody} = case loBody of { LOTraceStartLeadershipCheck s _ _ -> Just s; _ -> Nothing } + + go (lo:los) = let (inSlot, rest) = span (isNothing . newSlot) los in ((fromJust $ newSlot lo, inSlot), rest) + go [] = error "bySlotDomain/chop: empty list" + +selectMempoolTxs :: SQL +selectMempoolTxs = sqlOrdered + [ sqlGetSlot + , sqlGetTxns `sqlAppend` "WHERE cons='LOMempoolTxs'" + ] diff --git a/bench/locli/locli.cabal b/bench/locli/locli.cabal index ba55c43f405..5d2ee1aa961 100644 --- a/bench/locli/locli.cabal +++ b/bench/locli/locli.cabal @@ -1,7 +1,7 @@ cabal-version: 3.0 name: locli -version: 1.36 +version: 2.0 synopsis: Cardano log analysis CLI description: Cardano log analysis CLI. 
category: Cardano, @@ -50,6 +50,9 @@ common project-config -Wcompat -Wno-all-missed-specialisations + if impl(ghc >= 9.8) + ghc-options: -Wno-x-partial + build-depends: base >= 4.14 && < 5, if os(windows) @@ -89,7 +92,10 @@ library Cardano.Org Cardano.Render + Cardano.Unlog.BackendDB + Cardano.Unlog.BackendFile Cardano.Unlog.LogObject + Cardano.Unlog.LogObjectDB Cardano.Unlog.Resources other-modules: Paths_locli @@ -116,6 +122,7 @@ library , ouroboros-network-api ^>= 0.10 , sop-core , split + , sqlite-easy >= 1.1.0.1 , statistics , strict-sop-core , text @@ -136,7 +143,7 @@ executable locli main-is: locli.hs ghc-options: -threaded -rtsopts - "-with-rtsopts=-T -N7 -A2m -qb -H64m" + "-with-rtsopts=-T -N7 -A2m -c -H64m" build-depends: aeson , cardano-prelude @@ -147,6 +154,30 @@ executable locli , transformers , transformers-except +executable locli-quick + import: project-config + + hs-source-dirs: app + main-is: locli-quick.hs + ghc-options: -threaded + -rtsopts + "-with-rtsopts=-T -N7 -A2m -c -H64m" + + build-depends: locli + , aeson + , async + , bytestring + , containers + , cardano-api + , extra + , split + , text + , text-short + , time + , trace-resources + , sqlite-easy >= 1.1.0.1 + , unordered-containers + test-suite test-locli import: project-config @@ -163,4 +194,5 @@ test-suite test-locli , text other-modules: Test.Analysis.CDF + Test.Unlog.LogObjectDB Test.Unlog.Org diff --git a/bench/locli/src/Cardano/Analysis/API/Ground.hs b/bench/locli/src/Cardano/Analysis/API/Ground.hs index 005108b4946..d3a7f25a693 100644 --- a/bench/locli/src/Cardano/Analysis/API/Ground.hs +++ b/bench/locli/src/Cardano/Analysis/API/Ground.hs @@ -1,6 +1,7 @@ -{-# LANGUAGE PolyKinds #-} {-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} +{-# LANGUAGE PolyKinds #-} {-# OPTIONS_GHC -Wno-orphans #-} module Cardano.Analysis.API.Ground ( module Cardano.Analysis.API.Ground @@ -10,28 +11,28 @@ module Cardano.Analysis.API.Ground ) where -import Prelude as P (show) -import Cardano.Prelude hiding (head, toText) -import Unsafe.Coerce qualified as Unsafe +import Cardano.Prelude hiding (head, toText) +import Cardano.Slotting.Slot (EpochNo (..), SlotNo (..)) +import Cardano.Util +import Ouroboros.Network.Block (BlockNo (..)) -import Data.Aeson -import Data.Aeson.Types (toJSONKeyText) -import Data.ByteString.Lazy.Char8 qualified as LBS -import Data.Map.Strict qualified as Map -import Data.Text qualified as T -import Data.Text.Short qualified as SText -import Data.Text.Short (ShortText, fromText, toText) -import Data.Time.Clock (UTCTime, NominalDiffTime) -import Options.Applicative -import Options.Applicative qualified as Opt -import System.FilePath qualified as F +import Prelude as P (show) -import Cardano.Slotting.Slot (EpochNo(..), SlotNo(..)) -import Ouroboros.Network.Block (BlockNo(..)) +import Data.Aeson +import Data.Aeson.Types (toJSONKeyText) +import qualified Data.ByteString.Lazy.Char8 as LBS +import Data.CDF +import Data.Data (Data) +import Data.DataDomain +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Data.Text.Short (ShortText, fromText, toText) +import qualified Data.Text.Short as SText +import Data.Time.Clock (NominalDiffTime, UTCTime) +import Options.Applicative as Opt +import qualified System.FilePath as F -import Data.CDF -import Data.DataDomain -import Cardano.Util +import qualified Unsafe.Coerce as Unsafe newtype FieldName = FieldName { unFieldName :: Text } @@ -51,7 +52,7 @@ instance Show TId where show = 
("TId " ++) . P.show . unTId newtype Hash = Hash { unHash :: ShortText } - deriving (Eq, Generic, Ord) + deriving (Eq, Generic, Ord, Data) deriving newtype (FromJSON, ToJSON) deriving anyclass NFData @@ -154,17 +155,50 @@ newtype CsvOutputFile = CsvOutputFile { unCsvOutputFile :: FilePath } deriving (Show, Eq) +newtype SqliteOutputFile + = SqliteOutputFile { unSqliteOutputFile :: FilePath } + deriving (Show, Eq) + newtype OutputFile = OutputFile { unOutputFile :: FilePath } deriving (Show, Eq) +data LogObjectSource = + LogObjectSourceJSON JsonLogfile + | LogObjectSourceSQLite FilePath + | LogObjectSourceOther FilePath + deriving (Show, Eq, Generic, NFData) + +logObjectSourceFile :: LogObjectSource -> FilePath +logObjectSourceFile = \case + LogObjectSourceJSON j -> unJsonLogfile j + LogObjectSourceSQLite f -> f + LogObjectSourceOther f -> f + +toLogObjectSource :: FilePath -> LogObjectSource +toLogObjectSource fp + | ext == ".sqlite" || ext == ".sqlite3" = LogObjectSourceSQLite fp + | ext == ".json" = LogObjectSourceJSON (JsonLogfile fp) + | otherwise = LogObjectSourceOther fp + where + ext = map toLower $ F.takeExtension fp + +instance FromJSON LogObjectSource where + parseJSON = withText "LogObjectSource" (pure . toLogObjectSource . T.unpack) + +instance ToJSON LogObjectSource where + toJSON = toJSON . logObjectSourceFile + --- --- Orphans --- deriving newtype instance Real BlockNo deriving newtype instance Divisible BlockNo +deriving instance Data BlockNo + deriving newtype instance Real SlotNo deriving newtype instance Divisible SlotNo +deriving instance Data SlotNo --- --- Readers @@ -202,6 +236,14 @@ optJsonLogfile optname desc = <> metavar "JSONLOGFILE" <> help desc +optLogObjectSource :: String -> String -> Parser LogObjectSource +optLogObjectSource optname desc = + fmap toLogObjectSource $ + Opt.option Opt.str + $ long optname + <> metavar "JSONLOGFILE|SQLITE3LOGFILE" + <> help desc + argJsonLogfile :: Parser JsonLogfile argJsonLogfile = JsonLogfile <$> @@ -255,6 +297,14 @@ optCsvOutputFile optname desc = <> metavar "CSV-OUTFILE" <> help desc +optSqliteOutputFile :: String -> String -> Parser SqliteOutputFile +optSqliteOutputFile optname desc = + fmap SqliteOutputFile $ + Opt.option Opt.str + $ long optname + <> metavar "SQLITE-OUTFILE" + <> help desc + optOutputFile :: String -> String -> Parser OutputFile optOutputFile optname desc = fmap OutputFile $ @@ -279,6 +329,12 @@ optWord optname desc def = <> metavar "INT" <> help desc <> value def + +optString :: String -> String -> Parser String +optString optname desc = + Opt.option Opt.str $ + long optname <> metavar "STRING" <> Opt.help desc + -- /path/to/logs-HOSTNAME.some.ext -> HOSTNAME hostFromLogfilename :: JsonLogfile -> Host hostFromLogfilename (JsonLogfile f) = @@ -302,26 +358,26 @@ dumpObjects ident xs (JsonOutputFile f) = liftIO $ do withFile f WriteMode $ \hnd -> do forM_ xs $ LBS.hPutStrLn hnd . encode -dumpAssociatedObjects :: ToJSON a => String -> [(JsonLogfile, a)] -> ExceptT Text IO () +dumpAssociatedObjects :: ToJSON a => String -> [(LogObjectSource, a)] -> ExceptT Text IO () dumpAssociatedObjects ident xs = liftIO $ flip mapConcurrently_ xs $ - \(JsonLogfile f, x) -> + \(logObjectSourceFile -> f, x) -> withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> LBS.hPutStrLn hnd $ encode x readAssociatedObjects :: forall a. 
- FromJSON a => String -> [JsonLogfile] -> ExceptT Text IO [(JsonLogfile, a)] + FromJSON a => String -> [JsonLogfile] -> ExceptT Text IO [(LogObjectSource, a)] readAssociatedObjects ident fs = firstExceptT T.pack . newExceptT . fmap (mapM sequence) $ flip mapConcurrently fs $ \jf@(JsonLogfile f) -> do x <- eitherDecode @a <$> LBS.readFile (replaceExtension f $ ident <> ".json") progress ident (Q f) - pure (jf, x) + pure (LogObjectSourceJSON jf, x) -dumpAssociatedObjectStreams :: ToJSON a => String -> [(JsonLogfile, [a])] -> ExceptT Text IO () +dumpAssociatedObjectStreams :: ToJSON a => String -> [(LogObjectSource, [a])] -> ExceptT Text IO () dumpAssociatedObjectStreams ident xss = liftIO $ flip mapConcurrently_ xss $ - \(JsonLogfile f, xs) -> do + \(logObjectSourceFile -> f, xs) -> do withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> do forM_ xs $ LBS.hPutStrLn hnd . encode @@ -331,9 +387,9 @@ dumpText ident xs (TextOutputFile f) = liftIO $ do withFile f WriteMode $ \hnd -> do forM_ xs $ hPutStrLn hnd -dumpAssociatedTextStreams :: String -> [(JsonLogfile, [Text])] -> ExceptT Text IO () +dumpAssociatedTextStreams :: String -> [(LogObjectSource, [Text])] -> ExceptT Text IO () dumpAssociatedTextStreams ident xss = liftIO $ flip mapConcurrently_ xss $ - \(JsonLogfile f, xs) -> do + \(logObjectSourceFile -> f, xs) -> do withFile (replaceExtension f $ ident <> ".txt") WriteMode $ \hnd -> do forM_ xs $ hPutStrLn hnd diff --git a/bench/locli/src/Cardano/Analysis/BlockProp.hs b/bench/locli/src/Cardano/Analysis/BlockProp.hs index 73dfd8c11a2..ba24ce6fca7 100644 --- a/bench/locli/src/Cardano/Analysis/BlockProp.hs +++ b/bench/locli/src/Cardano/Analysis/BlockProp.hs @@ -1,17 +1,9 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE StrictData #-} -#if __GLASGOW_HASKELL__ >= 908 -{-# OPTIONS_GHC -Wno-x-partial #-} -#endif - {-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing #-} -{-# OPTIONS_GHC -Wno-unused-imports -Wno-partial-fields -Wno-unused-matches -Wno-deprecations -Wno-unused-local-binds -Wno-incomplete-record-updates #-} -{- HLINT ignore "Avoid lambda" -} {- HLINT ignore "Eta reduce" -} -{- HLINT ignore "Use head" -} module Cardano.Analysis.BlockProp ( summariseMultiBlockProp @@ -25,42 +17,18 @@ module Cardano.Analysis.BlockProp ) where -import Prelude (String, (!!), error, head, last, id, show, tail, read) -import Cardano.Prelude hiding (head, show) - -import Control.Arrow ((***), (&&&)) -import Data.Aeson (ToJSON(..), FromJSON(..)) -import Data.Bifunctor -import Data.Function (on) -import Data.List (break, dropWhileEnd, intercalate, partition, span) -import Data.Map.Strict (Map) -import Data.Map.Strict qualified as Map -import Data.Maybe (catMaybes, mapMaybe, isNothing) -import Data.Set (Set) -import Data.Set qualified as Set -import Data.Text qualified as T -import Data.Text.Short (toText) -import Data.Tuple (swap) -import Data.Tuple.Extra (both, fst3, snd3, thd3) -import Data.Vector (Vector) -import Data.Vector qualified as Vec - -import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime, diffUTCTime) - -import Text.Printf (printf) +import Cardano.Analysis.API +import Cardano.Prelude hiding (head, show) +import Cardano.Unlog.LogObject +import Cardano.Util -import Cardano.Slotting.Slot (EpochNo(..), SlotNo(..)) -import Ouroboros.Network.Block (BlockNo(..)) +import Prelude (id, read, show) -import Data.Accum -import Data.CDF - -import Cardano.Render -import Cardano.Unlog.LogObject -import Cardano.Unlog.Resources -import Cardano.Util - -import 
Cardano.Analysis.API +import Data.List (partition) +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import Data.Tuple.Extra (both, fst3, snd3, thd3) summariseMultiBlockProp :: [Centile] -> [BlockPropOne] -> Either CDFError MultiBlockProp @@ -293,14 +261,14 @@ beForgedAt :: BlockEvents -> UTCTime beForgedAt BlockEvents{beForge=BlockForge{..}} = bfForged `afterSlot` bfSlotStart -buildMachViews :: Run -> [(JsonLogfile, [LogObject])] -> IO [(JsonLogfile, MachView)] +buildMachViews :: Run -> [(LogObjectSource, [LogObject])] -> IO [(LogObjectSource, MachView)] buildMachViews run = mapConcurrentlyPure (fst &&& blockEventMapsFromLogObjects run) blockEventsAcceptance :: Genesis -> [ChainFilter] -> BlockEvents -> [(ChainFilter, Bool)] blockEventsAcceptance genesis flts be = flts <&> (id &&& testBlockEvents genesis be) -rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(JsonLogfile, MachView)] -> Chain -rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = +rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(LogObjectSource, MachView)] -> Chain +rebuildChain Run{genesis} flts _fltNames (fmap snd -> machViews) = Chain { cDomSlots = DataDomain (Interval (blk0 & beSlotNo) (blkL & beSlotNo) <&> I) @@ -320,8 +288,8 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = doRebuildChain (fmap deltifyEvents <$> eventMaps) tipHash (accepta, cRejecta) = partition (all snd . beAcceptance) cMainChain - blkSets :: (Set Hash, Set Hash) - blkSets@(acceptaBlocks, rejectaBlocks) = + acceptaBlocks, rejectaBlocks :: Set Hash + (acceptaBlocks, rejectaBlocks) = both (Set.fromList . fmap beBlock) (accepta, cRejecta) mvBlockStats :: MachView -> HostBlockStats mvBlockStats (fmap bfeBlock . mvForges -> fs) = HostBlockStats {..} @@ -346,7 +314,7 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = finalBlockNo = mbeBlockNo finalBlockEv tipHash = rewindChain eventMaps finalBlockNo 1 (mbeBlock finalBlockEv) - tipBlock = getBlockForge eventMaps finalBlockNo tipHash + _tipBlock = getBlockForge eventMaps finalBlockNo tipHash computeChainBlockGaps :: [BlockEvents] -> [BlockEvents] computeChainBlockGaps [] = error "computeChainBlockGaps on an empty chain" @@ -376,11 +344,12 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = ]) & mapMbe id (error "Silly invariant failed.") (error "Silly invariant failed.") - adoptionMap :: [Map Hash UTCTime] - adoptionMap = Map.mapMaybe (lazySMaybe . mbeAdopted) <$> eventMaps + adoptionMap :: [Map Hash UTCTime] + adoptionMap = Map.mapMaybe (lazySMaybe . 
mbeAdopted) <$> eventMaps - heightHostMap :: (Map BlockNo (Set Hash), Map Host (Set Hash)) - heightHostMap@(heightMap, hostMap) + heightMap :: Map BlockNo (Set Hash) + _hostMap :: Map Host (Set Hash) + (heightMap, _hostMap) = foldr (\MachView{..} (accHeight, accHost) -> (,) (Map.foldr @@ -405,7 +374,7 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = [ "No forger for hash ", show hash , "\nErrors:\n" ] ++ intercalate "\n" (show <$> ers) - blkEvs@(forgerEv:_, oEvs, ers) -> + (forgerEv:_, oEvs, ers) -> go (bfePrevBlock forgerEv) (liftBlockEvents forgerEv oEvs ers : acc) liftBlockEvents :: ForgerEvents NominalDiffTime -> [ObserverEvents NominalDiffTime] -> [BPError] -> BlockEvents @@ -535,7 +504,7 @@ renderBlockPropError = \case rejs blockProp :: Run -> Chain -> Either BlockPropError BlockPropOne -blockProp run@Run{genesis} Chain{..} = do +blockProp _ Chain{..} = do (c :: [BlockEvents]) <- case filter (all snd . beAcceptance) cMainChain of [] -> Left $ @@ -589,11 +558,6 @@ blockProp run@Run{genesis} Chain{..} = do & filter (not . isNaN)) } where - ne :: String -> [a] -> [a] - ne desc = \case - [] -> error desc - xs -> xs - hostBlockStats = Map.elems cHostBlockStats boFetchedCum :: BlockObservation -> NominalDiffTime @@ -629,10 +593,10 @@ blockProp run@Run{genesis} Chain{..} = do cdfZ percs $ concatMap f cbes -- | Given a single machine's log object stream, recover its block map. -blockEventMapsFromLogObjects :: Run -> (JsonLogfile, [LogObject]) -> MachView -blockEventMapsFromLogObjects run (f@(unJsonLogfile -> fp), []) = - error $ mconcat ["0 LogObjects in ", fp] -blockEventMapsFromLogObjects run (f@(unJsonLogfile -> fp), xs@(x:_)) = +blockEventMapsFromLogObjects :: Run -> (LogObjectSource, [LogObject]) -> MachView +blockEventMapsFromLogObjects _ (f, []) = + error $ mconcat ["0 LogObjects in ", logObjectSourceFile f] +blockEventMapsFromLogObjects run (f, xs@(x:_)) = foldl' (blockPropMachEventsStep run f) initial xs where initial = @@ -648,8 +612,8 @@ blockEventMapsFromLogObjects run (f@(unJsonLogfile -> fp), xs@(x:_)) = , mvMemSnap = SNothing } -blockPropMachEventsStep :: Run -> JsonLogfile -> MachView -> LogObject -> MachView -blockPropMachEventsStep run@Run{genesis} (JsonLogfile fp) mv@MachView{..} lo = case lo of +blockPropMachEventsStep :: Run -> LogObjectSource -> MachView -> LogObject -> MachView +blockPropMachEventsStep Run{genesis} _ mv@MachView{..} lo = case lo of -- 0. Notice (observer only) LogObject{loAt, loHost, loBody=LOChainSyncClientSeenHeader{loBlock,loBlockNo,loSlotNo}} -> let mbe0 = getBlock loBlock diff --git a/bench/locli/src/Cardano/Analysis/MachPerf.hs b/bench/locli/src/Cardano/Analysis/MachPerf.hs index 4ee40096443..e77c0b1820e 100644 --- a/bench/locli/src/Cardano/Analysis/MachPerf.hs +++ b/bench/locli/src/Cardano/Analysis/MachPerf.hs @@ -1,12 +1,8 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE MultiWayIf #-} -#if __GLASGOW_HASKELL__ >= 908 -{-# OPTIONS_GHC -Wno-x-partial #-} -#endif +{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing #-} -{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing -Wno-orphans #-} {- HLINT ignore "Use head" -} {- HLINT ignore "Evaluate" -} @@ -33,21 +29,21 @@ import Cardano.Unlog.Resources -- * 1. 
Collect SlotStats & RunScalars: -- -collectSlotStats :: Run -> [(JsonLogfile, [LogObject])] - -> IO (Either Text [(JsonLogfile, (RunScalars, [SlotStats UTCTime]))]) +collectSlotStats :: Run -> [(LogObjectSource, [LogObject])] + -> IO (Either Text [(LogObjectSource, (RunScalars, [SlotStats UTCTime]))]) collectSlotStats run = fmap sequence <$> mapConcurrentlyPure (timelineFromLogObjects run) -timelineFromLogObjects :: Run -> (JsonLogfile, [LogObject]) - -> Either Text (JsonLogfile, (RunScalars, [SlotStats UTCTime])) -timelineFromLogObjects _ (JsonLogfile f, []) = - Left $ "timelineFromLogObjects: zero logobjects from " <> pack f +timelineFromLogObjects :: Run -> (LogObjectSource, [LogObject]) + -> Either Text (LogObjectSource, (RunScalars, [SlotStats UTCTime])) +timelineFromLogObjects _ (f, []) = + Left $ "timelineFromLogObjects: zero logobjects from " <> pack (logObjectSourceFile f) timelineFromLogObjects run@Run{genesis} (f, xs') = Right . (f,) $ foldl' (timelineStep run f) zeroTimelineAccum xs & (aRunScalars &&& reverse . aSlotStats) where - xs = filter (not . (`textRefEquals` "DecodeError") . loKind) xs' + xs = filter (not . ("DecodeError" `textRefEquals`) . loKind) xs' firstRelevantLogObjectTime :: UTCTime firstRelevantLogObjectTime = loAt (head xs) `max` systemStart genesis @@ -107,7 +103,7 @@ timelineFromLogObjects run@Run{genesis} (f, xs') = , slLogObjects = [] } -timelineStep :: Run -> JsonLogfile -> TimelineAccum -> LogObject -> TimelineAccum +timelineStep :: Run -> LogObjectSource -> TimelineAccum -> LogObject -> TimelineAccum timelineStep Run{genesis} f accum@TimelineAccum{aSlotStats=cur:_, ..} lo = -- 1. skip pre-historic events not subject to performance analysis; -- Potentially _collapsingly huge_, depending on what portion of logs you get. @@ -152,7 +148,7 @@ timelineStep Run{genesis} f accum@TimelineAccum{aSlotStats=cur:_, ..} lo = [ desc, " for a future slot=", show slot , " cur=", show (slSlot cur) , " host=", unpack . toText $ unHost host - , " file=", unJsonLogfile f + , " file=", logObjectSourceFile f ] else forExistingSlot slot acc x in @@ -467,14 +463,14 @@ runSlotFilters :: NFData a => Run -> [ChainFilter] - -> [(JsonLogfile, [SlotStats a])] - -> IO (DataDomain I SlotNo, [(JsonLogfile, [SlotStats a])]) + -> [(LogObjectSource, [SlotStats a])] + -> IO (DataDomain I SlotNo, [(LogObjectSource, [SlotStats a])]) runSlotFilters Run{genesis} flts slots = mapConcurrentlyPure (fmap $ filterSlotStats flts) slots <&> \filtered -> (,) (domain filtered) filtered where - domain :: [(JsonLogfile, [SlotStats a])] -> DataDomain I SlotNo + domain :: [(LogObjectSource, [SlotStats a])] -> DataDomain I SlotNo domain filtered = mkDataDomain ((CP.head samplePre <&> slSlot) & fromMaybe 0) ((lastMay samplePre <&> slSlot) & fromMaybe 0) @@ -567,9 +563,9 @@ slotStatsSummary Run{genesis=Genesis{epochLength}} slots = -- * 4. Summarise SlotStats & SlotStatsSummary into MachPerf: -- -slotStatsMachPerf :: Run -> (JsonLogfile, [SlotStats NominalDiffTime]) -> Either Text (JsonLogfile, MachPerfOne) -slotStatsMachPerf _ (JsonLogfile f, []) = - Left $ "slotStatsMachPerf: zero filtered slots from " <> pack f +slotStatsMachPerf :: Run -> (LogObjectSource, [SlotStats NominalDiffTime]) -> Either Text (LogObjectSource, MachPerfOne) +slotStatsMachPerf _ (f, []) = + Left $ "slotStatsMachPerf: zero filtered slots from " <> pack (logObjectSourceFile f) slotStatsMachPerf run (f, slots) = Right . 
(f,) $ MachPerf { mpVersion = getLocliVersion diff --git a/bench/locli/src/Cardano/Analysis/Summary.hs b/bench/locli/src/Cardano/Analysis/Summary.hs index 7994c08e2d6..3ff775af4a4 100644 --- a/bench/locli/src/Cardano/Analysis/Summary.hs +++ b/bench/locli/src/Cardano/Analysis/Summary.hs @@ -1,11 +1,6 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} {-# LANGUAGE UndecidableInstances #-} -#if __GLASGOW_HASKELL__ >= 908 -{-# OPTIONS_GHC -Wno-x-partial #-} -#endif - {-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} {- HLINT ignore "Use mapMaybe" -} diff --git a/bench/locli/src/Cardano/Command.hs b/bench/locli/src/Cardano/Command.hs index 21a5a539152..782b8c68735 100644 --- a/bench/locli/src/Cardano/Command.hs +++ b/bench/locli/src/Cardano/Command.hs @@ -21,6 +21,7 @@ import Options.Applicative qualified as Opt import System.Directory (doesFileExist) import System.FilePath +import System.Mem qualified as Mem (performGC) import System.Posix.Files qualified as IO import Cardano.Analysis.API @@ -29,6 +30,8 @@ import Cardano.Analysis.MachPerf import Cardano.Analysis.Summary import Cardano.Render import Cardano.Report +import Cardano.Unlog.BackendDB +import Cardano.Unlog.BackendFile import Cardano.Unlog.LogObject import Cardano.Util hiding (head) @@ -50,9 +53,10 @@ data ChainCommand = ReadMetaGenesis (JsonInputFile RunPartial) (JsonInputFile Genesis) | WriteMetaGenesis TextOutputFile TextOutputFile - | Unlog (JsonInputFile (RunLogs ())) Bool (Maybe [LOAnyType]) + | Unlog (JsonInputFile (RunLogs ())) Bool (Maybe [LOAnyType]) | DumpLogObjects - + | PrepareDB String [TextInputFile] SqliteOutputFile + | UnlogDB (JsonInputFile (RunLogs ())) | ValidateHashTimeline (JsonInputFile [LogObject]) | BuildMachViews @@ -64,7 +68,7 @@ data ChainCommand | ReadChain (JsonInputFile [BlockEvents]) | TimelineChain RenderConfig TextOutputFile [TimelineComments BlockEvents] - | CollectSlots [JsonLogfile] + | CollectSlots [LogObjectSource] | DumpSlotsRaw | FilterSlots [JsonFilterFile] [ChainFilter] | DumpSlots @@ -122,6 +126,15 @@ parseChainCommand = (some (optLOAnyType "ok-loany" "[MULTI] Allow a particular LOAnyType")) ) + , op "unlog-db" "Read logs from DBs" + (UnlogDB + <$> optJsonInputFile "run-logs" "Run log manifest (API/Types.hs:RunLogs)" + ) + , op "prepare-db" "Prepare an SQLite DB from a host's log output" + (PrepareDB + <$> optString "mach" "host's machine name" + <*> some (optTextInputFile "log" "[MULTI] host log file(s)") + <*> optSqliteOutputFile "db" "DB output file") , op "dump-logobjects" "Dump lifted log object streams, alongside input files" (DumpLogObjects & pure) , op "hash-timeline" "Quickly validate timeline by hashes" @@ -164,7 +177,7 @@ parseChainCommand = , op "collect-slots" "Collect per-slot performance stats" (CollectSlots <$> many - (optJsonLogfile "ignore-log" "Omit data from listed log files from perf statistics")) + (optLogObjectSource "ignore-log" "Omit data from listed log sources from perf statistics")) , op "dump-slots-raw" "Dump unfiltered slot stats JSON streams, alongside input files" (DumpSlotsRaw & pure) , op "filter-slots" "Filter per-slot performance stats" @@ -329,15 +342,15 @@ data State , sRunLogs :: Maybe (RunLogs [LogObject]) , sDomSlots :: Maybe (DataDomain I SlotNo) -- propagation - , sMachViews :: Maybe [(JsonLogfile, MachView)] + , sMachViews :: Maybe [(LogObjectSource, MachView)] , sChain :: Maybe Chain , sBlockProp :: Maybe [BlockPropOne] , sMultiBlockProp :: Maybe MultiBlockProp -- performance - , sSlotsRaw :: Maybe [(JsonLogfile, [SlotStats 
NominalDiffTime])] - , sScalars :: Maybe [(JsonLogfile, RunScalars)] - , sSlots :: Maybe [(JsonLogfile, [SlotStats NominalDiffTime])] - , sMachPerf :: Maybe [(JsonLogfile, MachPerfOne)] + , sSlotsRaw :: Maybe [(LogObjectSource, [SlotStats NominalDiffTime])] + , sScalars :: Maybe [(LogObjectSource, RunScalars)] + , sSlots :: Maybe [(LogObjectSource, [SlotStats NominalDiffTime])] + , sMachPerf :: Maybe [(LogObjectSource, MachPerfOne)] , sClusterPerf :: Maybe [ClusterPerf] , sMultiClusterPerf :: Maybe MultiClusterPerf -- @@ -417,6 +430,19 @@ runChainCommand s & firstExceptT (CommandError c) pure s { sRunLogs = Just runLogs } +runChainCommand s + c@(UnlogDB rlf) = do + progress "logs" (Q $ printf "reading run log manifest %s" $ unJsonInputFile rlf) + runLogsBare <- Aeson.eitherDecode @(RunLogs ()) + <$> LBS.readFile (unJsonInputFile rlf) + & newExceptT + & firstExceptT (CommandError c . pack) + progress "logs" (Q $ printf "loading logs from DBs for %d hosts" $ + Map.size $ rlHostLogs runLogsBare) + runLogs <- runLiftLogObjectsDB runLogsBare + & firstExceptT (CommandError c) + pure s { sRunLogs = Just runLogs } + runChainCommand s@State{sRunLogs=Just (rlLogs -> objs)} c@DumpLogObjects = do progress "logobjs" (Q $ printf "dumping %d logobject streams" $ length objs) @@ -427,6 +453,13 @@ runChainCommand _ c@DumpLogObjects = missingCommandData c -- runChainCommand s c@(ReadMachViews _ _) -- () -> [(JsonLogfile, MachView)] +runChainCommand s + c@(PrepareDB machName inFiles outFile) = do + progress "prepare-db" (Q $ printf "preparing DB %s from '%s' logs" (unSqliteOutputFile outFile) machName) + prepareDB machName (map unTextInputFile inFiles) (unSqliteOutputFile outFile) + & firstExceptT (CommandError c) + pure s + runChainCommand s c@(ValidateHashTimeline timelineJson) = do progress "logs" (Q $ printf "validating hash timeline") @@ -446,6 +479,7 @@ runChainCommand s runChainCommand s@State{sRun=Just run, sRunLogs=Just (rlLogs -> objs)} BuildMachViews = do progress "machviews" (Q $ printf "building %d machviews" $ length objs) + performGC mvs <- buildMachViews run objs & liftIO pure s { sMachViews = Just mvs } runChainCommand _ c@BuildMachViews = missingCommandData c @@ -519,8 +553,9 @@ runChainCommand s@State{sRun=Just run, sRunLogs=Just (rlLogs -> objs)} c@(CollectSlots ignores) = do let nonIgnored = flip filter objs $ (`notElem` ignores) . fst forM_ ignores $ - progress "perf-ignored-log" . R . unJsonLogfile - progress "slots" (Q $ printf "building slot %d timelines" $ length objs) + progress "perf-ignored-log" . R . 
logObjectSourceFile + progress "slots" (Q $ printf "building %d slot timelines" $ length objs) + performGC (scalars, slotsRaw) <- fmap (mapAndUnzip redistribute) <$> collectSlotStats run nonIgnored & newExceptT @@ -648,6 +683,7 @@ runChainCommand _ c@RenderMultiPropagation{} = missingCommandData c runChainCommand s@State{sRun=Just run, sSlots=Just slots} c@ComputeMachPerf = do progress "machperf" (Q $ printf "computing %d machine performances" $ length slots) + performGC perf <- mapConcurrentlyPure (slotStatsMachPerf run) slots & fmap sequence & newExceptT @@ -833,6 +869,10 @@ fromAnalysisError :: ChainCommand -> AnalysisCmdError -> CommandError fromAnalysisError c (AnalysisCmdError t) = CommandError c t fromAnalysisError c o = CommandError c (show o) + +performGC :: ExceptT CommandError IO () +performGC = liftIO Mem.performGC + runCommand :: Command -> ExceptT CommandError IO () runCommand (ChainCommand cs) = do diff --git a/bench/locli/src/Cardano/Render.hs b/bench/locli/src/Cardano/Render.hs index 784df301d7f..827a709b112 100644 --- a/bench/locli/src/Cardano/Render.hs +++ b/bench/locli/src/Cardano/Render.hs @@ -1,11 +1,6 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeOperators #-} -#if __GLASGOW_HASKELL__ >= 908 -{-# OPTIONS_GHC -Wno-x-partial #-} -#endif - {- HLINT ignore "Use concatMap" -} {- HLINT ignore "Use fromMaybe" -} diff --git a/bench/locli/src/Cardano/Unlog/BackendDB.hs b/bench/locli/src/Cardano/Unlog/BackendDB.hs new file mode 100644 index 00000000000..c1afd9776ac --- /dev/null +++ b/bench/locli/src/Cardano/Unlog/BackendDB.hs @@ -0,0 +1,155 @@ + +module Cardano.Unlog.BackendDB + ( prepareDB + , runLiftLogObjectsDB + + -- specific SQLite queries or statements + , getSummary + , getTraceFreqs + , sqlGetEvent + , sqlGetTxns + , sqlGetResource + , sqlGetSlot + , sqlOrdered + ) where + +import Cardano.Analysis.API.Ground (Host (..), LogObjectSource (..)) +import Cardano.Prelude (ExceptT, Text) +import Cardano.Unlog.LogObject (HostLogs (..), LogObject (..), RunLogs (..), fromTextRef) +import Cardano.Unlog.LogObjectDB +import Cardano.Util (sequenceConcurrentlyChunksOf, withTimingInfo) + +import Prelude hiding (log) + +import Control.Exception (SomeException (..), catch) +import Control.Monad +import Data.Aeson as Aeson (decode, eitherDecode) +import qualified Data.ByteString.Lazy.Char8 as BSL +import Data.List (sort) +import qualified Data.Map.Lazy as ML +import qualified Data.Map.Strict as Map +import Data.Maybe +import qualified Data.Text.Short as ShortText (unpack) +import Data.Time.Clock (UTCTime, getCurrentTime) +import GHC.Conc (numCapabilities) +import System.Directory (removeFile) + +import Database.Sqlite.Easy hiding (Text) + + +runLiftLogObjectsDB :: RunLogs () -> ExceptT Text IO (RunLogs [LogObject]) +runLiftLogObjectsDB RunLogs{rlHostLogs, ..} = liftIO $ do + hostLogs' <- Map.fromList + <$> sequenceConcurrentlyChunksOf numCapabilities loadActions + pure $ RunLogs{ rlHostLogs = hostLogs', ..} + where + loadActions = map load (Map.toList rlHostLogs) + + load (host@(Host h), hl) = + withTimingInfo ("loadHostLogsDB/" ++ ShortText.unpack h) $ + (,) host <$> loadHostLogsDB hl + +-- If the logs have been split up into multiple files, e.g. by a log rotator, +-- this assumes sorting log files by *name* results in chronological order +-- of all *trace messages* contained in them. 
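-- Illustrative only, not part of this patch (filenames are hypothetical): the
-- name-based ordering is a plain lexicographic `sort`, so a rotator appending
-- unpadded counters would defeat the assumption above:
--
-- >>> Data.List.sort ["node-2.json", "node-10.json"]
-- ["node-10.json","node-2.json"]   -- not chronological
--
-- whereas timestamped (or zero-padded) suffixes do sort chronologically:
--
-- >>> Data.List.sort ["node.json-20241019", "node.json-20241020"]
-- ["node.json-20241019","node.json-20241020"]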
+prepareDB :: String -> [FilePath] -> FilePath -> ExceptT Text IO () +prepareDB machName (sort -> logFiles) outFile = liftIO $ do + + removeFile outFile `catch` \SomeException{} -> pure () + + withTimingInfo ("prepareDB/" ++ machName) $ withDb dbName $ do + mapM_ run createSchema + + tracefreqs <- foldM prepareFile (ML.empty :: TraceFreqs) logFiles + + transaction $ mapM_ runSqlRunnable (traceFreqsToSql tracefreqs) + + (tMin, tMax) <- liftIO $ tMinMax logFiles + now <- liftIO getCurrentTime + let + dbSummary = SummaryDB + { sdbName = fromString machName + , sdbLines = sum tracefreqs + , sdbFirstAt = tMin + , sdbLastAt = tMax + , sdbCreated = now + } + void $ runSqlRunnable $ summaryToSql dbSummary + where + dbName = fromString outFile + +prepareFile :: TraceFreqs -> FilePath -> SQLite TraceFreqs +prepareFile tracefreqs log = do + ls <- BSL.lines <$> liftIO (BSL.readFile log) + transaction $ foldM go tracefreqs ls + where + alterFunc :: Maybe Int -> Maybe Int + alterFunc = maybe (Just 1) (Just . succ) + + go acc line = case Aeson.eitherDecode line of + Right logObject@LogObject{loNS, loKind} -> do + forM_ (logObjectToSql logObject) + runSqlRunnable + + let name = fromTextRef loNS <> ":" <> fromTextRef loKind + pure $ ML.alter alterFunc name acc + + Left err -> runSqlRunnable (errorToSql err $ BSL.unpack line) >> pure acc + +tMinMax :: [FilePath] -> IO (UTCTime, UTCTime) +tMinMax [] = fail "tMinMax: empty list of log files" +tMinMax [log] = do + ls2 <- BSL.lines <$> BSL.readFile log + let + loMin, loMax :: LogObject + loMin = head $ mapMaybe Aeson.decode ls2 + loMax = fromJust (Aeson.decode $ last ls2) + pure (loAt loMin, loAt loMax) +tMinMax logs = do + (tMin, _ ) <- tMinMax [head logs] + (_ , tMax) <- tMinMax [last logs] + pure (tMin, tMax) + + +-- selects the entire LogObject stream, containing all objects relevant for standard analysis +selectAll :: SQL +selectAll = sqlOrdered + [ sqlGetEvent + , sqlGetTxns + , sqlGetResource + , sqlGetSlot + ] + +sqlGetEvent, sqlGetTxns, sqlGetResource, sqlGetSlot :: SQLSelect6Cols +sqlGetEvent = mkSQLSelectFrom "event" Nothing (Just "slot") (Just "block") Nothing (Just "hash") +sqlGetTxns = mkSQLSelectFrom "txns" Nothing (Just "count") (Just "rejected") Nothing (Just "tid") +sqlGetResource = mkSQLSelectFrom "resource" (Just "LOResources") Nothing Nothing Nothing (Just "as_blob") +sqlGetSlot = mkSQLSelectFrom "slot" (Just "LOTraceStartLeadershipCheck") (Just "slot") (Just "utxo_size") (Just "chain_dens") Nothing + +getSummary :: SQLite SummaryDB +getSummary = + fromSqlDataWithArgs . head + <$> run "SELECT * FROM summary" + +getTraceFreqs :: SQLite TraceFreqs +getTraceFreqs = + ML.fromList . 
map fromSqlDataPair + <$> run "SELECT * FROM tracefreq" + +loadHostLogsDB :: HostLogs a -> IO (HostLogs [LogObject]) +loadHostLogsDB hl = + case fst $ hlLogs hl of + log@(LogObjectSourceSQLite dbFile) -> + withDb (fromString dbFile) $ do + summary@SummaryDB{..} <- getSummary + traceFreqs <- getTraceFreqs + rows <- run selectAll + + pure $ hl + { hlRawTraceFreqs = traceFreqs + , hlRawFirstAt = Just sdbFirstAt + , hlRawLastAt = Just sdbLastAt + , hlRawLines = sdbLines + , hlLogs = (log, map (sqlToLogObject summary) rows) + } + other -> error $ "loadHostLogsDB: expected SQLite DB file, got " ++ show other diff --git a/bench/locli/src/Cardano/Unlog/BackendFile.hs b/bench/locli/src/Cardano/Unlog/BackendFile.hs new file mode 100644 index 00000000000..1bcc80afdde --- /dev/null +++ b/bench/locli/src/Cardano/Unlog/BackendFile.hs @@ -0,0 +1,88 @@ + +module Cardano.Unlog.BackendFile where + +import Cardano.Analysis.API.Ground +import Cardano.Prelude hiding (Text, show, toText) +import Cardano.Unlog.LogObject +import Cardano.Util + +import Prelude (id, show) + +import qualified Data.Aeson as AE (eitherDecode) +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Map.Strict as Map +import qualified Data.Text as TS +import qualified Data.Text.Short as Text +import GHC.Conc (numCapabilities) + + +runLiftLogObjects :: RunLogs () -> Bool -> Maybe [LOAnyType] + -> ExceptT TS.Text IO (RunLogs [LogObject]) +runLiftLogObjects rl@RunLogs{..} okDErr loAnyLimit = liftIO $ + go Map.empty 0 simultaneousReads + where + go (force -> !acc) batchBase = \case + [] -> pure $ rl{ rlHostLogs = acc } + c:cs -> do + let batchBase' = batchBase + length c + when (length c > 1) $ + progress "logs" (Q $ printf "processing batch %d - %d" batchBase (batchBase' - 1)) + hlsMap <- readHostLogChunk c + go (acc `Map.union` hlsMap) batchBase' cs + + simultaneousReads = chunksOf numCapabilities (Map.toList rlHostLogs) + + readHostLogChunk :: [(Host, HostLogs ())] -> IO (Map Host (HostLogs [LogObject])) + readHostLogChunk hls = + Map.fromList <$> forConcurrently hls (uncurry readHostLogs) + + readHostLogs :: Host -> HostLogs () -> IO (Host, HostLogs [LogObject]) + readHostLogs h hl@HostLogs{..} = + case fst hlLogs of + LogObjectSourceJSON j -> + readLogObjectStream (unJsonLogfile j) okDErr loAnyLimit + <&> (h,) . setLogs hl . fmap (setLOhost h) + other -> error $ "readHostLogs: expected JSON log file, got " ++ show other + + setLogs :: HostLogs a -> b -> HostLogs b + setLogs hl x = hl { hlLogs = (fst $ hlLogs hl, x) } + setLOhost :: Host -> LogObject -> LogObject + setLOhost h lo = lo { loHost = h } + +readLogObjectStream :: FilePath -> Bool -> Maybe [LOAnyType] -> IO [LogObject] +readLogObjectStream f okDErr loAnyLimit = + LBS.readFile f + <&> + (if okDErr then id else + filter ((\case + LODecodeError input err -> error + (printf "Decode error while parsing %s:\n%s\non input:\n>>> %s" f (Text.toString err) (Text.toString input)) + _ -> True) + . loBody)) . + filter ((case loAnyLimit of + Nothing -> \case + LOAny{} -> False + _ -> True + Just constraint -> \case + LOAny laty obj -> + elem laty constraint + || error (printf "Unexpected LOAny while parsing %s -- %s: %s" + f (show laty) (show obj)) + _ -> True) + . loBody) . + filter (not . isDecodeError "Error in $: not enough input" . loBody) . + fmap (\bs -> + AE.eitherDecode bs & + either + (LogObject zeroUTCTime "Cardano.Analysis.DecodeError" "DecodeError" "" (TId "0") + . LODecodeError (Text.fromByteString (LBS.toStrict bs) + & fromMaybe "#") + . Text.fromText + . 
TS.pack) + id) + . filter (not . LBS.null) + . LBS.split (fromIntegral $ fromEnum '\n') + where + isDecodeError x = \case + LODecodeError _ x' -> x == x' + _ -> False diff --git a/bench/locli/src/Cardano/Unlog/LogObject.hs b/bench/locli/src/Cardano/Unlog/LogObject.hs index 8702e1e8f2f..c15bd136ca1 100644 --- a/bench/locli/src/Cardano/Unlog/LogObject.hs +++ b/bench/locli/src/Cardano/Unlog/LogObject.hs @@ -1,4 +1,5 @@ {-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DerivingVia #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} @@ -9,15 +10,13 @@ {-# OPTIONS_GHC -Wno-partial-fields -Wno-orphans #-} {- HLINT ignore "Redundant <$>" -} -{- HLINT ignore "Redundant if" -} -{- HLINT ignore "Use infix" -} module Cardano.Unlog.LogObject ( HostLogs (..) + , TraceFreqs , hlRawLogObjects , RunLogs (..) , rlLogs - , runLiftLogObjects , LogObject (..) , loPretty -- @@ -25,22 +24,28 @@ module Cardano.Unlog.LogObject , logObjectStreamInterpreterKeys , LOBody (..) , LOAnyType (..) - , readLogObjectStream + , fromTextRef , textRefEquals ) where +import Cardano.Analysis.API.Ground +import Cardano.Logging.Resources.Types import Cardano.Prelude hiding (Text, show, toText) -import GHC.Conc (numCapabilities) -import Prelude (id, show, unzip3) +import Cardano.Util + +import Prelude (show, unzip3) import qualified Data.Aeson as AE import qualified Data.Aeson.Key as Aeson import qualified Data.Aeson.KeyMap as KeyMap import Data.Aeson.Types (Parser) -import qualified Data.ByteString.Lazy as LBS +import Data.Data (Data) import Data.Hashable (hash) +import qualified Data.Map.Lazy as ML (Map) import qualified Data.Map.Strict as Map +import Data.Profile +import Data.String (IsString (..)) import qualified Data.Text as LText import Data.Text.Short (ShortText, fromText, toText) import qualified Data.Text.Short as Text @@ -48,15 +53,11 @@ import Data.Tuple.Extra (fst3, snd3, thd3) import Data.Vector (Vector) import qualified Data.Vector as V -import Cardano.Logging.Resources.Types - -import Data.Profile -import Cardano.Analysis.API.Ground -import Cardano.Util +type Text = ShortText +type TraceFreqs = ML.Map Text Int -type Text = ShortText -- | Us of the a TextRef replaces commonly expected string parses with references -- into a Map, reducing memory footprint - given that large runs can contain @@ -70,14 +71,20 @@ data TextRef toTextRef :: Text -> TextRef toTextRef t = let h = hash t in if Text.null (lookupTextRef h) then TextLit t else TextRef h -textRefEquals :: TextRef -> Text -> Bool -textRefEquals (TextRef i) = (== lookupTextRef i) -textRefEquals (TextLit t) = (== t) +fromTextRef :: TextRef -> Text +fromTextRef (TextRef i) = lookupTextRef i +fromTextRef (TextLit t) = t + +textRefEquals :: Text -> TextRef -> Bool +textRefEquals t = (t ==) . fromTextRef instance Show TextRef where show (TextRef i) = show $ lookupTextRef i show (TextLit t) = show t +instance IsString TextRef where + fromString = toTextRef . 
fromString + instance ToJSON TextRef where toJSON (TextRef i) = toJSON $ lookupTextRef i toJSON (TextLit t) = toJSON t @@ -87,8 +94,8 @@ data HostLogs a = HostLogs { hlRawLogfiles :: [FilePath] , hlRawLines :: Int - , hlRawTraceFreqs :: Map Text Int - , hlLogs :: (JsonLogfile, a) + , hlRawTraceFreqs :: TraceFreqs + , hlLogs :: (LogObjectSource, a) , hlProfile :: [ProfileEntry I] , hlRawFirstAt :: Maybe UTCTime , hlRawLastAt :: Maybe UTCTime @@ -108,76 +115,9 @@ data RunLogs a } deriving (Generic, FromJSON, ToJSON) -rlLogs :: RunLogs a -> [(JsonLogfile, a)] +rlLogs :: RunLogs a -> [(LogObjectSource, a)] rlLogs = fmap hlLogs . Map.elems . rlHostLogs -runLiftLogObjects :: RunLogs () -> Bool -> Maybe [LOAnyType] - -> ExceptT LText.Text IO (RunLogs [LogObject]) -runLiftLogObjects rl@RunLogs{..} okDErr loAnyLimit = liftIO $ - go Map.empty 0 simultaneousReads - where - go (force -> !acc) batchBase = \case - [] -> pure $ rl{ rlHostLogs = acc } - c:cs -> do - let batchBase' = batchBase + length c - when (length c > 1) $ - progress "logs" (Q $ printf "processing batch %d - %d" batchBase (batchBase' - 1)) - hlsMap <- readHostLogChunk c - go (acc `Map.union` hlsMap) batchBase' cs - - simultaneousReads = chunksOf numCapabilities (Map.toList rlHostLogs) - - readHostLogChunk :: [(Host, HostLogs ())] -> IO (Map Host (HostLogs [LogObject])) - readHostLogChunk hls = - Map.fromList <$> forConcurrently hls (uncurry readHostLogs) - - readHostLogs :: Host -> HostLogs () -> IO (Host, HostLogs [LogObject]) - readHostLogs h hl@HostLogs{..} = - readLogObjectStream (unJsonLogfile $ fst hlLogs) okDErr loAnyLimit - <&> (h,) . setLogs hl . fmap (setLOhost h) - - setLogs :: HostLogs a -> b -> HostLogs b - setLogs hl x = hl { hlLogs = (fst $ hlLogs hl, x) } - setLOhost :: Host -> LogObject -> LogObject - setLOhost h lo = lo { loHost = h } - -readLogObjectStream :: FilePath -> Bool -> Maybe [LOAnyType] -> IO [LogObject] -readLogObjectStream f okDErr loAnyLimit = - LBS.readFile f - <&> - (if okDErr then id else - filter ((\case - LODecodeError input err -> error - (printf "Decode error while parsing %s:\n%s\non input:\n>>> %s" f (Text.toString err) (Text.toString input)) - _ -> True) - . loBody)) . - filter ((case loAnyLimit of - Nothing -> \case - LOAny{} -> False - _ -> True - Just constraint -> \case - LOAny laty obj -> - elem laty constraint - || error (printf "Unexpected LOAny while parsing %s -- %s: %s" - f (show laty) (show obj)) - _ -> True) - . loBody) . - filter (not . isDecodeError "Error in $: not enough input" . loBody) . - fmap (\bs -> - AE.eitherDecode bs & - either - (LogObject zeroUTCTime (TextLit "Cardano.Analysis.DecodeError") (TextLit "DecodeError") "" (TId "0") - . LODecodeError (Text.fromByteString (LBS.toStrict bs) - & fromMaybe "#") - . Text.fromText - . LText.pack) - id) - . filter (not . LBS.null) - . LBS.split (fromIntegral $ fromEnum '\n') - where - isDecodeError x = \case - LODecodeError _ x' -> x == x' - _ -> False data LogObject = LogObject @@ -193,12 +133,9 @@ data LogObject instance ToJSON LogObject -instance Print ShortText where - hPutStr h = hPutStr h . toText - hPutStrLn h = hPutStrLn h . toText - deriving instance NFData a => NFData (Resources a) + loPretty :: LogObject -> LText.Text loPretty LogObject{..} = mconcat [ stripS . LText.pack $ show loAt, " " @@ -329,7 +266,6 @@ interpreters = map3ple Map.fromList . unzip3 . fmap ent $ <*> (v .:? 
"chainLengthDelta" -- Compat for node versions 1.27 and older: <&> fromMaybe 1) - -- TODO: we should clarify the distinction between the two cases (^ and v). , (,,,) "TraceAdoptedBlock" "Forge.AdoptedBlock" "Forge.Loop.AdoptedBlock" $ \v -> LOBlockAddedToCurrentChain <$> v .: "blockHash" @@ -339,6 +275,9 @@ interpreters = map3ple Map.fromList . unzip3 . fmap ent $ -- Ledger snapshots: , (,,,) "TraceSnapshotEvent.TookSnapshot" "TraceLedgerEvent.TookSnapshot" "ChainDB.LedgerEvent.TookSnapshot" $ \_ -> pure LOLedgerTookSnapshot + -- If needed, this could track slot and duration (SMaybe): + -- {"at":"2024-10-19T10:16:27.459112022Z","ns":"ChainDB.LedgerEvent.TookSnapshot","data":{"enclosedTime":{"tag":"RisingEdge"},"kind":"TookSnapshot","snapshot":{"kind":"snapshot"},"tip":"RealPoint (SlotNo 5319) adefbb19d6284aa68f902d33018face42d37e1a7970415d2a81bd4c2dea585ba"},"sev":"Info","thread":"81","host":"client-us-04"} + -- {"at":"2024-10-19T10:16:45.925381225Z","ns":"ChainDB.LedgerEvent.TookSnapshot","data":{"enclosedTime":{"contents":18.466253914,"tag":"FallingEdgeWith"},"kind":"TookSnapshot","snapshot":{"kind":"snapshot"},"tip":"RealPoint (SlotNo 5319) adefbb19d6284aa68f902d33018face42d37e1a7970415d2a81bd4c2dea585ba"},"sev":"Info","thread":"81","host":"client-us-04"} -- Tx receive path & mempool: , (,,,) "TraceBenchTxSubServAck" "TraceBenchTxSubServAck" "TraceBenchTxSubServAck" $ @@ -471,7 +410,7 @@ data LOBody { loRawText :: !ShortText , loError :: !ShortText } - deriving (Eq, Generic, Show) + deriving (Eq, Generic, Show, Data) deriving anyclass NFData data LOAnyType @@ -479,9 +418,10 @@ data LOAnyType | LANonBlocking | LARollback | LANoInterpreter - deriving (Eq, Generic, NFData, Read, Show, ToJSON) + deriving (Eq, Generic, NFData, Read, Show, ToJSON, Data) -deriving instance Eq ResourceStats +deriving instance Eq ResourceStats +deriving instance Data ResourceStats instance ToJSON LOBody diff --git a/bench/locli/src/Cardano/Unlog/LogObjectDB.hs b/bench/locli/src/Cardano/Unlog/LogObjectDB.hs new file mode 100644 index 00000000000..f335aa76b02 --- /dev/null +++ b/bench/locli/src/Cardano/Unlog/LogObjectDB.hs @@ -0,0 +1,508 @@ +{-# LANGUAGE UndecidableInstances #-} + +{-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-} + +-- | This module contains the schema in which `LogObjects`s are +-- stored to and retrieved from an SQLite database. +-- +-- `logObjectToSql` provides the mapping for `LOBody` with its constructors of varying arity to different DB tables and columns. +-- `toLOBodyConverters` implements the reverse, mapping the columns of a result row to a `LOBody` value. +-- +-- The `AsSqlData` type class provides lower level data marshalling. +-- +module Cardano.Unlog.LogObjectDB + ( AsSQLData (..) + , SummaryDB (..) 
+ , SQLRunnable + , SQLSelect6Cols + , mkSQLSelectFrom + , sqlAppend + , sqlOrdered + , TraceFreqs + + , sqlToLogObject + , logObjectToSql + , errorToSql + , summaryToSql + , traceFreqsToSql + + , createSchema + , runSqlRunnable + + , fromSqlDataPair + , toSqlDataPair + , allLOBodyConstructors + , knownLOBodyConstructors + ) where + +import Cardano.Analysis.API.Ground +import Cardano.Logging.Resources.Types (ResourceStats, Resources (..)) +import Cardano.Unlog.LogObject +import Cardano.Util hiding (count) + +import Prelude + +import Data.Aeson as Aeson (decodeStrict, encode) +import Data.Bool (bool) +import qualified Data.ByteString.Lazy.Char8 as BSL (toStrict) +import Data.Data (dataTypeConstrs, dataTypeOf, showConstr, toConstr) +import qualified Data.Map.Lazy as ML +import Data.Maybe +import qualified Data.Set as Set +import Data.Text (Text) +import qualified Data.Text as TS (empty, intercalate, pack, splitOn, unpack) +import qualified Data.Text.Lazy as TL (Text, fromStrict, pack) +import qualified Data.Text.Short as ShortText (ShortText, empty, fromText, pack, toText) + +import Database.Sqlite.Easy hiding (Text) +import Database.Sqlite.Easy.Internal (SQL (..)) + + +data SummaryDB = SummaryDB + { sdbName :: Host + , sdbLines :: Int + , sdbFirstAt :: UTCTime + , sdbLastAt :: UTCTime + , sdbCreated :: UTCTime + } + +-- an SQL statement with its arguments +type SQLRunnable = (SQL, [SQLData]) + +-- | A select statement to retrieve log objects from the DB where the result set has exactly 6 columns, +-- with all 4 argument columns being nullable: +-- at (timestamp) | LogObject constructor (text) | int arg 1 | int arg 2 | float arg | text or blob arg +newtype SQLSelect6Cols = SQLSelect6Cols { unSQLSelect :: Text } + deriving (Eq, Ord, Show) + +-- | Smart constructor to ensure 6 columns in the result set. The 'at' column is assumed to exist in @table@. 
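-- As a worked example (derived from this patch, not new code): `sqlGetSlot`
-- in Cardano.Unlog.BackendDB, earlier in this diff, is built as
--
--   mkSQLSelectFrom "slot" (Just "LOTraceStartLeadershipCheck")
--     (Just "slot") (Just "utxo_size") (Just "chain_dens") Nothing
--
-- and expands to
--
--   SELECT at,'LOTraceStartLeadershipCheck',slot,utxo_size,chain_dens,null FROM slot
--
-- i.e. the constructor name becomes an SQL string literal and absent argument
-- columns are padded with `null`, so every branch of a UNION yields the same
-- six columns.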
+mkSQLSelectFrom :: + Text + -> Maybe Text + -> Maybe Text + -> Maybe Text + -> Maybe Text + -> Maybe Text + -> SQLSelect6Cols +mkSQLSelectFrom table cons arg1 arg2 arg3 arg4 = + SQLSelect6Cols $ + "SELECT " <> argList <> " FROM " <> table <> " " + where + argList = TS.intercalate "," $ + ["at", cons'] ++ map arg [arg1, arg2, arg3, arg4] + cons' = maybe "cons" (\c -> "'" <> c <> "'") cons + arg = fromMaybe "null" + +-- | Append (possibly WHERE clause filters) to an existing select statement +sqlAppend :: SQLSelect6Cols -> Text -> SQLSelect6Cols +sqlAppend (SQLSelect6Cols t) t' = SQLSelect6Cols $ t <> t' + +-- | Union of SELECTs, with the result rows ordered by timestamp +sqlOrdered :: [SQLSelect6Cols] -> SQL +sqlOrdered selects = SQL $ + TS.intercalate " UNION " (map unSQLSelect selects) + `mappend` " ORDER BY at ASC" + +runSqlRunnable :: SQLRunnable -> SQLite [[SQLData]] +runSqlRunnable = uncurry runWith + +createSchema :: [SQL] +createSchema = + [ createError + , createSummary + , createTraceFreq + , createResource + , createSlot + , createEvent + , createTxns + ] + +-- table error + +createError, insertError :: SQL +createError = "CREATE TABLE error (msg TEXT NOT NULL, input TEXT)" +insertError = "INSERT INTO error VALUES (?,?)" + +errorToSql :: String -> String -> SQLRunnable +errorToSql errorMsg origInput = + (insertError, toArgs $ Tuple ("", errorMsg) ("", origInput)) + +-- table summary + +createSummary, insertSummary :: SQL +createSummary = "CREATE TABLE summary (name TEXT NOT NULL, lines INTEGER NOT NULL, first_at REAL NOT NULL, last_at REAL NOT NULL, created REAL NOT NULL)" +insertSummary = "INSERT INTO summary VALUES (?,?,?,?,?)" + +summaryToSql :: SummaryDB -> SQLRunnable +summaryToSql SummaryDB{sdbName = Host name, ..} = + ( insertSummary + , [ toSqlData name, toSqlData sdbLines, toSqlData sdbFirstAt, toSqlData sdbLastAt, toSqlData sdbCreated ] + ) + +-- table tracefreq + +createTraceFreq, insertTraceFreq :: SQL +createTraceFreq = "CREATE TABLE tracefreq (msg TEXT NOT NULL, count INTEGER NOT NULL)" +insertTraceFreq = "INSERT INTO tracefreq VALUES (?,?)" + +traceFreqsToSql :: TraceFreqs -> [SQLRunnable] +traceFreqsToSql ts = + [ (insertTraceFreq, toSqlDataPair kv) | kv <- ML.toAscList ts ] + +-- table resource + +createResource, insertResource :: SQL +-- While not strictly necessary for storage (which happens in the BLOB), we expose some key metrics as individual DB columns +-- to use in custom user queries. +-- When exposing additional fields, make sure the BLOB always remains the last column. 
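-- A hypothetical ad-hoc query over these columns (illustrative, not part of
-- this patch) could look like:
--
--   SELECT at, centi_cpu, rss FROM resource WHERE centi_cpu > 8000 ORDER BY at
--
-- while the full `ResourceStats` record still round-trips through the
-- JSON-encoded `as_blob` column (cf. the `AsSQLData ResourceStats` instance at
-- the end of this module).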
+createResource = "CREATE TABLE resource (at REAL NOT NULL, centi_cpu INTEGER, rss INTEGER, heap INTEGER, alloc INTEGER, as_blob BLOB)" +insertResource = "INSERT INTO resource VALUES (?,?,?,?,?,?)" + +resourceArgs :: UTCTime -> ResourceStats -> [SQLData] +resourceArgs at rs@Resources{rCentiCpu, rRSS, rHeap, rAlloc} = + [ toSqlData at + , toSqlData rCentiCpu + , toSqlData rRSS + , toSqlData rHeap + , toSqlData rAlloc + , toSqlData rs + ] + +-- table slot + +createSlot, insertSlot :: SQL +createSlot = "CREATE TABLE slot (at REAL NOT NULL, slot INTEGER, utxo_size INTEGER, chain_dens REAL)" +insertSlot = "INSERT INTO slot VALUES (?,?,?,?)" + +slotArgs :: UTCTime -> ArgNTuple -> [SQLData] +slotArgs at args@Triple{} = toSqlData at : toArgs args +slotArgs _ _ = error "slotArgs: three arguments expected" + +-- tables event and txns + +createEvent, createTxns :: SQL +createEvent = "CREATE TABLE event (at REAL NOT NULL, cons TEXT NOT NULL, slot INTEGER, block INTEGER, hash TEXT)" +createTxns = "CREATE TABLE txns (at REAL NOT NULL, cons TEXT NOT NULL, count INTEGER, rejected INTEGER, tid TEXT)" + + +logObjectToSql :: LogObject -> Maybe SQLRunnable +logObjectToSql lo@LogObject{loAt, loBody, loTid} = + case loBody of + + -- no suitable interpreter found when parsing log object stream + LOAny{} -> Nothing + -- trace not emitted by the node + LOGeneratorSummary{} -> Nothing + -- not required for analysis + LOTxsAcked{} -> Nothing + + LOResources stats -> Just (insertResource, resourceArgs loAt stats) + + LOTraceStartLeadershipCheck slot utxoSize chainDensity + -> Just (insertSlot, slotArgs loAt (Triple ("", slot) ("", utxoSize) ("", chainDensity))) + -- forging + LOBlockContext slot block -> newLOEvent $ Tuple ("slot", slot) ("block", block) + LOLedgerState s -> newLOEvent $ Singleton ("slot", s) + LOLedgerView s -> newLOEvent $ Singleton ("slot", s) + LOTraceLeadershipDecided s b -> newLOEvent $ Tuple ("slot", s) ("block", b) + LOTickedLedgerState s -> newLOEvent $ Singleton ("slot", s) + LOMempoolSnapshot s -> newLOEvent $ Singleton ("slot", s) + LOBlockForged s b h1 h2 -> newLOEvent $ Triple ("slot", s) ("block", b) ("hash", (h1, h2)) + + -- diffusion + LOChainSyncClientSeenHeader s b h + -> newLOEvent $ Triple ("slot", s) ("block", b) ("hash", h) + LOBlockFetchClientRequested h len + -> newLOEvent $ Tuple ("block", len) ("hash", h) + LOBlockFetchClientCompletedFetch h + -> newLOEvent $ Singleton ("hash", h) + LOChainSyncServerSendHeader h + -> newLOEvent $ Singleton ("hash", h) + LOBlockFetchServerSending h + -> newLOEvent $ Singleton ("hash", h) + LOBlockAddedToCurrentChain h mSz len + -> newLOEvent $ Triple ("slot", mSz) ("block", len) ("hash", h) + + LOLedgerTookSnapshot -> newLOEvent Empty + + -- txn receive path + LOTxsCollected c -> newLOTxns $ Tuple ("count", c) ("tid", loTid) + LOTxsProcessed c r -> newLOTxns $ Triple ("count", c) ("rejected", r) ("tid", loTid) + LOMempoolTxs c -> newLOTxns $ Singleton ("count", c) + LOMempoolRejectedTx -> newLOTxns Empty + + -- that goes to the error table + LODecodeError rawText err -> Just (insertError, toArgs $ Tuple ("", err) ("", rawText)) + + where + newLOEvent = Just . insertVariadic "event" lo + newLOTxns = Just . 
insertVariadic "txns" lo + + +insertVariadic :: SQL -> LogObject -> ArgNTuple -> SQLRunnable +insertVariadic table LogObject{loAt, loBody} argNTuple = (sql, args) + where + args = toSqlData loAt : toSqlData loBody : toArgs argNTuple + (columns, templ) = toFieldList argNTuple + sql = "INSERT INTO " <> table <>"(at,cons" <> columns <> ") VALUES (?,?" <> templ <> ")" + + +-- some minimal guarantees for the variadic INSERTs on tables event and txns + +type Column = Text + +-- values to store, paired with their column name +data ArgNTuple where + Empty :: ArgNTuple + Singleton :: forall x. (AsSQLData x) => (Column, x) -> ArgNTuple + Tuple :: forall x y. (AsSQLData x, AsSQLData y) => (Column, x) -> (Column, y) -> ArgNTuple + Triple :: forall x y z. (AsSQLData x, AsSQLData y, AsSQLData z) => (Column, x) -> (Column, y) -> (Column, z) -> ArgNTuple + +toArgs :: ArgNTuple -> [SQLData] +toArgs = \case + Empty -> [] + Singleton (_, x) -> [toSqlData x] + Tuple (_, x) (_, y) -> [toSqlData x, toSqlData y] + Triple (_, x) (_, y) (_, z) -> [toSqlData x, toSqlData y, toSqlData z] + +-- for simplicity's sake, this yields both the column names +-- and the correct number of additional placeholders to extend the template +toFieldList :: ArgNTuple -> (SQL, SQL) +toFieldList = \case + Empty -> ("" , "") + Singleton (x, _) -> (go [x] , ",?") + Tuple (x, _) (y, _) -> (go [x, y] , ",?,?") + Triple (x, _) (y, _) (z, _) -> (go [x, y, z] , ",?,?,?") + where + go = SQL . TS.intercalate "," . (TS.empty :) + + +sqlToLogObject :: SummaryDB -> [SQLData] -> LogObject +sqlToLogObject _ [] = error "toLogObject: no columns in result row" +sqlToLogObject SummaryDB{sdbName} (at : rest) = + let body = fromSqlDataWithArgs rest + in LogObject + { loAt = fromSqlData at + , loNS = "" + , loKind = "" + , loHost = sdbName + , loTid = logObjectNeedsTIdforAnalysis rest body + , loBody = body + } + +-- There's only a couple of log objects that need the TId field for analysis. +-- Hence, it's only stored for those. +-- NB. 
+
+sqlToLogObject :: SummaryDB -> [SQLData] -> LogObject
+sqlToLogObject _ [] = error "sqlToLogObject: no columns in result row"
+sqlToLogObject SummaryDB{sdbName} (at : rest) =
+  let body = fromSqlDataWithArgs rest
+  in LogObject
+    { loAt   = fromSqlData at
+    , loNS   = ""
+    , loKind = ""
+    , loHost = sdbName
+    , loTid  = logObjectNeedsTIdforAnalysis rest body
+    , loBody = body
+    }
+
+-- Only a couple of log objects need the TId field for analysis;
+-- hence, it is only stored for those.
+-- NB. The assumption here is that it is the last column in the schema of table 'txns'.
+logObjectNeedsTIdforAnalysis :: [SQLData] -> LOBody -> TId
+logObjectNeedsTIdforAnalysis args = \case
+  LOTxsCollected{} -> theTId
+  LOTxsProcessed{} -> theTId
+  _                -> TId ShortText.empty
+  where
+    theTId = fromSqlData $ last args
+
+toLOBodyConverters :: [SQLData] -> ML.Map TL.Text LOBody
+toLOBodyConverters args = ML.fromList
+  [ ( "LOResources", LOResources (fromSqlData $ last args))
+
+  , ( "LOTraceStartLeadershipCheck"
+    , LOTraceStartLeadershipCheck (fromSqlData slot) (fromSqlData utxoSize) (fromSqlData chainDens)
+    )
+
+  -- forging
+  , ( "LOBlockContext", LOBlockContext (fromSqlData slot) (fromSqlData block))
+  , ( "LOLedgerState", LOLedgerState (fromSqlData slot))
+  , ( "LOLedgerView", LOLedgerView (fromSqlData slot))
+  , ( "LOTraceLeadershipDecided"
+    , LOTraceLeadershipDecided (fromSqlData slot) (fromSqlData block)
+    )
+  , ( "LOTickedLedgerState", LOTickedLedgerState (fromSqlData slot))
+  , ( "LOMempoolSnapshot", LOMempoolSnapshot (fromSqlData slot))
+  , ( "LOBlockForged", uncurry (LOBlockForged (fromSqlData slot) (fromSqlData block)) (fromSqlData hash))
+
+  -- diffusion
+  , ( "LOChainSyncClientSeenHeader"
+    , LOChainSyncClientSeenHeader (fromSqlData slot) (fromSqlData block) (fromSqlData hash)
+    )
+  , ( "LOBlockFetchClientRequested"
+    , LOBlockFetchClientRequested (fromSqlData hash) (fromSqlData block)
+    )
+  , ( "LOBlockFetchClientCompletedFetch"
+    , LOBlockFetchClientCompletedFetch (fromSqlData hash)
+    )
+  , ( "LOChainSyncServerSendHeader"
+    , LOChainSyncServerSendHeader (fromSqlData hash)
+    )
+  , ( "LOBlockFetchServerSending"
+    , LOBlockFetchServerSending (fromSqlData hash)
+    )
+  , ( "LOBlockAddedToCurrentChain"
+    , LOBlockAddedToCurrentChain (fromSqlData hash) (fromSqlData slot) (fromSqlData block)
+    )
+
+  , ( "LOLedgerTookSnapshot", LOLedgerTookSnapshot)
+
+  -- txn receive path
+  , ( "LOTxsCollected", LOTxsCollected (fromSqlData count))
+  , ( "LOTxsProcessed", LOTxsProcessed (fromSqlData count) (fromSqlData rejected))
+  , ( "LOMempoolTxs", LOMempoolTxs (fromSqlData count))
+  , ( "LOMempoolRejectedTx", LOMempoolRejectedTx)
+
+  -- constructor not expected to appear, given the definition of `selectAll`
+  , ( "LODecodeError", errorGiven "LODecodeError")
+
+  -- constructors not expected to appear, given the definition of `logLineToSQL`
+  , ( "LOAny", errorGiven "LOAny")
+  , ( "LOGeneratorSummary", errorGiven "LOGeneratorSummary")
+  , ( "LOTxsAcked", errorGiven "LOTxsAcked")
+  ]
+  where
+    errorGiven cons = LODecodeError (ShortText.pack $ show args) ("toLOBodyConverters: unexpected " <> cons <> " (with args)")
+
+    -- Match the remaining columns (after 'at' and 'cons') of a result row
+    -- from `selectAll`, providing custom bindings for each table.
+
+    -- table: event
+    slot : block : _ : hash : _ = args
+
+    -- table: slot
+    _ : utxoSize : chainDens : _ = args
+
+    -- table: txns
+    count : rejected : _ = args
+
+toLOBody :: [SQLData] -> LOBody
+toLOBody (SQLText cons : args) = fromMaybe unresolved resolve
+  where
+    resolve    = TL.fromStrict cons `ML.lookup` toLOBodyConverters args
+    unresolved = LODecodeError (ShortText.fromText cons) "toLOBody: no converter for this constructor; the LOBody type definition may have changed in `locli` code"
+toLOBody r = error $ "toLOBody: could not pattern match on result row " ++ show r
+
+
+allLOBodyConstructors, knownLOBodyConstructors :: Set.Set TL.Text
+knownLOBodyConstructors = ML.keysSet $ toLOBodyConverters []
+allLOBodyConstructors = Set.fromList $ map (TL.pack .
showConstr) (dataTypeConstrs $ dataTypeOf (undefined :: LOBody)) + + +-- +-- data marshalling +-- + +class AsSQLData x where + toSqlData :: x -> SQLData + + fromSqlData :: SQLData -> x + + fromSqlDataWithArgs :: [SQLData] -> x + fromSqlDataWithArgs = \case + [x] -> fromSqlData x + _ -> error "fromSqlDataWithArgs(default): arg count must be exactly one" + + +instance {-# OVERLAPPABLE #-} Integral a => AsSQLData a where + toSqlData = SQLInteger . fromIntegral + fromSqlData = withSqlInteger fromIntegral + +instance AsSQLData Bool where + toSqlData = bool (SQLInteger 0) (SQLInteger 1) + fromSqlData = withSqlInteger (== 1) + +instance AsSQLData Double where + toSqlData = SQLFloat + fromSqlData = withSqlFloat id + +instance AsSQLData String where + toSqlData = SQLText . TS.pack + fromSqlData = withSqlText TS.unpack + +instance AsSQLData UTCTime where + toSqlData = SQLFloat . realToFrac . utcTimeToPOSIXSeconds + fromSqlData = withSqlFloat (posixSecondsToUTCTime . realToFrac) + +instance AsSQLData LOBody where + toSqlData = SQLText . TS.pack . showConstr . toConstr + fromSqlData = const $ error "fromSqlData(LOBody): argument list needed" + fromSqlDataWithArgs = toLOBody + +instance AsSQLData SlotNo where + toSqlData = toSqlData . unSlotNo + fromSqlData = SlotNo . fromSqlData + +instance AsSQLData BlockNo where + toSqlData = toSqlData . unBlockNo + fromSqlData = BlockNo . fromSqlData + +instance AsSQLData ShortText.ShortText where + toSqlData = SQLText . ShortText.toText + fromSqlData = withSqlText ShortText.fromText + +instance AsSQLData Hash where + toSqlData = toSqlData . unHash + fromSqlData = Hash . fromSqlData + +instance AsSQLData TId where + toSqlData = toSqlData . unTId + fromSqlData = TId . fromSqlData + +-- a shortcut, so we only need one TEXT argument column in table `event` +instance AsSQLData (Hash, Hash) where + toSqlData (unHash -> h1, unHash -> h2) = + SQLText . ShortText.toText $ h1 <> "|" <> h2 + fromSqlData = withSqlText $ \t -> + case TS.splitOn "|" t of + [h1, h2] -> (Hash $ ShortText.fromText h1, Hash $ ShortText.fromText h2) + _ -> error "fromSqlData(Hash,Hash): unexpected pipe-separation" + +instance AsSQLData ResourceStats where + toSqlData = SQLBlob . BSL.toStrict . Aeson.encode + fromSqlData = withSqlBlob (fromJust . 
Aeson.decodeStrict)
+
+-- this must conform to the columns of table `summary` / the serialization in `summaryToSql`
+instance AsSQLData SummaryDB where
+  toSqlData = const $ error "toSqlData(SummaryDB): can't be represented as a single SQLData; use `summaryToSql`"
+  fromSqlData = const $ error "fromSqlData(SummaryDB): argument list needed"
+  fromSqlDataWithArgs [c1, c2, c3, c4, c5] =
+    SummaryDB
+      { sdbName    = Host (fromSqlData c1)
+      , sdbLines   = fromSqlData c2
+      , sdbFirstAt = fromSqlData c3
+      , sdbLastAt  = fromSqlData c4
+      , sdbCreated = fromSqlData c5
+      }
+  fromSqlDataWithArgs x = error $ "fromSqlDataWithArgs(SummaryDB): expected 5 columns, got: " ++ show x
+
+instance AsSQLData a => AsSQLData (SMaybe a) where
+  toSqlData = smaybe SQLNull toSqlData
+  fromSqlData = \case
+    SQLNull -> SNothing
+    a       -> SJust (fromSqlData a)
+
+
+withSqlText :: (Text -> a) -> SQLData -> a
+withSqlText f = \case
+  SQLText t -> f t
+  a         -> error $ "withSqlText: no match on " ++ show a
+
+withSqlInteger :: (Int64 -> a) -> SQLData -> a
+withSqlInteger f = \case
+  SQLInteger i -> f i
+  a            -> error $ "withSqlInteger: no match on " ++ show a
+
+withSqlFloat :: (Double -> a) -> SQLData -> a
+withSqlFloat f = \case
+  SQLFloat d -> f d
+  a          -> error $ "withSqlFloat: no match on " ++ show a
+
+withSqlBlob :: (ByteString -> a) -> SQLData -> a
+withSqlBlob f = \case
+  SQLBlob b -> f b
+  a         -> error $ "withSqlBlob: no match on " ++ show a
+
+toSqlDataPair :: (AsSQLData a, AsSQLData b) => (a, b) -> [SQLData]
+toSqlDataPair (a, b) = [toSqlData a, toSqlData b]
+
+-- lenient about any extra columns at the end of the row
+fromSqlDataPair :: (AsSQLData a, AsSQLData b) => [SQLData] -> (a, b)
+fromSqlDataPair = \case
+  a : b : _ -> (fromSqlData a, fromSqlData b)
+  _         -> error "fromSqlDataPair: row has fewer than 2 columns"
diff --git a/bench/locli/src/Cardano/Util.hs b/bench/locli/src/Cardano/Util.hs
index 7ccbe0b6643..cc7301107fb 100644
--- a/bench/locli/src/Cardano/Util.hs
+++ b/bench/locli/src/Cardano/Util.hs
@@ -1,9 +1,9 @@
 {-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveDataTypeable #-}
 {-# LANGUAGE DeriveFunctor #-}
 {-# LANGUAGE GeneralisedNewtypeDeriving #-}
 {-# OPTIONS_GHC -Wno-orphans #-}
 {-# OPTIONS_GHC -Wno-incomplete-patterns #-}
-{- HLINT ignore "Use list literal pattern" -}
 module Cardano.Util
   ( module Prelude
   , module Data.Aeson
@@ -24,36 +24,40 @@ module Cardano.Util
   )
 where
 
-import Prelude (String, error, head, last)
-import Text.Show qualified as Show (Show(..))
-import Cardano.Prelude
-
-import Data.Aeson (FromJSON (..), ToJSON (..), Object, Value (..), (.:), (.:?), (.!=), withObject, object)
-import Data.Aeson qualified as AE
-import Control.Arrow ((&&&), (***))
-import Control.Applicative ((<|>))
-import Control.Concurrent.Async (forConcurrently, forConcurrently_, mapConcurrently, mapConcurrently_)
-import Control.DeepSeq qualified as DS
-import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT)
-import Data.ByteString.Lazy.Char8 qualified as LBS
-import Data.IntervalMap.FingerTree (Interval (..), low, high, point)
-import Data.List (span)
-import Data.List.Split (chunksOf)
-import Data.Text qualified as T
-import Data.SOP (I (..), unI)
-import Data.SOP.Strict
-import Data.Time.Clock (NominalDiffTime, UTCTime (..), diffUTCTime, addUTCTime)
-import Data.Time.Clock.POSIX
-import Data.Vector (Vector)
-import Data.Vector qualified as Vec
-import GHC.Base (build)
-import Text.Printf (printf)
-
-import System.FilePath qualified as F
-
-import Ouroboros.Consensus.Util.Time
-
-import Cardano.Ledger.BaseTypes (StrictMaybe (..), fromSMaybe)
+import Cardano.Ledger.BaseTypes (StrictMaybe (..), fromSMaybe) +import Cardano.Prelude +import Ouroboros.Consensus.Util.Time + +import Prelude (String, error, head, last) + +import Control.Applicative ((<|>)) +import Control.Arrow ((&&&), (***)) +import Control.Concurrent.Async (forConcurrently, forConcurrently_, mapConcurrently, + mapConcurrently_) +import Control.Concurrent.Extra (Lock, newLock, withLock) +import qualified Control.DeepSeq as DS +import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT) +import Data.Aeson (FromJSON (..), Object, ToJSON (..), Value (..), object, withObject, + (.!=), (.:), (.:?)) +import qualified Data.Aeson as AE +import qualified Data.ByteString.Lazy.Char8 as LBS +import Data.Data (Data) +import Data.IntervalMap.FingerTree (Interval (..), high, low, point) +import Data.List (span) +import Data.List.Split (chunksOf) +import Data.SOP (I (..), unI) +import Data.SOP.Strict +import qualified Data.Text as T +import Data.Time.Clock (NominalDiffTime, UTCTime (..), addUTCTime, diffUTCTime) +import Data.Time.Clock.POSIX +import Data.Vector (Vector) +import qualified Data.Vector as Vec +import GHC.Base (build) +import qualified GHC.Stats as RTS +import qualified System.FilePath as F +import System.IO.Unsafe (unsafePerformIO) +import Text.Printf (printf) +import qualified Text.Show as Show (Show (..)) deriving newtype instance FromJSON a => (FromJSON (I a)) @@ -84,6 +88,8 @@ intvDurationSec = uncurry diffUTCTime . (high &&& low) -- type SMaybe a = StrictMaybe a +deriving instance Data a => Data (SMaybe a) + smaybe :: b -> (a -> b) -> StrictMaybe a -> b smaybe x _ SNothing = x smaybe _ f (SJust x) = f x @@ -146,7 +152,7 @@ mapLast :: (a -> a) -> [a] -> [a] mapLast _ [] = error "mapHead: partial" mapLast f xs' = reverse $ go [] xs' where go acc = \case - x:[] -> f x:acc + [x] -> f x:acc x:xs -> go ( x:acc) xs redistribute :: (a, (b, c)) -> ((a, b), (a, c)) @@ -160,23 +166,55 @@ toDouble :: forall a. Real a => a -> Double toDouble = fromRational . toRational data F - = R String - | Q String - | L [String] + = R String + | RNoCR String + | Q String + | L [String] | forall a. ToJSON a => J a +-- makes console output with `progress` thread-safe +progressLock :: Lock +progressLock = unsafePerformIO newLock +{-# NOINLINE progressLock #-} + progress :: MonadIO m => String -> F -> m () -progress key = putStr . T.pack . \case - R x -> printf "{ \"%s\": %s }\n" key x - Q x -> printf "{ \"%s\": \"%s\" }\n" key x - L xs -> printf "{ \"%s\": \"%s\" }\n" key (Cardano.Prelude.intercalate "\", \"" xs) - J x -> printf "{ \"%s\": %s }\n" key (LBS.unpack $ AE.encode x) +progress key format = liftIO $ + withLock progressLock $ + putStr $ T.pack $ case format of + R x -> printf "{ \"%s\": %s }\n" key x + RNoCR x -> printf "{ \"%s\": %s } " key x + Q x -> printf "{ \"%s\": \"%s\" }\n" key x + L xs -> printf "{ \"%s\": \"%s\" }\n" key (Cardano.Prelude.intercalate "\", \"" xs) + J x -> printf "{ \"%s\": %s }\n" key (LBS.unpack $ AE.encode x) + +withTimingInfo :: MonadIO m => String -> m a -> m a +withTimingInfo name action = do + before <- liftIO getPOSIXTime + result <- action + after <- liftIO getPOSIXTime + heap <- liftIO $ RTS.gcdetails_mem_in_use_bytes . 
RTS.gc <$> RTS.getRTSStats
+  let
+    seconds :: Int
+    seconds   = floor $ after - before
+    mebibytes = heap `div` 1024 `div` 1024
+  progress "timing" (R $ "time: " ++ show seconds ++ "s; heap: " ++ show mebibytes ++ "MiB; <" ++ name ++ ">")
+  pure result
 
 -- Dumping to files
 --
 replaceExtension :: FilePath -> String -> FilePath
 replaceExtension f new = F.dropExtension f <> "." <> new
 
+-- Run asyncs concurrently, but at most `n` at the same time.
+-- A few words of warning though - if careless, you can create deadlocks this way:
+-- 1. don't use looping actions (like `forever`) - they might block another async from getting kicked off
+-- 2. avoid using blocking synchronization between actions - you may be blocking the kick-off of an async you're actually waiting on
+-- 3. it's not guaranteed that _at least_ `n` asyncs run concurrently - a long-running one may delay the kick-off of others
+sequenceConcurrentlyChunksOf :: Int -> [IO a] -> IO [a]
+sequenceConcurrentlyChunksOf n actions = do
+  locks <- cycle <$> replicateM n newLock
+  let withLockActions = zipWith ($) (map withLock locks) actions
+  runConcurrently $ traverse Concurrently withLockActions
 
 spans :: forall a. (a -> Bool) -> [a] -> [Vector a]
 spans f = go []
diff --git a/bench/locli/test/Test/Analysis/CDF.hs b/bench/locli/test/Test/Analysis/CDF.hs
index 3f330f9f439..c859d65fbcc 100644
--- a/bench/locli/test/Test/Analysis/CDF.hs
+++ b/bench/locli/test/Test/Analysis/CDF.hs
@@ -1,11 +1,6 @@
-{-# LANGUAGE CPP #-}
 {-# LANGUAGE TemplateHaskell #-}
 {-# OPTIONS_GHC -Wno-missing-signatures #-}
 
-#if __GLASGOW_HASKELL__ >= 908
-{-# OPTIONS_GHC -Wno-x-partial #-}
-#endif
-
 module Test.Analysis.CDF where
 
 import Cardano.Prelude hiding (handle, head)
diff --git a/bench/locli/test/Test/Unlog/LogObjectDB.hs b/bench/locli/test/Test/Unlog/LogObjectDB.hs
new file mode 100644
index 00000000000..36bd6cdc200
--- /dev/null
+++ b/bench/locli/test/Test/Unlog/LogObjectDB.hs
@@ -0,0 +1,24 @@
+{-# LANGUAGE TemplateHaskell #-}
+{-# OPTIONS_GHC -Wno-missing-signatures #-}
+module Test.Unlog.LogObjectDB where
+
+import Cardano.Prelude
+import Cardano.Unlog.LogObjectDB
+
+import qualified Data.Set as Set (difference, empty)
+
+import Hedgehog
+
+
+-- This property ensures there are converter implementations
+-- for all LOBody constructors. These converters are used
+-- to reliably reconstruct a LOBody value from a database result row.
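+--
+-- For instance, if a hypothetical constructor LONewTraceEvent were added to
+-- LOBody without a matching entry in toLOBodyConverters, the set difference
+-- below would contain "LONewTraceEvent" and the property would fail.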
+ +prop_LOBody_converter_for_each_constructor = property $ + allLOBodyConstructors `Set.difference` knownLOBodyConstructors + === + Set.empty + +tests :: IO Bool +tests = + checkSequential $$discover diff --git a/bench/locli/test/test-locli.hs b/bench/locli/test/test-locli.hs index b352106773b..77edcbf25bb 100644 --- a/bench/locli/test/test-locli.hs +++ b/bench/locli/test/test-locli.hs @@ -4,10 +4,12 @@ import Hedgehog.Main (defaultMain) import qualified Test.Analysis.CDF import qualified Test.Unlog.Org +import qualified Test.Unlog.LogObjectDB main :: IO () main = defaultMain [ Test.Analysis.CDF.tests , Test.Unlog.Org.tests + , Test.Unlog.LogObjectDB.tests ] diff --git a/nix/workbench/analyse/analyse.sh b/nix/workbench/analyse/analyse.sh index 56e2350503f..e818342d415 100644 --- a/nix/workbench/analyse/analyse.sh +++ b/nix/workbench/analyse/analyse.sh @@ -99,6 +99,15 @@ then backend=$WB_BACKEND else backend= fi +if test -v "WB_LOCLI_DB" +then storage=$WB_LOCLI_DB +else storage=0 +fi +if [[ $storage -eq 1 ]] +then info analyse "$(red locli storage backend: database)" +else info analyse "$(red locli storage backend: file)" +fi + progress "analyse" "args: $(yellow $*)" while test $# -gt 0 do case "$1" in @@ -411,7 +420,10 @@ EOF local v0 v1 v2 v3 v4 v5 v6 v7 v8 v9 va vb vc vd ve vf vg vh vi vj vk vl vm vn vo v0=( $* ) - v1=("${v0[@]/#logs/ 'unlog' --run-logs \"$adir\"/log-manifest.json ${analysis_allowed_loanys[*]/#/--ok-loany } }") + if [[ $storage -eq 1 ]] + then v1=("${v0[@]/#logs/ 'unlog-db' --run-logs \"$adir\"/log-manifest-db.json }") + else v1=("${v0[@]/#logs/ 'unlog' --run-logs \"$adir\"/log-manifest.json ${analysis_allowed_loanys[*]/#/--ok-loany } }") + fi v2=("${v1[@]/#read-context/ 'read-meta-genesis' --run-metafile \"$dir\"/meta.json --shelley-genesis \"$dir\"/genesis-shelley.json }") v3=("${v2[@]/#write-context/ 'write-meta-genesis' --run-metafile \"$dir\"/meta.json --shelley-genesis \"$dir\"/genesis-shelley.json }") v4=("${v3[@]/#read-chain/ 'read-chain' --chain \"$adir\"/chain.json}") @@ -452,6 +464,7 @@ EOF -e 'chain.json' \ -e 'hash-timeline.json' \ -e 'log-manifest.json' \ + -e 'log-manifest-db.json' \ -e 'mach-views.json' \ -e 'prof.json' \ -e 'tracefreq.json' @@ -540,6 +553,12 @@ EOF ;; prepare | prep ) + if [[ $storage -eq 1 ]] + then analyse prepare-db "$@" + else analyse prepare-file "$@" + fi;; + + prepare-file | prep-file ) local usage="USAGE: wb analyse $op [[IDENT:]RUN-NAME=current].." local runspec=${1:-current}; if test $# != 0; then shift; fi @@ -640,6 +659,90 @@ EOF grep -h 'TraceForgedBlock\|DownloadedHeader' $adir/logs-*.flt.json | sort > $ht_json fi;; + prepare-db | prep-db ) + local usage="USAGE: wb analyse $op [--force] [[IDENT:]RUN-NAME=current].." 
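+
+        ## NB. unlike 'prepare-file', this produces one SQLite DB per node
+        ## ($adir/logs-$mach.sqlite3 below) plus a DB-specific manifest, log-manifest-db.json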
+ + local remanifest_reasons=() + while test $# -gt 0; do + case "$1" in + --force ) remanifest_reasons+=("$(blue the --force was used)");; + * ) break;; + esac + shift + done + + local runspec=${1:-current}; if test $# != 0; then shift; fi + + ## Parse 'runspec' into either IDENT:RUN or RUN + local nrun=$(runspec_normalise $runspec) + local run=$(runspec_run $nrun) + local dir=$(run get "$run") + test -n "$dir" || fail "malformed run: $run" + + progress "analyse(db)" "preparing run for analysis: $(white $run)" + + run trim "$run" + local adir=$dir/analysis + mkdir -p "$adir"/{cdf,png} + + ## unless already done, filter logs to contain trace objects only + local logdirs=($(ls -d "$dir"/node-*/ 2>/dev/null)) + local run_logs=$adir/log-manifest-db.json + + test ${#logdirs[*]} -gt 0 || + fail "Missing node-* subdirs in: $dir" + + if test ! -f "$run_logs" + then remanifest_reasons+=("$(green missing $run_logs)") + fi + + if test ${#remanifest_reasons[*]} = 0 + then progress "analyse(db)" "log manifest up to date for raw logs" + else progress "analyse(db)" "assembling log manifest: ${remanifest_reasons[*]}" + echo '{}' > $run_logs + # with useCabalRun we have to make sure the binary is built before launching off shell job spawns + local dummy=($(locli 2>/dev/null)) + time { + for d in "${logdirs[@]}" + do throttle_shell_job_spawns + local logfiles=($(ls --reverse -t "$d"stdout* "$d"node-[0-9]*.json \ + 2>/dev/null)) + if test -z "${logfiles[*]}" + then msg "no logs in $d, skipping.."; fi + local mach=$(basename "$d") + local out="$adir"/logs-$mach + call_locli "silent" "serial" \ + prepare-db \ + --mach "$mach" \ + --db "$adir/logs-$mach.sqlite3" \ + ${logfiles[*]/#/--log } & + jq_fmutate "$run_logs" ' + .rlHostLogs["'"$mach"'"] = + { hlRawLogfiles: ["'"$(echo ${logfiles[*]} | + sed 's/ /", "/')"'"] + , hlRawLines: 0 + , hlRawTraceFreqs: {} + , hlLogs: ["'"$adir/logs-$mach.sqlite3"'", null] + , hlProfile: [] + } + | .rlFilterDate = ('$(if test -z "$without_datever_meta" + then echo -n now + else echo -n 0; fi)' | todate) + ' + + local ghc_rts_prof=$d/cardano-node.prof + if test -f "$ghc_rts_prof" + then progress "analyse(db) | profiling" "processing cardano-node.prof for $mach" + ghc_rts_minusp_tojson "$ghc_rts_prof" > "$out".flt.prof.json + jq_fmutate "$run_logs" ' + .rlHostLogs["'"$mach"'"] += { hlProfile: $profile } + ' --slurpfile profile "$out".flt.prof.json + fi + done + wait + } + fi;; + trace-frequencies | trace-freq | freq | tf ) local new_only= sargs=() while test $# -gt 0 @@ -686,10 +789,16 @@ EOF } call_locli() { + local silent= + while test $# -gt 0 + do case "$1" in + silent ) silent='silent';; + * ) break;; esac; shift; done + local rtsmode="${1:-hipar}"; shift local args=("$@") - echo "{ \"rtsmode\": \"$rtsmode\" }" + test "$silent" = "silent" || echo "{ \"rtsmode\": \"$rtsmode\" }" case "$rtsmode" in serial )locli_args+=(+RTS -N1 -A128M -RTS);; lomem ) locli_args+=(+RTS -N3 -A8M -RTS);; @@ -698,7 +807,10 @@ call_locli() { esac verbose "analysis | locli" "$(with_color reset ${locli_args[@]}) $(colorise ${args[*]})" - time locli "${locli_args[@]}" "${args[@]}" + if test "$silent" = "silent" + then locli "${locli_args[@]}" "${args[@]}" + else time locli "${locli_args[@]}" "${args[@]}" + fi } num_jobs="\j" diff --git a/nix/workbench/lib-cabal.sh b/nix/workbench/lib-cabal.sh index 6aa5f05e930..8ad4263d14f 100644 --- a/nix/workbench/lib-cabal.sh +++ b/nix/workbench/lib-cabal.sh @@ -63,11 +63,13 @@ function cardano-tracer() { } function locli() { - cabal -v0 build ${WB_FLAGS_CABAL} 
exe:locli
-    set-git-rev \
-      $(git rev-parse HEAD) \
-      $(cabal list-bin locli) || true
-    cabal -v0 exec ${WB_FLAGS_CABAL} locli -- ${WB_FLAGS_RTS} "$@"
+    #cabal -v0 build ${WB_FLAGS_CABAL} exe:locli
+    #set-git-rev \
+    #  $(git rev-parse HEAD) \
+    #  $(cabal list-bin locli) || true
+    # cabal -v0 exec ${WB_FLAGS_CABAL} locli -- ${WB_FLAGS_RTS} "$@"
+
+    cabal -v0 run ${WB_FLAGS_CABAL} exe:locli -- ${WB_FLAGS_RTS} "$@"
 }
 
 function tx-generator() {
diff --git a/nix/workbench/shell.nix b/nix/workbench/shell.nix
index 5f5593d1b9d..0acb7865c64 100644
--- a/nix/workbench/shell.nix
+++ b/nix/workbench/shell.nix
@@ -40,6 +40,7 @@ in project.shellFor {
     export WB_CREATE_TESTNET_DATA=''${WB_CREATE_TESTNET_DATA:-1}
     export WB_DEPLOYMENT_NAME=''${WB_DEPLOYMENT_NAME:-$(basename $(pwd))}
     export WB_MODULAR_GENESIS=''${WB_MODULAR_GENESIS:-0}
+    export WB_LOCLI_DB=''${WB_LOCLI_DB:-0}
 
     export WB_SHELL_PROFILE=${profileName}
     export WB_SHELL_PROFILE_DATA=${profileData}
@@ -48,6 +49,7 @@ in project.shellFor {
     progress "deployment name" $WB_DEPLOYMENT_NAME
     progress "params" 'useCabalRun=${toString backend.useCabalRun} workbenchDevMode=${toString workbenchDevMode} profiling=${toString profiling}'
     progress "WB_BACKEND_DATA=" $WB_BACKEND_DATA
+    progress "WB_LOCLI_DB=" $WB_LOCLI_DB
     progress "WB_CREATE_TESTNET_DATA=" $WB_CREATE_TESTNET_DATA
     progress "WB_MODULAR_GENESIS=" $WB_MODULAR_GENESIS
     progress "WB_SHELL_PROFILE_DATA=" $WB_SHELL_PROFILE_DATA
diff --git a/nix/workbench/wb b/nix/workbench/wb
index dd0565df53d..52110564d7c 100755
--- a/nix/workbench/wb
+++ b/nix/workbench/wb
@@ -13,6 +13,9 @@ global_basedir=${global_basedir:-$(realpath "$(dirname "$0")")}
 # For genesis creating, create-testnet-data is the default CLI command; set to 0 to fall back to create-staked
 : "${WB_CREATE_TESTNET_DATA:=1}"
 
+# By default, do not enable the new (experimental) database storage backend for `locli`
+: "${WB_LOCLI_DB:=0}"
+
 . "$global_basedir"/lib.sh
 . "$global_basedir"/env.sh
 . "$global_basedir"/chaindb.sh
@@ -192,6 +195,7 @@ start() {
         --analysis-can-fail | -af )    analysis_can_fail=t;;
         --dump-logobjects )            analyse_args+=($1);;
         --filters )                    analyse_args+=($1 $2); shift;;
+        --locli-db )                   export WB_LOCLI_DB=1;;
 
         ## Aux
         --verbose | -v )               export verbose=t;;
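
For reference, a minimal way to exercise the new DB storage backend from a workbench shell (a sketch; it assumes an existing run named `current`):

    WB_LOCLI_DB=1 wb analyse prepare current   # 'prepare' dispatches to 'prepare-db', writing one SQLite DB per node under analysis/
    wb start --locli-db ...                    # alternatively, enable the backend when starting a run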