diff --git a/bench/locli/locli.cabal b/bench/locli/locli.cabal index 1835e0e60e1..611ee1c8ba0 100644 --- a/bench/locli/locli.cabal +++ b/bench/locli/locli.cabal @@ -107,6 +107,7 @@ library , extra , file-embed , filepath + , fingertree , ghc , gnuplot , iohk-monitoring diff --git a/bench/locli/src/Cardano/Analysis/API.hs b/bench/locli/src/Cardano/Analysis/API.hs index 430e1dad016..d89b7f771be 100644 --- a/bench/locli/src/Cardano/Analysis/API.hs +++ b/bench/locli/src/Cardano/Analysis/API.hs @@ -1,5 +1,6 @@ module Cardano.Analysis.API - ( module Cardano.Analysis.API.Chain + ( module Data.CDF + , module Cardano.Analysis.API.Chain , module Cardano.Analysis.API.ChainFilter , module Cardano.Analysis.API.Context , module Cardano.Analysis.API.Dictionary @@ -12,6 +13,7 @@ module Cardano.Analysis.API ) where +import Data.CDF import Cardano.Analysis.API.Chain import Cardano.Analysis.API.ChainFilter import Cardano.Analysis.API.Context diff --git a/bench/locli/src/Cardano/Analysis/API/Field.hs b/bench/locli/src/Cardano/Analysis/API/Field.hs index c00d85e99c9..935d1053de5 100644 --- a/bench/locli/src/Cardano/Analysis/API/Field.hs +++ b/bench/locli/src/Cardano/Analysis/API/Field.hs @@ -11,7 +11,6 @@ import Data.Text (unpack) import Cardano.JSON import Cardano.Util import Cardano.Analysis.API.Ground -import Cardano.Analysis.API.Run data Scale @@ -23,6 +22,7 @@ data Range = Free -- No range restriction | Z0 Int -- 1-based range | Z1 Int -- 1-based range + | R01 deriving (Eq, Show) data Unit @@ -33,6 +33,7 @@ data Unit | MB -- Mibibyte: 2^20 | KBs -- Kibibyte/s | MBs -- Mibibyte/s + | Era -- Era | Epo -- Epoch | Slo -- Slots | Blk -- Blocks @@ -41,10 +42,16 @@ data Unit | Sig -- Sign: +/- | Pct -- Unspecified ratio, percents | Ev -- Events + | KEv -- Events: 10^3 + | Dat -- Date + | Tim -- Time + | Ver -- Version | Ix -- Unspecified index | Len -- Unspecified length + | Cnt -- Unspecified count | Rto -- Unspecified ratio | Uni -- Unspecified unit + | Id -- Unspecified identifier 
deriving (Eq, Show) renderUnit :: Unit -> Text @@ -56,6 +63,7 @@ renderUnit = \case MB -> "MB" KBs -> "KB/s" MBs -> "MB/s" + Era -> "era" Epo -> "epoch" Slo -> "slots" Blk -> "blocks" @@ -63,14 +71,20 @@ renderUnit = \case Hos -> "host" Sig -> "+/-" Pct -> "%" - Ev -> "" - Ix -> "" - Len -> "" - Rto -> "" - Uni -> "" + Ev -> "#" + KEv -> "#" + Dat -> "on" + Tim -> "at" + Ver -> "v" + Ix -> "[]" + Len -> "#" + Cnt -> "#" + Rto -> "/" + Uni -> "#" + Id -> "" data Width - = W0 + = Wno | W1 | W2 | W3 @@ -83,6 +97,15 @@ data Width | W10 | W11 | W12 + | W13 + | W14 + | W15 + | W16 + | W17 + | W18 + | W19 + | W20 + | W21 deriving (Eq, Enum, Ord, Show) data Precision @@ -94,7 +117,8 @@ data Precision {-# INLINE width #-} width :: Width -> Int -width = fromEnum +width Wno = 80 +width x = fromEnum x -- | Encapsulate all metadata about a metric (a projection) of -- a certain projectible (a kind of analysis results): @@ -122,10 +146,14 @@ class CDFFields a p where class TimelineFields a where data TimelineComments a :: Type - timelineFields :: Run -> [Field ISelect I a] + timelineFields :: [Field ISelect I a] rtCommentary :: a -> TimelineComments a -> [Text] rtCommentary _ _ = [] +data FSelect where + ISel :: TimelineFields a => (Field ISelect I a -> Bool) -> FSelect + DSel :: CDFFields a p => (Field DSelect p a -> Bool) -> FSelect + data DSelect p a = DInt (a p -> CDF p Int) | DWord64 (a p -> CDF p Word64) @@ -139,8 +167,16 @@ data ISelect p a | IFloat (a -> Double) | IDeltaT (a -> NominalDiffTime) | IDeltaTM (a -> SMaybe NominalDiffTime) + | IDate (a -> UTCTime) + | ITime (a -> UTCTime) | IText (a -> Text) +dFields :: [FieldName] -> Field DSelect p a -> Bool +dFields fs Field{fId} = FieldName fId `elem` fs + +iFields :: [FieldName] -> Field ISelect I a -> Bool +iFields fs Field{fId} = FieldName fId `elem` fs + filterFields :: CDFFields a p => (Field DSelect p a -> Bool) -> [Field DSelect p a] filterFields f = filter f cdfFields diff --git 
a/bench/locli/src/Cardano/Analysis/API/Ground.hs b/bench/locli/src/Cardano/Analysis/API/Ground.hs index 63f2949eeec..829dc2465ff 100644 --- a/bench/locli/src/Cardano/Analysis/API/Ground.hs +++ b/bench/locli/src/Cardano/Analysis/API/Ground.hs @@ -34,6 +34,12 @@ import Data.DataDomain import Cardano.Util +newtype FieldName = FieldName { unFieldName :: Text } + deriving (Eq, Generic, Ord) + deriving newtype (FromJSON, IsString, ToJSON) + deriving anyclass NFData + deriving Show via Quiet FieldName + newtype TId = TId { unTId :: ShortText } deriving (Eq, Generic, Ord) deriving newtype (FromJSON, ToJSON) @@ -60,11 +66,17 @@ newtype Count a = Count { unCount :: Int } deriving newtype (FromJSON, Num, ToJSON) deriving anyclass NFData -countOfList :: [a] -> Count a -countOfList = Count . fromIntegral . length +countList :: (a -> Bool) -> [a] -> Count a +countList f = Count . fromIntegral . count f + +countLists :: (a -> Bool) -> [[a]] -> Count a +countLists f = Count . fromIntegral . sum . fmap (count f) + +countListAll :: [a] -> Count a +countListAll = Count . fromIntegral . length -countOfLists :: [[a]] -> Count a -countOfLists = Count . fromIntegral . sum . fmap length +countListsAll :: [[a]] -> Count a +countListsAll = Count . fromIntegral . sum . 
fmap length unsafeCoerceCount :: Count a -> Count b unsafeCoerceCount = Unsafe.unsafeCoerce diff --git a/bench/locli/src/Cardano/Analysis/API/Metrics.hs b/bench/locli/src/Cardano/Analysis/API/Metrics.hs index 10d2e809d39..baf95a8d3bc 100644 --- a/bench/locli/src/Cardano/Analysis/API/Metrics.hs +++ b/bench/locli/src/Cardano/Analysis/API/Metrics.hs @@ -27,15 +27,182 @@ import Data.CDF import Cardano.JSON import Cardano.Util +import Cardano.Analysis.API.Context import Cardano.Analysis.API.Field import Cardano.Analysis.API.Ground import Cardano.Analysis.API.Types -perfSubsetFn :: PerfSubset -> (Field DSelect p a -> Bool) -perfSubsetFn = \case - PerfFull -> const True - PerfSummary -> mtFieldsReport +sumFieldsReport :: [FieldName] +sumFieldsReport = + [ "date.systemStart", "time.systemStart", "sumAnalysisTime" + , "batch" + , "cardano-node", "ouroboros-network" , "cardano-ledger", "plutus", "cardano-crypto", "cardano-base" + , "era" + , "delegators", "utxo" + , "add_tx_size", "inputs_per_tx", "outputs_per_tx" , "tps", "tx_count" + , "plutusScript" + , "sumLogStreams", "sumLogObjectsTotal" + , "sumFilters" + , "ddRawCount.sumDomainTime", "ddFilteredCount.sumDomainTime", "dataDomainFilterRatio.sumDomainTime" + , "ddRaw.sumStartSpread", "ddRaw.sumStopSpread" + , "ddFiltered.sumStartSpread", "ddFiltered.sumStopSpread" + , "sumDomainSlots", "sumDomainBlocks", "sumBlocksRejected" ] + +instance TimelineFields SummaryOne where + data TimelineComments SummaryOne + deriving Show + + timelineFields = + fScalar "sumAnalysisTime" W10 Dat (IText $ showText.roundUTCTimeDay.sumAnalysisTime) + "Analysis date" + "Date of analysis" + + <> fScalar "date.systemStart" W10 Dat (IDate $ systemStart.sumGenesis) + "Cluster system start date" + "Date of cluster genesis systemStart" + + <> fScalar "time.systemStart" W8 Tim (ITime $ systemStart.sumGenesis) + "Cluster system start time" + "Time of cluster genesis systemStart" + + <> fScalar "batch" Wno Id (IText $ batch.sumMeta) + "Run batch" + "" + 
+ <> fScalar "cardano-node" W5 Ver (IText $ unCommit.mNode.manifest.sumMeta) + "cardano-node version" + "" + + <> fScalar "ouroboros-network" W5 Ver (IText $ unCommit.mNetwork.manifest.sumMeta) + "ouroboros-network version" + "" + + <> fScalar "cardano-ledger" W5 Ver (IText $ unCommit.mLedger.manifest.sumMeta) + "cardano-ledger version" + "" + + <> fScalar "plutus" W5 Ver (IText $ unCommit.mPlutus.manifest.sumMeta) + "plutus version" + "" + + <> fScalar "cardano-crypto" W5 Ver (IText $ unCommit.mCrypto.manifest.sumMeta) + "cardano-crypto version" + "" + + <> fScalar "cardano-base" W5 Ver (IText $ unCommit.mBase.manifest.sumMeta) + "cardano-base version" + "" + + <> fScalar "era" Wno Era (IText $ era.sumMeta) + "Era" + "Benchmark era" + + <> fScalar "delegators" Wno Cnt (IWord64 $ delegators.sumGenesisSpec) + "Delegation map size" + "" + + <> fScalar "utxo" Wno Cnt (IWord64 $ utxo.sumGenesisSpec) + "Starting UTxO set size" + "Extra UTxO set size at the beginning of the benchmark" + + <> fScalar "add_tx_size" Wno B (IWord64 $ add_tx_size.sumGenerator) + "Extra tx payload" + "" + + <> fScalar "inputs_per_tx" Wno Cnt (IWord64 $ inputs_per_tx.sumGenerator) + "Tx inputs" + "" + + <> fScalar "outputs_per_tx" Wno Cnt (IWord64 $ outputs_per_tx.sumGenerator) + "Tx outputs" + "" + + <> fScalar "tps" Wno Hz (IFloat $ tps.sumGenerator) + "TPS" + "Offered load, transactions per second" + + <> fScalar "tx_count" Wno Cnt (IWord64 $ tx_count.sumGenerator) + "Transaction count" + "Number of transactions prepared for submission, but not necessarily submitted" + + <> fScalar "plutusScript" Wno Id (IText $ T.pack.fromMaybe "---".plutusLoopScript.sumGenerator) + "Plutus script" + "Name of the Plutus script used for smart contract workload generation, if any" + + <> fScalar "sumLogStreams" Wno Cnt (IInt $ unCount.sumLogStreams) + "Machines" + "Number of machines under analysis" + + <> fScalar "sumLogObjectsTotal" Wno Cnt (IInt $ unCount.sumLogObjectsTotal) + "Total log objects analysed" + 
"" + + <> fScalar "sumFilters" Wno Cnt (IInt $ length.snd.sumFilters) + "Number of filters applied" + "" + + <> fScalar "ddRawCount.sumDomainTime" Wno Sec (IInt $ ddRawCount.sumDomainTime) + "Run time, s" + "" + + <> fScalar "ddFilteredCount.sumDomainTime" Wno Sec (IInt $ ddFilteredCount.sumDomainTime) + "Analysed run duration, s" + "" + + <> fScalar "dataDomainFilterRatio.sumDomainTime" W4 Rto (IFloat $ dataDomainFilterRatio.sumDomainTime) + "Run time efficiency" + "" + + <> fScalar "ddRaw.sumStartSpread" Wno Sec (IDeltaT$ intvDurationSec.ddRaw.sumStartSpread) + "Node start spread, s" + "" + + <> fScalar "ddRaw.sumStopSpread" Wno Sec (IDeltaT$ intvDurationSec.ddRaw.sumStopSpread) + "Node stop spread, s" + "" + + <> fScalar "ddFiltered.sumStartSpread" Wno Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStartSpread) + "Perf analysis start spread, s" + "" + + <> fScalar "ddFiltered.sumStopSpread" Wno Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStopSpread) + "Perf analysis stop spread, s" + "" + + <> fScalar "sumDomainSlots" Wno Slo (IInt $ ddFilteredCount.sumDomainSlots) + "Slots analysed" + "" + + <> fScalar "sumDomainBlocks" Wno Blk (IInt $ ddFilteredCount.sumDomainBlocks) + "Blocks analysed" + "" + + <> fScalar "sumBlocksRejected" Wno Cnt (IInt $ unCount . sumBlocksRejected) + "Blocks rejected" + "" + -- fieldJSONOverlay f = (:[]) . 
tryOverlayFieldDescription f + +propSubsetFn :: PropSubset -> (Field DSelect p a -> Bool) +propSubsetFn = \case + PropFull -> const True + PropControl -> dFields bpFieldsControl + PropForger -> dFields bpFieldsForger + PropPeers -> dFields bpFieldsPeers + PropEndToEnd -> dFields bpFieldsEndToEnd + PropEndToEndBrief -> dFields bpFieldsEndToEndBrief + +bpFieldsControl, bpFieldsForger, bpFieldsPeers, bpFieldsEndToEnd, bpFieldsEndToEndBrief :: [FieldName] +bpFieldsControl = + [ "cdfBlocksPerHost", "cdfBlocksFilteredRatio", "cdfBlocksChainedRatio", "cdfBlockBattles", "cdfBlockSizes" ] +bpFieldsForger = + [ "cdfForgerStarts", "cdfForgerBlkCtx", "cdfForgerLgrState", "cdfForgerLgrView", "cdfForgerLeads", "cdfForgerForges", "cdfForgerAnnouncements", "cdfForgerSends", "cdfForgerAdoptions" ] +bpFieldsPeers = + [ "cdfPeerNotices", "cdfPeerRequests", "cdfPeerFetches", "cdfPeerAnnouncements", "cdfPeerSends", "cdfPeerAdoptions" ] +bpFieldsEndToEnd = + adoptionCentiles <&> FieldName . renderAdoptionCentile +bpFieldsEndToEndBrief = + adoptionCentilesBrief <&> FieldName . renderAdoptionCentile instance CDFFields BlockProp p where cdfFields = @@ -76,9 +243,6 @@ instance CDFFields BlockProp p where "Forged to self-adopted" "Time it took to adopt the block (TraceAdoptedBlock), since block forging completion" ] - <> fBoth "cdfForks" "fork" " #" W4 Uni P0 Lin Free (DInt cdfForks) - "Forks at this block height" - "For a given block, number of abandoned blocks at its block height" <> fGrp ",------- Peer event Δt: -------." W4 Sec P3 Log Free [ fGrp' "cdfPeerNotices" "Noti" (DDeltaT cdfPeerNotices) @@ -120,9 +284,26 @@ instance CDFFields BlockProp p where -- (T.pack $ printf "Block adopted by %.2f fraction of the entire cluster." centi) | (i, ct@(Centile centi)) <- zip [0::Int ..] 
adoptionCentiles ] - <> fBoth "cdfSizes" "Size" "bytes" W9 B P0 Lin Free (DInt cdfSizes) + + <> fBoth "cdfBlocksPerHost" "Host" "blks" W4 Blk P0 Lin Free (DInt cdfBlocksPerHost) + "Blocks per host" + "For a given host, number of blocks made during the entire observation period" + + <> fBoth "cdfBlocksFilteredRatio" "Filtr" "blks" W4 Rto P3 Lin R01 (DFloat cdfBlocksFilteredRatio) + "Filtered to chained block ratio" + "For a given host, ratio of blocks that passed filtering / all on chain" + + <> fBoth "cdfBlocksChainedRatio" "Chain" "blks" W4 Rto P3 Lin R01 (DFloat cdfBlocksChainedRatio) + "Chained to forged block ratio" + "For a given host, ratio of blocks that made into chain / all forged" + + <> fBoth "cdfBlockBattles" "Battl" " #" W4 Blk P0 Lin Free (DInt cdfBlockBattles) + "Height & slot battles" + "For a given block, number of all abandoned blocks at its block height. Sum of height and slot battles" + + <> fBoth "cdfBlockSizes" "Size" "bytes" W9 B P0 Lin Free (DInt cdfBlockSizes) "Block size" - "Block size, in bytes." + "Block size, in bytes" where r = nChunksEachOf (length adoptionCentiles) 5 ",-- Slot-rel. Δt to adoption centile: -." checkCentile i centi (centi', d) = @@ -137,7 +318,7 @@ instance CDFFields BlockProp p where where overlay = tryOverlayFieldDescription f instance TimelineFields BlockEvents where - timelineFields _ = + timelineFields = fBoth' "beBlockNo" "block" "no." W5 Blk P0 Lin Free (IWord64 (unBlockNo.beBlockNo)) <> fBoth' "beSlotNo" "abs." "slot#" W5 Slo P0 Lin Free (IWord64 (unSlotNo .beSlotNo)) <> fBoth' "beBlock" "block" "hash" W6 Hsh P0 Lin Free (IText (shortHash.beBlock)) @@ -172,7 +353,7 @@ instance TimelineFields BlockEvents where [ fGrp' "0.50" "0.5" (IDeltaT (percSpec 0.50 . bePropagation)) "" "" , fGrp' "0.80" "0.8" (IDeltaT (percSpec 0.80 . bePropagation)) "" "" , fGrp' "0.96" "0.96" (IDeltaT (percSpec 0.96 . bePropagation)) "" "" - , fGrp' "1.00" "1.0" (IDeltaT (snd . cdfRange. 
bePropagation)) "" "" + , fGrp' "1.00" "1.0" (IDeltaT (high .cdfRange. bePropagation)) "" "" ] <> fBoth' "beAcceptance" "va-" "lid" W3 Sig P0 Lin Free (IText (bool "-" "+" . (== 0) . length . filter (not . snd) . beAcceptance)) @@ -204,8 +385,6 @@ instance TimelineFields BlockEvents where avg :: [NominalDiffTime] -> NominalDiffTime avg [] = 0 avg xs = (/ fromInteger (fromIntegral $ length xs)) $ sum xs - count :: (a -> Bool) -> [a] -> Int - count f = length . filter f bpeIsFork :: BPError -> Bool bpeIsFork BPError{eDesc=BPEFork{}} = True @@ -227,9 +406,14 @@ instance TimelineFields BlockEvents where BEErrors -> (" " <>) . show <$> beErrors BEFilterOuts -> (" " <>) . show <$> filter (not . snd) beAcceptance -mtFieldsReport :: Field DSelect p a -> Bool -mtFieldsReport Field{fId} = fId `elem` - [ "CentiCpu", "CentiGC", "CentiMut", "cdfSpanLensCpu", "RSS", "Heap", "Live", "Alloc", "GcsMinor", "GcsMajor" ] +perfSubsetFn :: PerfSubset -> (Field DSelect p a -> Bool) +perfSubsetFn = \case + PerfFull -> const True + PerfReport -> dFields mtFieldsReport + +mtFieldsReport :: [FieldName] +mtFieldsReport = + [ "CentiCpu", "CentiGC", "CentiMut", "cdfSpanLensCpu", "RSS", "Heap", "Live", "Alloc", "GcsMinor", "GcsMajor", "NetRd", "NetWr", "FsRd", "FsWr", "cdfStarts" ] instance CDFFields MachPerf p where cdfFields = @@ -263,43 +447,24 @@ instance CDFFields MachPerf p where "Time spent forging the block (TraceForgedBlock), relative to positive leadership decision" ] <> fBoth "cdfBlockGap" "Block" "gap" W4 Sec P2 Lin Free (DWord64 cdfBlockGap) - "Interblock gap" + "Interblock gap, s" "Time between blocks" - <> fGrp "NetIO, kB/s" W5 KBs P0 Lin Free - [ fGrp' "NetRd" "recv" (DWord64 (rNetRd.mpResourceCDFs)) - "Network reads kB sec" - "Network reads, kB/sec" - - , fGrp' "NetWr" "send" (DWord64 (rNetWr.mpResourceCDFs)) - "Network writes kB sec" - "Network writes, kB/sec" - ] - <> fGrp "FS IO, kB/s" W5 KBs P0 Lin Free - [ fGrp' "FsRd" "read" (DWord64 (rFsRd.mpResourceCDFs)) - "Filesystem 
reads kB sec" - "Number of bytes which this process really did cause to be fetched from the storage layer, per second" - - , fGrp' "FsWr" "write" (DWord64 (rFsWr.mpResourceCDFs)) - "Filesystem writes kB sec" - "Number of bytes which this process caused to be sent to the storage layer, modulo truncate(), per second" - ] <> fBoth "cdfDensity" "Dens" "ity" W5 Rto P2 Lin Free (DFloat cdfDensity) "Chain density" "Chain density, for the last 'k' slots" <> fPct "CentiCpu" "CPU" (Z1 200) (DWord64 (rCentiCpu.mpResourceCDFs)) - "Process CPU usage pct" + "Process CPU usage, %" "Kernel-reported CPU process usage, of a single core" <> fPct "CentiGC" "GC" (Z1 200) (DWord64 (rCentiGC .mpResourceCDFs)) - "RTS GC CPU usage pct" + "RTS GC CPU usage, %" "RTS-reported GC CPU usage, of a single core" <> fPct "CentiMut" "MUT" (Z1 200) (DWord64 (rCentiMut.mpResourceCDFs)) - "RTS Mutator CPU usage pct" + "RTS Mutator CPU usage, %" "RTS-reported mutator CPU usage, of a single core" - <> fW64 "GcsMajor" "GC" "Maj" W3 Ev (DWord64 (rGcsMajor.mpResourceCDFs)) "Major GCs" "Major GC events" @@ -310,24 +475,43 @@ instance CDFFields MachPerf p where <> fGrp "Memory usage, MB" W5 MB P0 Lin Free [ fGrp' "RSS" "RSS" (DWord64 (rRSS.mpResourceCDFs)) - "Kernel RSS MB" + "Kernel RSS, MB" "Kernel-reported RSS (Resident Set Size) of the process, MB" , fGrp' "Heap" "Heap" (DWord64 (rHeap.mpResourceCDFs)) - "RTS heap size MB" + "RTS heap size, MB" "RTS-reported heap size, MB" , fGrp' "Live" "Live" (DWord64 (rLive.mpResourceCDFs)) - "RTS GC live bytes MB" + "RTS GC live bytes, MB" "RTS-reported GC live data size, MB" ] <> fBoth "Alloc" "Alloc" "MB" W5 MB P0 Lin (Z0 5000) (DWord64 (rAlloc.mpResourceCDFs)) - "RTS alloc rate MB sec" + "RTS alloc rate MB/s" "RTS-reported allocation rate, MB/sec" + <> fGrp "NetIO, kB/s" W5 KBs P0 Lin Free + [ fGrp' "NetRd" "recv" (DWord64 (rNetRd.mpResourceCDFs)) + "Network reads kB/s" + "Network reads, kB/sec" + + , fGrp' "NetWr" "send" (DWord64 (rNetWr.mpResourceCDFs)) + 
"Network writes kB/s" + "Network writes, kB/sec" + ] + <> fGrp "FS IO, kB/s" W5 KBs P0 Lin Free + [ fGrp' "FsRd" "read" (DWord64 (rFsRd.mpResourceCDFs)) + "Filesystem reads, kB/s" + "Number of bytes which this process really did cause to be fetched from the storage layer, per second" + + , fGrp' "FsWr" "write" (DWord64 (rFsWr.mpResourceCDFs)) + "Filesystem writes, kB/s" + "Number of bytes which this process caused to be sent to the storage layer, modulo truncate(), per second" + ] + <> fGrp "CPU% spans" W5 Len P0 Lin Free [ fGrp' "cdfSpanLensCpu" "All" (DInt cdfSpanLensCpu) - "CPU 85pct spans" + "CPU 85% spans" "Length of over-85% CPU usage peaks" , fGrp' "cdfSpanLensCpuEpoch" "Epoch" (DInt cdfSpanLensCpuEpoch) @@ -346,7 +530,7 @@ instance TimelineFields (SlotStats NominalDiffTime) where data TimelineComments (SlotStats NominalDiffTime) deriving Show - timelineFields _ = + timelineFields = fW64' "slot" "abs." "slot#" W5 Slo (IWord64 (unSlotNo .slSlot)) <> fW64' "epochSlot" "ep." "slot" W4 Slo (IWord64 (unEpochSlot .slEpochSlot)) @@ -402,13 +586,13 @@ instance TimelineFields (SlotStats NominalDiffTime) where -- * Instances, depending on the metrics' instances: -- -instance (ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double), ToJSON (f (Count BlockEvents))) => ToJSON (BlockProp f) where +instance (ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double), ToJSON (f (Count BlockEvents)), ToJSON (f (DataDomain SlotNo)), ToJSON (f (DataDomain BlockNo))) => ToJSON (BlockProp f) where toJSON x = AE.genericToJSON AE.defaultOptions x & \case Object o -> Object $ processFieldOverlays x o _ -> error "Heh, serialised BlockProp to a non-Object." 
-instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a Word64)) => ToJSON (MachPerf a) where +instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a (DataDomain UTCTime)), ToJSON (a Word64), ToJSON (a (DataDomain SlotNo)), ToJSON (a (DataDomain BlockNo))) => ToJSON (MachPerf a) where toJSON x = AE.genericToJSON AE.defaultOptions x & \case Object o -> Object $ processFieldOverlays x o @@ -418,6 +602,9 @@ deriving newtype instance ToJSON MultiClusterPerf -- * Field definition auxiliaries: -- +fScalar :: Text -> Width -> Unit -> s p a -> Text -> Text -> [Field s p a] +fScalar id w u sel sd d = [Field id "" "" w u P0 Lin Free sel sd d] + fBoth :: Text -> Text -> Text -> Width -> Unit -> Precision -> Scale -> Range -> s p a -> Text -> Text -> [Field s p a] fBoth id h1 h2 wi u p s r sel sd d = [Field id h1 h2 wi u p s r sel sd d] diff --git a/bench/locli/src/Cardano/Analysis/API/Run.hs b/bench/locli/src/Cardano/Analysis/API/Run.hs index 062f1972fe5..32080e9ff03 100644 --- a/bench/locli/src/Cardano/Analysis/API/Run.hs +++ b/bench/locli/src/Cardano/Analysis/API/Run.hs @@ -7,76 +7,11 @@ import Cardano.Prelude import Control.Monad (fail) import Data.Aeson qualified as Aeson -import Data.Aeson (FromJSON(..), Object, ToJSON(..), withObject, (.:), (.:?)) -import Data.Text qualified as T -import Data.Time.Clock hiding (secondsToNominalDiffTime) -import Data.Time.Clock.POSIX import Cardano.Util import Cardano.Analysis.API.ChainFilter import Cardano.Analysis.API.Context import Cardano.Analysis.API.Ground -import Cardano.Analysis.API.LocliVersion - --- | Explain the poor human a little bit of what was going on: -data Anchor - = Anchor - { aRuns :: [Text] - , aFilters :: ([FilterName], [ChainFilter]) - , aSlots :: Maybe (DataDomain SlotNo) - , aBlocks :: Maybe (DataDomain BlockNo) - , aVersion :: Cardano.Analysis.API.LocliVersion.LocliVersion - , aWhen :: UTCTime - } - -runAnchor :: Run -> UTCTime -> ([FilterName], 
[ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor -runAnchor Run{..} = tagsAnchor [tag metadata] - -tagsAnchor :: [Text] -> UTCTime -> ([FilterName], [ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor -tagsAnchor aRuns aWhen aFilters aSlots aBlocks = - Anchor { aVersion = getLocliVersion, .. } - -renderAnchor :: Anchor -> Text -renderAnchor a = mconcat - [ "runs: ", renderAnchorRuns a, ", " - , renderAnchorNoRuns a - ] - -renderAnchorRuns :: Anchor -> Text -renderAnchorRuns Anchor{..} = mconcat - [ T.intercalate ", " aRuns ] - -renderAnchorFiltersAndDomains :: Anchor -> Text -renderAnchorFiltersAndDomains a@Anchor{..} = mconcat - [ "filters: ", case fst aFilters of - [] -> "unfiltered" - xs -> T.intercalate ", " (unFilterName <$> xs) - , renderAnchorDomains a] - -renderAnchorDomains :: Anchor -> Text -renderAnchorDomains Anchor{..} = mconcat $ - maybe [] ((:[]) . renderDomain "slot" (show . unSlotNo)) aSlots - <> - maybe [] ((:[]) . renderDomain "block" (show . unBlockNo)) aBlocks - where renderDomain :: Text -> (a -> Text) -> DataDomain a -> Text - renderDomain ty r DataDomain{..} = mconcat - [ ", ", ty - , " range: raw(", r ddRawFirst, "-", r ddRawLast, ", ", show ddRawCount, " total)" - , " filtered(" - , maybe "none" r ddFilteredFirst, "-" - , maybe "none" r ddFilteredLast, ", ", show ddFilteredCount, " total)" - ] - -renderAnchorNoRuns :: Anchor -> Text -renderAnchorNoRuns a@Anchor{..} = mconcat - [ renderAnchorFiltersAndDomains a - , ", ", renderProgramAndVersion aVersion - , ", analysed at ", renderAnchorDate a - ] - --- Rounds time to seconds. -renderAnchorDate :: Anchor -> Text -renderAnchorDate = show . posixSecondsToUTCTime . secondsToNominalDiffTime . fromIntegral @Int . round . utcTimeToPOSIXSeconds . 
aWhen data AnalysisCmdError = AnalysisCmdError !Text diff --git a/bench/locli/src/Cardano/Analysis/API/Types.hs b/bench/locli/src/Cardano/Analysis/API/Types.hs index 37b64c721ab..0aa22e604f0 100644 --- a/bench/locli/src/Cardano/Analysis/API/Types.hs +++ b/bench/locli/src/Cardano/Analysis/API/Types.hs @@ -4,10 +4,8 @@ {-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} module Cardano.Analysis.API.Types (module Cardano.Analysis.API.Types) where -import Util (count) import Cardano.Prelude hiding (head) -import Data.Aeson (ToJSON(..), FromJSON(..)) import Data.Text qualified as T import Options.Applicative qualified as Opt @@ -21,7 +19,6 @@ import Cardano.Util import Cardano.Analysis.API.Chain import Cardano.Analysis.API.ChainFilter import Cardano.Analysis.API.Context -import Cardano.Analysis.API.Field import Cardano.Analysis.API.Ground import Cardano.Analysis.API.LocliVersion @@ -30,26 +27,53 @@ import Cardano.Analysis.API.LocliVersion -- -- | Overall summary of all analyses. -data Summary where +data Summary f where Summary :: - { sumWhen :: !UTCTime + { sumAnalysisTime :: !UTCTime + , sumMeta :: !Metadata + , sumGenesis :: !Genesis + , sumGenesisSpec :: !GenesisSpec + , sumGenerator :: !GeneratorProfile , sumLogStreams :: !(Count [LogObject]) - , sumLogObjects :: !(Count LogObject) + , sumLogObjectsTotal :: !(Count LogObject) , sumFilters :: !([FilterName], [ChainFilter]) , sumChainRejectionStats :: ![(ChainFilter, Int)] , sumBlocksRejected :: !(Count BlockEvents) + , sumDomainTime :: !(DataDomain UTCTime) + , sumStartSpread :: !(DataDomain UTCTime) + , sumStopSpread :: !(DataDomain UTCTime) , sumDomainSlots :: !(DataDomain SlotNo) , sumDomainBlocks :: !(DataDomain BlockNo) - } -> Summary - deriving (Generic, FromJSON, ToJSON, Show) + , cdfLogObjects :: !(CDF f Int) + , cdfLogObjectsEmitted :: !(CDF f Int) + } -> Summary f + deriving (Generic) + +type SummaryOne = Summary I +type MultiSummary = Summary (CDF I) + +deriving instance (FromJSON (f Int), FromJSON (f 
Double)) => FromJSON (Summary f) +deriving instance ( ToJSON (f Int), ToJSON (f Double)) => ToJSON (Summary f) +deriving instance ( Show (f Int), Show (f Double)) => Show (Summary f) + +data BlockStats + = BlockStats + { bsFiltered :: Count ForgerEvents + , bsRejected :: Count ForgerEvents + , bsUnchained :: Count ForgerEvents + } + deriving (Generic, FromJSON, ToJSON) +bsTotal, bsChained :: BlockStats -> Count ForgerEvents +bsTotal BlockStats{..} = bsFiltered + bsRejected + bsUnchained +bsChained BlockStats{..} = bsFiltered + bsRejected -- | Results of block propagation analysis. data BlockProp f = BlockProp { bpVersion :: !Cardano.Analysis.API.LocliVersion.LocliVersion - , bpDomainSlots :: !(DataDomain SlotNo) - , bpDomainBlocks :: !(DataDomain BlockNo) + , bpDomainSlots :: ![DataDomain SlotNo] + , bpDomainBlocks :: ![DataDomain BlockNo] , cdfForgerStarts :: !(CDF f NominalDiffTime) , cdfForgerBlkCtx :: !(CDF f NominalDiffTime) , cdfForgerLgrState :: !(CDF f NominalDiffTime) @@ -65,17 +89,93 @@ data BlockProp f , cdfPeerAnnouncements :: !(CDF f NominalDiffTime) , cdfPeerAdoptions :: !(CDF f NominalDiffTime) , cdfPeerSends :: !(CDF f NominalDiffTime) - , cdfForks :: !(CDF f Int) - , cdfSizes :: !(CDF f Int) + , cdfBlocksPerHost :: !(CDF f Int) + , cdfBlocksFilteredRatio :: !(CDF f Double) + , cdfBlocksChainedRatio :: !(CDF f Double) + , cdfBlockBattles :: !(CDF f Int) + , cdfBlockSizes :: !(CDF f Int) , bpPropagation :: !(Map Text (CDF f NominalDiffTime)) } deriving (Generic) -deriving instance (Show (f NominalDiffTime), Show (f Int), Show (f Double), Show (f (Count BlockEvents))) => Show (BlockProp f) -deriving instance (FromJSON (f NominalDiffTime), FromJSON (f Int), FromJSON (f Double), FromJSON (f (Count BlockEvents))) => FromJSON (BlockProp f) +deriving instance (Show (f NominalDiffTime), Show (f Int), Show (f Double), Show (f (Count BlockEvents)), Show (f (DataDomain SlotNo)), Show (f (DataDomain BlockNo))) => Show (BlockProp f) +deriving instance (FromJSON 
(f NominalDiffTime), FromJSON (f Int), FromJSON (f Double), FromJSON (f (Count BlockEvents)), FromJSON (f (DataDomain SlotNo)), FromJSON (f (DataDomain BlockNo))) => FromJSON (BlockProp f) type BlockPropOne = BlockProp I type MultiBlockProp = BlockProp (CDF I) +-- | The top-level representation of the machine timeline analysis results. +data MachPerf f + = MachPerf + { mpVersion :: !Cardano.Analysis.API.LocliVersion.LocliVersion + , mpDomainSlots :: ![DataDomain SlotNo] + , cdfHostSlots :: !(CDF f Word64) + -- distributions + , cdfStarts :: !(CDF f Word64) + , cdfLeads :: !(CDF f Word64) + , cdfUtxo :: !(CDF f Word64) + , cdfDensity :: !(CDF f Double) + , cdfStarted :: !(CDF f NominalDiffTime) + , cdfBlkCtx :: !(CDF f NominalDiffTime) + , cdfLgrState :: !(CDF f NominalDiffTime) + , cdfLgrView :: !(CDF f NominalDiffTime) + , cdfLeading :: !(CDF f NominalDiffTime) + , cdfForged :: !(CDF f NominalDiffTime) + , cdfBlockGap :: !(CDF f Word64) + , cdfSpanLensCpu :: !(CDF f Int) + , cdfSpanLensCpuEpoch :: !(CDF f Int) + , cdfSpanLensCpuRwd :: !(CDF f Int) + , mpResourceCDFs :: !(Resources (CDF f Word64)) + } + deriving (Generic) + +-- | One machine's performance +type MachPerfOne = MachPerf I + +-- | Bunch'a machines performances +type ClusterPerf = MachPerf (CDF I) + +-- | Bunch'a bunches'a machine performances. +-- Same as above, since we collapse [CDF I] into CDF I -- just with more statistical confidence. +newtype MultiClusterPerf + = MultiClusterPerf { unMultiClusterPerf :: ClusterPerf } + deriving newtype (FromJSON) + +-- * BlockProp +-- +data Chain + = Chain + { cDomSlots :: !(DataDomain SlotNo) + , cDomBlocks :: !(DataDomain BlockNo) + , cRejecta :: ![BlockEvents] + , cMainChain :: ![BlockEvents] + , cBlockStats :: !(Map Host BlockStats) + } + +-- | Block's events, as seen by its forger. 
+data ForgerEvents a + = ForgerEvents + { bfeHost :: !Host + , bfeBlock :: !Hash + , bfeBlockPrev :: !Hash + , bfeBlockNo :: !BlockNo + , bfeSlotNo :: !SlotNo + , bfeSlotStart :: !SlotStart + , bfeEpochNo :: !EpochNo + , bfeBlockSize :: !(SMaybe Int) + , bfeStarted :: !(SMaybe a) + , bfeBlkCtx :: !(SMaybe a) + , bfeLgrState :: !(SMaybe a) + , bfeLgrView :: !(SMaybe a) + , bfeLeading :: !(SMaybe a) + , bfeForged :: !(SMaybe a) + , bfeAnnounced :: !(SMaybe a) + , bfeSending :: !(SMaybe a) + , bfeAdopted :: !(SMaybe a) + , bfeChainDelta :: !Int + , bfeErrs :: [BPError] + } + deriving (Generic, NFData, FromJSON, ToJSON, Show) + -- | All events related to a block. data BlockEvents = BlockEvents @@ -199,47 +299,13 @@ data RunScalars deriving stock Generic deriving anyclass NFData --- | The top-level representation of the machine timeline analysis results. -data MachPerf f - = MachPerf - { mpVersion :: !Cardano.Analysis.API.LocliVersion.LocliVersion - , mpDomainSlots :: !(DataDomain SlotNo) - -- distributions - , cdfStarts :: !(CDF f Word64) - , cdfLeads :: !(CDF f Word64) - , cdfUtxo :: !(CDF f Word64) - , cdfDensity :: !(CDF f Double) - , cdfStarted :: !(CDF f NominalDiffTime) - , cdfBlkCtx :: !(CDF f NominalDiffTime) - , cdfLgrState :: !(CDF f NominalDiffTime) - , cdfLgrView :: !(CDF f NominalDiffTime) - , cdfLeading :: !(CDF f NominalDiffTime) - , cdfForged :: !(CDF f NominalDiffTime) - , cdfBlockGap :: !(CDF f Word64) - , cdfSpanLensCpu :: !(CDF f Int) - , cdfSpanLensCpuEpoch :: !(CDF f Int) - , cdfSpanLensCpuRwd :: !(CDF f Int) - , mpResourceCDFs :: !(Resources (CDF f Word64)) - } - deriving (Generic) - --- | One machine's performance -type MachPerfOne = MachPerf I - --- | Bunch'a machines performances -type ClusterPerf = MachPerf (CDF I) - --- | Bunch'a bunches'a machine performances. --- Same as above, since we collapse [CDF I] into CDF I -- just with more statistical confidence. 
-newtype MultiClusterPerf - = MultiClusterPerf { unMultiClusterPerf :: ClusterPerf } - deriving newtype (FromJSON) - +-- * MachPerf / ClusterPerf +-- deriving newtype instance FromJSON a => FromJSON (I a) deriving newtype instance ToJSON a => ToJSON (I a) -deriving instance (FromJSON (a Double), FromJSON (a Int), FromJSON (a NominalDiffTime), FromJSON (a Word64)) => FromJSON (MachPerf a) -deriving instance (NFData (a Double), NFData (a Int), NFData (a NominalDiffTime), NFData (a Word64)) => NFData (MachPerf a) -deriving instance (Show (a Double), Show (a Int), Show (a NominalDiffTime), Show (a Word64)) => Show (MachPerf a) +deriving instance (FromJSON (a Double), FromJSON (a Int), FromJSON (a NominalDiffTime), FromJSON (a Word64), FromJSON (a (DataDomain SlotNo)), FromJSON (a (DataDomain UTCTime))) => FromJSON (MachPerf a) +deriving instance (NFData (a Double), NFData (a Int), NFData (a NominalDiffTime), NFData (a Word64), NFData (a (DataDomain SlotNo)), NFData (a (DataDomain UTCTime))) => NFData (MachPerf a) +deriving instance (Show (a Double), Show (a Int), Show (a NominalDiffTime), Show (a Word64), Show (a (DataDomain SlotNo)), Show (a (DataDomain UTCTime))) => Show (MachPerf a) data SlotStats a = SlotStats @@ -334,35 +400,17 @@ testSlotStats g SlotStats{..} = \case -- data PropSubset = PropFull + | PropControl | PropForger | PropPeers | PropEndToEnd | PropEndToEndBrief deriving Show -bpFieldSelectEndToEnd :: Field DSelect p a -> Bool -bpFieldSelectEndToEnd Field{fId} = fId `elem` adoptionCentilesRendered - where - adoptionCentilesRendered :: [Text] - adoptionCentilesRendered = adoptionCentiles <&> renderAdoptionCentile - -bpFieldSelectEndToEndBrief :: Field DSelect p a -> Bool -bpFieldSelectEndToEndBrief Field{fId} = fId `elem` adoptionCentilesRendered - where - adoptionCentilesRendered :: [Text] - adoptionCentilesRendered = adoptionCentilesBrief <&> renderAdoptionCentile - -propSubsetFn :: PropSubset -> (Field DSelect p a -> Bool) -propSubsetFn = \case - 
PropFull -> const True - PropForger -> bpFieldSelectForger - PropPeers -> bpFieldSelectPeers - PropEndToEnd -> bpFieldSelectEndToEnd - PropEndToEndBrief -> bpFieldSelectEndToEndBrief - parsePropSubset :: Opt.Parser PropSubset parsePropSubset = [ Opt.flag' PropFull (Opt.long "full" <> Opt.help "Complete propagation data") + , Opt.flag' PropControl (Opt.long "control" <> Opt.help "Only overall control data") , Opt.flag' PropForger (Opt.long "forger" <> Opt.help "Only forger propagation") , Opt.flag' PropPeers (Opt.long "peers" <> Opt.help "Only peer propagation") , Opt.flag' PropEndToEnd (Opt.long "end-to-end" <> Opt.help "Only end-to-end propagation") @@ -386,27 +434,18 @@ adoptionCentilesBrief :: [Centile] adoptionCentilesBrief = [ Centile 0.5, Centile 0.9, Centile 0.96 ] - -bpFieldSelectForger :: Field DSelect p a -> Bool -bpFieldSelectForger Field{fId} = fId `elem` - [ "cdfForgerStarts", "cdfForgerBlkCtx", "cdfForgerLgrState", "cdfForgerLgrView", "cdfForgerLeads", "cdfForgerForges", "cdfForgerAnnouncements", "cdfForgerSends", "cdfForgerAdoptions", "cdfForks" ] - -bpFieldSelectPeers :: Field DSelect p a -> Bool -bpFieldSelectPeers Field{fId} = fId `elem` - [ "cdfPeerNotices", "cdfPeerRequests", "cdfPeerFetches", "cdfPeerAnnouncements", "cdfPeerSends", "cdfPeerAdoptions" ] - -- -- * Machine performance report subsetting -- data PerfSubset = PerfFull - | PerfSummary + | PerfReport deriving Show parsePerfSubset :: Opt.Parser PerfSubset parsePerfSubset = - [ Opt.flag' PerfFull (Opt.long "full" <> Opt.help "Complete performance data") - , Opt.flag' PerfSummary (Opt.long "summary" <> Opt.help "Only report-relevant perf data") + [ Opt.flag' PerfFull (Opt.long "full" <> Opt.help "Complete performance data") + , Opt.flag' PerfReport (Opt.long "report" <> Opt.help "Only report-relevant perf data") ] & \case (x:xs) -> foldl (<|>) x xs [] -> error "Crazy world." 
diff --git a/bench/locli/src/Cardano/Analysis/BlockProp.hs b/bench/locli/src/Cardano/Analysis/BlockProp.hs index 0bbbdc9597e..13711bc5313 100644 --- a/bench/locli/src/Cardano/Analysis/BlockProp.hs +++ b/bench/locli/src/Cardano/Analysis/BlockProp.hs @@ -67,8 +67,11 @@ summariseMultiBlockProp centiles bs@(headline:_) = do cdfPeerAdoptions <- cdf2OfCDFs comb $ bs <&> cdfPeerAdoptions cdfPeerAnnouncements <- cdf2OfCDFs comb $ bs <&> cdfPeerAnnouncements cdfPeerSends <- cdf2OfCDFs comb $ bs <&> cdfPeerSends - cdfForks <- cdf2OfCDFs comb $ bs <&> cdfForks - cdfSizes <- cdf2OfCDFs comb $ bs <&> cdfSizes + cdfBlockBattles <- cdf2OfCDFs comb $ bs <&> cdfBlockBattles + cdfBlockSizes <- cdf2OfCDFs comb $ bs <&> cdfBlockSizes + cdfBlocksPerHost <- cdf2OfCDFs comb $ bs <&> cdfBlocksPerHost + cdfBlocksFilteredRatio <- cdf2OfCDFs comb $ bs <&> cdfBlocksFilteredRatio + cdfBlocksChainedRatio <- cdf2OfCDFs comb $ bs <&> cdfBlocksChainedRatio bpPropagation <- sequence $ transpose (bs <&> Map.toList . bpPropagation) <&> \case [] -> Left CDFEmptyDataset @@ -78,8 +81,8 @@ summariseMultiBlockProp centiles bs@(headline:_) = do (d,) <$> cdf2OfCDFs comb (snd <$> xs) pure $ BlockProp { bpVersion = bpVersion headline - , bpDomainSlots = dataDomainsMergeOuter $ bs <&> bpDomainSlots - , bpDomainBlocks = dataDomainsMergeOuter $ bs <&> bpDomainBlocks + , bpDomainSlots = concat $ bs <&> bpDomainSlots + , bpDomainBlocks = concat $ bs <&> bpDomainBlocks , bpPropagation = Map.fromList bpPropagation , .. } @@ -87,31 +90,6 @@ summariseMultiBlockProp centiles bs@(headline:_) = do comb :: forall a. Divisible a => Combine I a comb = stdCombine1 centiles --- | Block's events, as seen by its forger. 
-data ForgerEvents a - = ForgerEvents - { bfeHost :: !Host - , bfeBlock :: !Hash - , bfeBlockPrev :: !Hash - , bfeBlockNo :: !BlockNo - , bfeSlotNo :: !SlotNo - , bfeSlotStart :: !SlotStart - , bfeEpochNo :: !EpochNo - , bfeBlockSize :: !(SMaybe Int) - , bfeStarted :: !(SMaybe a) - , bfeBlkCtx :: !(SMaybe a) - , bfeLgrState :: !(SMaybe a) - , bfeLgrView :: !(SMaybe a) - , bfeLeading :: !(SMaybe a) - , bfeForged :: !(SMaybe a) - , bfeAnnounced :: !(SMaybe a) - , bfeSending :: !(SMaybe a) - , bfeAdopted :: !(SMaybe a) - , bfeChainDelta :: !Int - , bfeErrs :: [BPError] - } - deriving (Generic, NFData, FromJSON, ToJSON, Show) - bfePrevBlock :: ForgerEvents a -> Maybe Hash bfePrevBlock x = case bfeBlockNo x of 0 -> Nothing @@ -176,6 +154,9 @@ mapMbe f o e = \case MOE x -> o x MBE x -> e x +mbeForge :: MachBlockEvents a -> Maybe (ForgerEvents a) +mbeForge = mapMbe Just (const Nothing) (const Nothing) + partitionMbes :: [MachBlockEvents a] -> ([ForgerEvents a], [ObserverEvents a], [BPError]) partitionMbes = go [] [] [] where @@ -246,13 +227,14 @@ mbeBlockNo :: MachBlockEvents a -> BlockNo mbeBlockNo = mapMbe bfeBlockNo boeBlockNo (const (-1)) -- | Machine's private view of all the blocks. -type MachBlockMap a +type MachHashBlockEvents a = Map.Map Hash (MachBlockEvents a) +-- An accumulator for: tip-block-events & the set of all blocks events data MachView = MachView { mvHost :: !Host - , mvBlocks :: !(MachBlockMap UTCTime) + , mvBlocks :: !(MachHashBlockEvents UTCTime) , mvStarted :: !(SMaybe UTCTime) , mvBlkCtx :: !(SMaybe UTCTime) , mvLgrState :: !(SMaybe UTCTime) @@ -261,6 +243,9 @@ data MachView } deriving (FromJSON, Generic, NFData, ToJSON) +mvForges :: MachView -> [ForgerEvents UTCTime] +mvForges = mapMaybe (mbeForge . snd) . Map.toList . 
mvBlocks + machViewMaxBlock :: MachView -> MachBlockEvents UTCTime machViewMaxBlock MachView{..} = Map.elems mvBlocks @@ -278,37 +263,48 @@ buildMachViews run = mapConcurrentlyPure (fst &&& blockEventMapsFromLogObjects r blockEventsAcceptance :: Genesis -> [ChainFilter] -> BlockEvents -> [(ChainFilter, Bool)] blockEventsAcceptance genesis flts be = flts <&> (id &&& testBlockEvents genesis be) -rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(JsonLogfile, MachView)] -> IO (DataDomain SlotNo, DataDomain BlockNo, [BlockEvents], [BlockEvents]) -rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do - progress "tip" $ Q $ show $ bfeBlock tipBlock - forM_ flts $ - progress "filter" . Q . show - pure (domSlot, domBlock, chainRejecta, chain) +rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(JsonLogfile, MachView)] -> Chain +rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = + Chain + { cDomSlots = DataDomain + (Interval (blk0 & beSlotNo) (blkL & beSlotNo)) + (mFltDoms <&> fst3) + (beSlotNo blkL - beSlotNo blk0 & fromIntegral . unSlotNo) + (mFltDoms <&> thd3 & fromMaybe 0) + , cDomBlocks = DataDomain + (Interval (blk0 & beBlockNo) (blkL & beBlockNo)) + (mFltDoms <&> snd3) + (length cMainChain) + (length accepta) + , cBlockStats = Map.fromList $ machViews <&> (mvHost &&& mvBlockStats) + , .. + } where - (blk0, blkL) = (head chain, last chain) - mblkV = - liftA2 (,) (find (all snd . beAcceptance) chain) - (find (all snd . beAcceptance) (reverse chain)) - domSlot = DataDomain - (blk0 & beSlotNo) (blkL & beSlotNo) - (mblkV <&> beSlotNo . fst) - (mblkV <&> beSlotNo . snd) - (beSlotNo blkL - beSlotNo blk0 & fromIntegral . unSlotNo) - (mblkV & - maybe 0 (fromIntegral . unSlotNo . uncurry (on (flip (-)) beSlotNo))) - domBlock = DataDomain - (blk0 & beBlockNo) (blkL & beBlockNo) - (mblkV <&> beBlockNo . fst) - (mblkV <&> beBlockNo . 
snd) - (length chain) - (length acceptableChain) - - (acceptableChain, chainRejecta) = partition (all snd . beAcceptance) chain - - chain = computeChainBlockGaps $ - doRebuildChain (fmap deltifyEvents <$> eventMaps) tipHash - - eventMaps = mvBlocks <$> machViews + cMainChain = computeChainBlockGaps $ + doRebuildChain (fmap deltifyEvents <$> eventMaps) tipHash + (accepta, cRejecta) = partition (all snd . beAcceptance) cMainChain + + blkSets :: (Set Hash, Set Hash) + blkSets@(acceptaBlocks, rejectaBlocks) = + both (Set.fromList . fmap beBlock) (accepta, cRejecta) + mvBlockStats :: MachView -> BlockStats + mvBlockStats (fmap bfeBlock . mvForges -> fs) = BlockStats {..} + where bsUnchained = (countListAll fs & unsafeCoerceCount) + - bsFiltered - bsRejected + bsFiltered = countList (`Set.member` acceptaBlocks) fs & unsafeCoerceCount + bsRejected = countList (`Set.member` rejectaBlocks) fs & unsafeCoerceCount + + (blk0, blkL) = (head &&& last) cMainChain + mFltDoms :: Maybe (Interval SlotNo, Interval BlockNo, Int) + mFltDoms = + liftA2 (,) (find (all snd . beAcceptance) cMainChain) + (find (all snd . beAcceptance) (reverse cMainChain)) + <&> \firstLastBlk -> + (,,) (uncurry Interval $ both beSlotNo firstLastBlk) + (uncurry Interval $ both beBlockNo firstLastBlk) + (fromIntegral . unSlotNo . 
uncurry (on (flip (-)) beSlotNo) $ firstLastBlk) + + eventMaps = machViews <&> mvBlocks finalBlockEv = maximumBy ordBlockEv $ machViewMaxBlock <$> machViews @@ -324,13 +320,13 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do step prevForge x@(beForgedAt -> at) = (at, x { beForge = (beForge x) { bfBlockGap = at `diffUTCTime` prevForge } }) - rewindChain :: [MachBlockMap a] -> Int -> Hash -> Hash + rewindChain :: [MachHashBlockEvents a] -> Int -> Hash -> Hash rewindChain eventMaps count tip = go tip count where go tip = \case 0 -> tip n -> go (bfeBlockPrev $ getBlockForge eventMaps tip) (n - 1) - getBlockForge :: [MachBlockMap a] -> Hash -> ForgerEvents a + getBlockForge :: [MachHashBlockEvents a] -> Hash -> ForgerEvents a getBlockForge xs h = mapMaybe (Map.lookup h) xs & find mbeForgP @@ -344,23 +340,30 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do adoptionMap :: [Map Hash UTCTime] adoptionMap = Map.mapMaybe (lazySMaybe . mbeAdopted) <$> eventMaps - heightMap :: Map BlockNo (Set Hash) - heightMap = foldr (\em acc -> - Map.foldr - (\mbe -> Map.alter - (maybe (Just $ Set.singleton (mbeBlock mbe)) - (Just . Set.insert (mbeBlock mbe))) - (mbeBlockNo mbe)) - acc em) - mempty eventMaps - - doRebuildChain :: [MachBlockMap NominalDiffTime] -> Hash -> [BlockEvents] - doRebuildChain machBlockMaps tip = go (Just tip) [] + heightHostMap :: (Map BlockNo (Set Hash), Map Host (Set Hash)) + heightHostMap@(heightMap, hostMap) + = foldr (\MachView{..} (accHeight, accHost) -> + (,) + (Map.foldr + (\mbe -> Map.alter + (maybe (Just $ Set.singleton (mbeBlock mbe)) + (Just . Set.insert (mbeBlock mbe))) + (mbeBlockNo mbe)) + accHeight mvBlocks) + (Map.insert + mvHost + (Map.elems mvBlocks + & Set.fromList . fmap bfeBlock . 
mapMaybe mbeForge) + accHost)) + (mempty, mempty) machViews + + doRebuildChain :: [MachHashBlockEvents NominalDiffTime] -> Hash -> [BlockEvents] + doRebuildChain machBlockMaps chainTipHash = go (Just chainTipHash) [] where go Nothing acc = acc - go (Just h) acc = - case partitionMbes $ mapMaybe (Map.lookup h) machBlockMaps of + go (Just hash) acc = + case partitionMbes $ mapMaybe (Map.lookup hash) machBlockMaps of ([], _, ers) -> error $ mconcat - [ "No forger for hash ", show h + [ "No forger for hash ", show hash , "\nErrors:\n" ] ++ intercalate "\n" (show <$> ers) blkEvs@(forgerEv:_, oEvs, ers) -> @@ -407,7 +410,7 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do & handleMiss "Δt Adopted (forger)" , bfChainDelta = bfeChainDelta } - , beForks = unsafeCoerceCount $ countOfList otherBlocks + , beForks = unsafeCoerceCount $ countListAll otherBlocks , beObservations = catSMaybes $ os <&> \ObserverEvents{..}-> @@ -467,11 +470,11 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do , " -- missing: ", slotDesc ] -blockProp :: Run -> [BlockEvents] -> DataDomain SlotNo -> DataDomain BlockNo -> IO BlockPropOne -blockProp run@Run{genesis} fullChain domSlot domBlock = do +blockProp :: Run -> Chain -> IO BlockPropOne +blockProp run@Run{genesis} Chain{..} = do pure $ BlockProp - { bpDomainSlots = domSlot - , bpDomainBlocks = domBlock + { bpDomainSlots = [cDomSlots] + , bpDomainBlocks = [cDomBlocks] , cdfForgerStarts = forgerEventsCDF (SJust . bfStarted . beForge) , cdfForgerBlkCtx = forgerEventsCDF (bfBlkCtx . beForge) , cdfForgerLgrState = forgerEventsCDF (bfLgrState . beForge) @@ -491,12 +494,25 @@ blockProp run@Run{genesis} fullChain domSlot domBlock = do [ ( T.pack $ printf "cdf%.2f" p' , forgerEventsCDF (SJust . unI . projectCDF' "bePropagation" p . bePropagation)) | p@(Centile p') <- adoptionCentiles <> [Centile 1.0] ] - , cdfForks = forgerEventsCDF (SJust . unCount . beForks) - , cdfSizes = forgerEventsCDF (SJust . 
bfBlockSize . beForge) + , cdfBlockBattles = forgerEventsCDF (SJust . unCount . beForks) + , cdfBlockSizes = forgerEventsCDF (SJust . bfBlockSize . beForge) , bpVersion = getLocliVersion + , cdfBlocksPerHost = cdf stdCentiles (blockStats <&> unCount + . bsTotal) + , cdfBlocksFilteredRatio = cdf stdCentiles (blockStats <&> + uncurry ((/) `on` + fromIntegral . unCount) + . (bsFiltered &&& bsChained)) + , cdfBlocksChainedRatio = cdf stdCentiles (blockStats <&> + uncurry ((/) `on` + fromIntegral . unCount) + . (bsChained &&& bsTotal)) } where - analysisChain = filter (all snd . beAcceptance) fullChain + blockStats = Map.elems cBlockStats + + analysisChain :: [BlockEvents] + analysisChain = filter (all snd . beAcceptance) cMainChain forgerEventsCDF :: Divisible a => (BlockEvents -> SMaybe a) -> CDF I a forgerEventsCDF = flip (witherToDistrib (cdf stdCentiles)) analysisChain diff --git a/bench/locli/src/Cardano/Analysis/MachPerf.hs b/bench/locli/src/Cardano/Analysis/MachPerf.hs index f56fa4dfa98..4cccc818dab 100644 --- a/bench/locli/src/Cardano/Analysis/MachPerf.hs +++ b/bench/locli/src/Cardano/Analysis/MachPerf.hs @@ -3,7 +3,6 @@ {- HLINT ignore "Use head" -} module Cardano.Analysis.MachPerf (module Cardano.Analysis.MachPerf) where -import Prelude (head, last) import Cardano.Prelude hiding (head) import Cardano.Prelude qualified as CP @@ -14,7 +13,6 @@ import Data.Text.Short (toText) import Data.Vector (Vector) import Data.Vector qualified as Vec -import Data.Time.Clock (diffUTCTime) import Data.Time.Clock qualified as Time import Data.CDF @@ -491,8 +489,8 @@ slotStatsMachPerf _ (JsonLogfile f, []) = slotStatsMachPerf run (f, slots) = Right . (f,) $ MachPerf { mpVersion = getLocliVersion - , mpDomainSlots = mkDataDomainInj (slSlot $ head slots) (slSlot $ last slots) - (fromIntegral . 
unSlotNo) + , mpDomainSlots = [domSlots] + , cdfHostSlots = dist [fromIntegral $ ddFilteredCount domSlots] -- , cdfStarts = dist (slCountStarts <$> slots) , cdfLeads = dist (slCountLeads <$> slots) @@ -509,8 +507,13 @@ slotStatsMachPerf run (f, slots) = , cdfSpanLensCpuEpoch = dist sssSpanLensCpuEpoch , cdfSpanLensCpuRwd = dist sssSpanLensCpuRwd , mpResourceCDFs = computeResCDF stdCentiles slResources slots + , .. } where + domSlots = mkDataDomainInj sFirst sLast (fromIntegral . unSlotNo) + + (,) sFirst sLast = (slSlot . head &&& slSlot . last) slots + dist :: Divisible a => [a] -> CDF I a dist = cdf stdCentiles @@ -518,9 +521,10 @@ slotStatsMachPerf run (f, slots) = -- * 5. Multi-machine & multi-run summaries: -- -summariseMultiClusterPerf :: [Centile] -> [ClusterPerf] -> Either CDFError MultiClusterPerf -summariseMultiClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" -summariseMultiClusterPerf centiles mps@(headline:_) = do +summariseClusterPerf :: [Centile] -> [MachPerfOne] -> Either CDFError ClusterPerf +summariseClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" +summariseClusterPerf centiles mps@(headline:_) = do + cdfHostSlots <- cdf2OfCDFs comb $ mps <&> cdfHostSlots cdfStarts <- cdf2OfCDFs comb $ mps <&> cdfStarts cdfLeads <- cdf2OfCDFs comb $ mps <&> cdfLeads cdfUtxo <- cdf2OfCDFs comb $ mps <&> cdfUtxo @@ -538,20 +542,23 @@ summariseMultiClusterPerf centiles mps@(headline:_) = do mpResourceCDFs <- sequence $ traverse identity (mps <&> mpResourceCDFs) <&> \case [] -> Left CDFEmptyDataset - (xs :: [CDF (CDF I) Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) + (xs :: [CDF I Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) - pure . MultiClusterPerf $ MachPerf - { mpVersion = mpVersion headline - , mpDomainSlots = dataDomainsMergeOuter $ mps <&> mpDomainSlots + pure MachPerf + { mpVersion = mpVersion headline + , mpDomainSlots = domSlots , .. } where - comb :: forall a. 
Divisible a => Combine (CDF I) a - comb = stdCombine2 centiles + domSlots = concat $ mps <&> mpDomainSlots -summariseClusterPerf :: [Centile] -> [MachPerfOne] -> Either CDFError ClusterPerf -summariseClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" -summariseClusterPerf centiles mps@(headline:_) = do + comb :: forall a. Divisible a => Combine I a + comb = stdCombine1 centiles + +summariseMultiClusterPerf :: [Centile] -> [ClusterPerf] -> Either CDFError MultiClusterPerf +summariseMultiClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" +summariseMultiClusterPerf centiles mps@(headline:_) = do + cdfHostSlots <- cdf2OfCDFs comb $ mps <&> cdfHostSlots cdfStarts <- cdf2OfCDFs comb $ mps <&> cdfStarts cdfLeads <- cdf2OfCDFs comb $ mps <&> cdfLeads cdfUtxo <- cdf2OfCDFs comb $ mps <&> cdfUtxo @@ -569,13 +576,13 @@ summariseClusterPerf centiles mps@(headline:_) = do mpResourceCDFs <- sequence $ traverse identity (mps <&> mpResourceCDFs) <&> \case [] -> Left CDFEmptyDataset - (xs :: [CDF I Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) + (xs :: [CDF (CDF I) Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) - pure MachPerf + pure . MultiClusterPerf $ MachPerf { mpVersion = mpVersion headline - , mpDomainSlots = dataDomainsMergeOuter $ mps <&> mpDomainSlots + , mpDomainSlots = concat $ mps <&> mpDomainSlots , .. } where - comb :: forall a. Divisible a => Combine I a - comb = stdCombine1 centiles + comb :: forall a. 
Divisible a => Combine (CDF I) a + comb = stdCombine2 centiles diff --git a/bench/locli/src/Cardano/Analysis/Summary.hs b/bench/locli/src/Cardano/Analysis/Summary.hs index 0ae96da4f98..eb8a492458a 100644 --- a/bench/locli/src/Cardano/Analysis/Summary.hs +++ b/bench/locli/src/Cardano/Analysis/Summary.hs @@ -2,38 +2,81 @@ {-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} module Cardano.Analysis.Summary (module Cardano.Analysis.Summary) where -import Cardano.Prelude hiding (head) +import Prelude (head, last) +import Cardano.Prelude import Data.Map.Strict qualified as Map import Cardano.Analysis.API import Cardano.Unlog.LogObject +import Cardano.Util computeSummary :: UTCTime - -> [[LogObject]] + -> Metadata + -> Genesis + -> GenesisSpec + -> GeneratorProfile + -> [(Count Cardano.Prelude.Text, [LogObject])] -> ([FilterName], [ChainFilter]) - -> DataDomain SlotNo - -> DataDomain BlockNo - -> [BlockEvents] - -> Summary -computeSummary sumWhen - objLists + -> ClusterPerf + -> BlockPropOne + -> Chain + -> Summary I +computeSummary sumAnalysisTime + sumMeta + sumGenesis + sumGenesisSpec + sumGenerator + loCountsObjLists sumFilters - sumDomainSlots - sumDomainBlocks - chainRejecta + MachPerf{..} + BlockProp{..} + Chain{..} = Summary - { sumLogStreams = countOfList objLists - , sumLogObjects = countOfLists objLists - , sumBlocksRejected = countOfList chainRejecta + { sumLogStreams = countListAll objLists + , sumLogObjectsTotal = countListsAll objLists + , sumBlocksRejected = countListAll cRejecta + , sumDomainTime = + DataDomain (Interval minStartRaw maxStopRaw) (Just $ Interval minStartFlt maxStopFlt) + (maxStopRaw `utcTimeDeltaSec` minStartRaw) + (maxStopFlt `utcTimeDeltaSec` minStartFlt) + , sumStartSpread = + DataDomain (Interval minStartRaw maxStartRaw) (Just $ Interval minStartFlt maxStartFlt) + (maxStartRaw `utcTimeDeltaSec` minStartRaw) + (maxStartFlt `utcTimeDeltaSec` minStartFlt) + , sumStopSpread = + DataDomain (Interval minStopRaw maxStopRaw) (Just $ Interval 
minStopFlt maxStopFlt) + (maxStopRaw `utcTimeDeltaSec` minStopRaw) + (maxStopFlt `utcTimeDeltaSec` minStopFlt) + , sumDomainSlots = Prelude.head mpDomainSlots + , sumDomainBlocks = Prelude.head bpDomainBlocks + -- + , cdfLogObjects = cdf stdCentiles (length <$> objLists) + , cdfLogObjectsEmitted = cdf stdCentiles (loCountsObjLists <&> unCount . fst) , .. } where + objLists = loCountsObjLists <&> snd + + (,) minStartRaw maxStartRaw = (minimum &&& maximum) losFirsts + (,) minStopRaw maxStopRaw = (minimum &&& maximum) losLasts + losFirsts = objLists <&> loAt . Prelude.head + losLasts = objLists <&> loAt . Prelude.last + + (,) minStartFlt maxStartFlt = (timeOf *** timeOf) startMinMaxS + (,) minStopFlt maxStopFlt = (timeOf *** timeOf) stopMinMaxS + startMinMaxS = (minimum &&& maximum) slotFirsts + stopMinMaxS = (minimum &&& maximum) slotLasts + slotFirsts = slotDomains <&> low + slotLasts = slotDomains <&> high + slotDomains = catMaybes (ddFiltered <$> mpDomainSlots) + timeOf = unSlotStart . slotStart sumGenesis + sumChainRejectionStats = - chainRejecta + cRejecta <&> fmap fst . filter (not . snd) . 
beAcceptance & concat & foldr' (\k m -> Map.insertWith (+) k 1 m) Map.empty diff --git a/bench/locli/src/Cardano/Command.hs b/bench/locli/src/Cardano/Command.hs index 51dfe31f655..e2f4ec84b60 100644 --- a/bench/locli/src/Cardano/Command.hs +++ b/bench/locli/src/Cardano/Command.hs @@ -1,7 +1,7 @@ {-# OPTIONS_GHC -fmax-pmcheck-models=25000 #-} module Cardano.Command (module Cardano.Command) where -import Cardano.Prelude hiding (State, head) +import Cardano.Prelude hiding (State) import Data.Aeson qualified as Aeson import Data.ByteString qualified as BS @@ -22,9 +22,8 @@ import Cardano.Analysis.MachPerf import Cardano.Analysis.Summary import Cardano.Render import Cardano.Report -import Cardano.Unlog.LogObject hiding (Text) -import Cardano.Util -import Data.CDF +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Util hiding (head) data CommandError = CommandError ChainCommand Text @@ -81,11 +80,10 @@ data ChainCommand | ComputeSummary | RenderSummary RenderFormat TextOutputFile - | ReadSummaries [JsonInputFile Summary] + | ReadSummaries [JsonInputFile SummaryOne] | Compare InputDir (Maybe TextInputFile) TextOutputFile - [( JsonInputFile RunPartial - , JsonInputFile Genesis + [( JsonInputFile SummaryOne , JsonInputFile ClusterPerf , JsonInputFile BlockPropOne)] @@ -233,11 +231,10 @@ parseChainCommand = (optTextInputFile "template" "Template to use as base.") <*> optTextOutputFile "report" "Report .org file to create." 
<*> some - ((,,,) - <$> optJsonInputFile "run-metafile" "The meta.json file of a benchmark run" - <*> optJsonInputFile "shelley-genesis" "Genesis file of the run" - <*> optJsonInputFile "perf" "JSON cluster performance input file" - <*> optJsonInputFile "prop" "JSON block propagation input file" + ((,,) + <$> optJsonInputFile "summary" "JSON analysis summary input file" + <*> optJsonInputFile "perf" "JSON cluster performance input file" + <*> optJsonInputFile "prop" "JSON block propagation input file" )) ]) where @@ -268,17 +265,6 @@ parseTimelineCommentsBP = (x:xs) -> foldl (<|>) x xs [] -> error "Crazy world." -parseRenderFormat :: Parser RenderFormat -parseRenderFormat = - [ Opt.flag' AsJSON (Opt.long "json" <> Opt.help "Full JSON dump output file") - , Opt.flag' AsGnuplot (Opt.long "gnuplot" <> Opt.help "%s-pattern for separate Gnuplot output files, per CDF") - , Opt.flag' AsOrg (Opt.long "org" <> Opt.help "Org mode table output file") - , Opt.flag' AsReport (Opt.long "report" <> Opt.help "Org mode table output file, brief stats") - , Opt.flag' AsPretty (Opt.long "pretty" <> Opt.help "Text report output file") - ] & \case - (x:xs) -> foldl (<|>) x xs - [] -> error "Crazy world." 
- writerOpt :: (RenderFormat -> TextOutputFile -> a) -> String -> RenderFormat -> Parser a writerOpt ctor desc mode = ctor mode <$> optTextOutputFile opt (desc <> descSuf) where @@ -288,7 +274,7 @@ writerOpt ctor desc mode = ctor mode <$> optTextOutputFile opt (desc <> descSuf) AsJSON -> (,) "json" " results as complete JSON dump" AsGnuplot -> (,) "gnuplot" " as individual Gnuplot files" AsOrg -> (,) "org" " as Org-mode table" - AsReport -> (,) "report" " as Org-mode summary table" + AsReport -> (,) "org-report" " as Org-mode summary table" AsPretty -> (,) "pretty" " as text report" writerOpts :: (RenderFormat -> TextOutputFile -> a) -> String -> Parser a @@ -307,11 +293,9 @@ data State , sRun :: Maybe Run , sObjLists :: Maybe [(JsonLogfile, [LogObject])] , sDomSlots :: Maybe (DataDomain SlotNo) - , sDomBlocks :: Maybe (DataDomain BlockNo) -- propagation , sMachViews :: Maybe [(JsonLogfile, MachView)] - , sChain :: Maybe [BlockEvents] - , sChainRejecta :: Maybe [BlockEvents] + , sChain :: Maybe Chain , sBlockProp :: Maybe [BlockPropOne] , sMultiBlockProp :: Maybe MultiBlockProp -- performance @@ -322,10 +306,10 @@ data State , sClusterPerf :: Maybe [ClusterPerf] , sMultiClusterPerf :: Maybe MultiClusterPerf -- - , sSummaries :: Maybe [Summary] + , sSummaries :: Maybe [SummaryOne] } -callComputeSummary :: State -> Either Text Summary +callComputeSummary :: State -> Either Text SummaryOne callComputeSummary = \case State{sRun = Nothing} -> err "a run" @@ -333,29 +317,34 @@ callComputeSummary = State{sObjLists = Just []} -> err "logobjects" State{sClusterPerf = Nothing} -> err "cluster performance results" State{sBlockProp = Nothing} -> err "block propagation results" - State{sChainRejecta = Nothing} -> err "chain rejects" - State{sDomSlots = Nothing} -> err "a slot domain" - State{sDomBlocks = Nothing} -> err "a block domain" + State{sChain = Nothing} -> err "chain" State{ sObjLists = Just (fmap snd -> objLists) - -- , sClusterPerf = Just clusterPerf - -- , 
sBlockProp = Just blockProp - , sChainRejecta = Just chainRejecta - , sDomSlots = Just sumDomainSlots - , sDomBlocks = Just sumDomainBlocks + , sClusterPerf = Just [clusterPerf] + , sBlockProp = Just [blockProp'] + , sChain = Just chain + , sRun = Just Run{..} , ..} -> Right $ - computeSummary sWhen objLists sFilters - sumDomainSlots sumDomainBlocks chainRejecta + computeSummary sWhen metadata genesis genesisSpec generatorProfile + (zip (Count <$> [0..]) objLists) sFilters + clusterPerf blockProp' chain + _ -> err "Impossible to get here." where err = Left . ("Summary of a run requires " <>) sRunAnchor :: State -> Anchor -sRunAnchor State{sRun = Just run, sFilters, sWhen, sDomSlots, sDomBlocks} - = runAnchor run sWhen sFilters sDomSlots sDomBlocks +sRunAnchor State{sRun = Just run, sFilters, sWhen, sClusterPerf, sChain} + = runAnchor run sWhen sFilters + ((sClusterPerf <&> fmap (head . mpDomainSlots) . head & join.join) <|> + (sChain <&> cDomSlots)) + (sChain <&> cDomBlocks) sRunAnchor _ = error "sRunAnchor with no run." sTagsAnchor :: State -> Anchor -sTagsAnchor State{sFilters, sTags, sWhen, sDomSlots, sDomBlocks} - = tagsAnchor sTags sWhen sFilters sDomSlots sDomBlocks +sTagsAnchor State{sFilters, sTags, sWhen, sClusterPerf, sChain} + = tagsAnchor sTags sWhen sFilters + ((sClusterPerf <&> fmap (head . mpDomainSlots) . head & join.join) <|> + (sChain <&> cDomSlots)) + (sChain <&> cDomBlocks) runChainCommand :: State -> ChainCommand -> ExceptT CommandError IO State @@ -423,43 +412,44 @@ runChainCommand s@State{sRun=Just run, sMachViews=Just mvs} -> fltNames) <- readFilters fltfs & firstExceptT (CommandError c) let flts = fltFiles <> fltExprs - (domSlot, domBlock, chainRejecta, chain) <- rebuildChain run flts fltNames mvs - & liftIO + forM_ flts $ + progress "filter" . Q . show + + let chain = rebuildChain run flts fltNames mvs + progress "tip" $ Q . show . beBlock . 
last $ cMainChain chain + pure s { sChain = Just chain - , sChainRejecta = Just chainRejecta - , sDomSlots = Just domSlot - , sDomBlocks = Just domBlock , sFilters = (fltNames, flts) } -- pure s { sChain = Just chain } runChainCommand _ c@RebuildChain{} = missingCommandData c ["run metadata & genesis", "reconstructed chain"] -runChainCommand s - c@(ReadChain f) = do - progress "chain" (Q $ printf "reading chain") - chain <- mapM (Aeson.eitherDecode @BlockEvents) - . filter ((> 5) . LBS.length) - . LBS.split '\n' - <$> LBS.readFile (unJsonInputFile f) - & newExceptT - & firstExceptT (CommandError c . pack) - pure s { sChain = Just chain } - -runChainCommand s@State{sChain=Just chain, sChainRejecta=Just chainRejecta} +runChainCommand _ ReadChain{} = do + pure $ error "ReadChain not implemented" + -- progress "chain" (Q $ printf "reading chain") + -- chain <- mapM (Aeson.eitherDecode @BlockEvents) + -- . filter ((> 5) . LBS.length) + -- . LBS.split '\n' + -- <$> LBS.readFile (unJsonInputFile f) + -- & newExceptT + -- & firstExceptT (CommandError c . 
pack) + -- pure s { sChain = Just chain } + +runChainCommand s@State{sChain=Just Chain{..}} c@(DumpChain f fRej) = do progress "chain" (Q $ printf "dumping chain") - dumpObjects "chain" chain f & firstExceptT (CommandError c) + dumpObjects "chain" cMainChain f & firstExceptT (CommandError c) progress "chain-rejecta" (Q $ printf "dumping chain rejecta") - dumpObjects "chain-rejecta" chainRejecta fRej & firstExceptT (CommandError c) + dumpObjects "chain-rejecta" cRejecta fRej & firstExceptT (CommandError c) pure s runChainCommand _ c@DumpChain{} = missingCommandData c ["chain"] -runChainCommand s@State{sRun=Just run, sChain=Just chain} +runChainCommand s@State{sRun=Just _run, sChain=Just Chain{..}} c@(TimelineChain f comments) = do progress "chain" (Q $ printf "dumping prettyprinted chain") - dumpText "chain" (renderTimeline run (const True) comments chain) f + dumpText "chain" (renderTimeline (const True) comments cMainChain) f & firstExceptT (CommandError c) pure s runChainCommand _ c@TimelineChain{} = missingCommandData c @@ -519,20 +509,20 @@ runChainCommand s@State{sSlots=Just slots} runChainCommand _ c@DumpSlots = missingCommandData c ["filtered slots"] -runChainCommand s@State{sRun=Just run, sSlots=Just slots} +runChainCommand s@State{sRun=Just _run, sSlots=Just slots} c@TimelineSlots = do progress "mach" (Q $ printf "dumping %d slot timelines" $ length slots) dumpAssociatedTextStreams "mach" - (fmap (fmap $ renderTimeline run (const True) []) slots) + (fmap (fmap $ renderTimeline (const True) []) slots) & firstExceptT (CommandError c) pure s runChainCommand _ c@TimelineSlots{} = missingCommandData c ["run metadata & genesis", "filtered slots"] -runChainCommand s@State{sRun=Just run, sChain=Just chain, sDomSlots=Just domS, sDomBlocks=Just domB} +runChainCommand s@State{sRun=Just run, sChain=Just chain@Chain{..}} ComputePropagation = do - progress "block-propagation" $ J (domS, domB) - prop <- blockProp run chain domS domB & liftIO + progress 
"block-propagation" $ J (cDomBlocks, cDomSlots) + prop <- blockProp run chain & liftIO pure s { sBlockProp = Just [prop] } runChainCommand _ c@ComputePropagation = missingCommandData c ["run metadata & genesis", "chain", "data domains for slots & blocks"] @@ -663,14 +653,14 @@ runChainCommand s@State{sSummaries = Just (summary:_)} c@(RenderSummary fmt f) = dumpText "summary" body (modeFilename f "" fmt) & firstExceptT (CommandError c) pure s - where body = renderSummary fmt (sRunAnchor s) summary + where body = renderSummary fmt (sRunAnchor s) (iFields sumFieldsReport) summary runChainCommand _ c@RenderSummary{} = missingCommandData c ["run summary"] runChainCommand s@State{} c@(ReadSummaries fs) = do progress "summaries" (Q $ printf "reading %d run summaries" $ length fs) - xs <- mapConcurrently (fmap (Aeson.eitherDecode @Summary) . LBS.readFile . unJsonInputFile) fs + xs <- mapConcurrently (fmap (Aeson.eitherDecode @SummaryOne) . LBS.readFile . unJsonInputFile) fs & fmap sequence & newExceptT & firstExceptT (CommandError c . 
show) @@ -678,12 +668,12 @@ runChainCommand s@State{} runChainCommand s c@(Compare ede mTmpl outf@(TextOutputFile outfp) runs) = do progress "report" (Q $ printf "rendering report for %d runs" $ length runs) - xs :: [(ClusterPerf, BlockPropOne, Run)] <- forM runs $ - \(mf,gf,cpf,bpf)-> + xs :: [(SummaryOne, ClusterPerf, BlockPropOne)] <- forM runs $ + \(sumf,cpf,bpf)-> (,,) - <$> readJsonData cpf (CommandError c) - <*> readJsonData bpf (CommandError c) - <*> (readRun gf mf & firstExceptT (fromAnalysisError c)) + <$> readJsonData sumf (CommandError c) + <*> readJsonData cpf (CommandError c) + <*> readJsonData bpf (CommandError c) (tmpl, orgReport) <- case xs of baseline:deltas@(_:_) -> liftIO $ Cardano.Report.generate ede mTmpl baseline deltas @@ -734,7 +724,7 @@ runCommand (ChainCommand cs) = do Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing + Nothing Nothing opts :: ParserInfo Command opts = diff --git a/bench/locli/src/Cardano/Render.hs b/bench/locli/src/Cardano/Render.hs index 16b76db63bd..a6e6095cce8 100644 --- a/bench/locli/src/Cardano/Render.hs +++ b/bench/locli/src/Cardano/Render.hs @@ -2,10 +2,9 @@ {-# LANGUAGE ScopedTypeVariables #-} module Cardano.Render (module Cardano.Render) where -import Prelude (head, id, show) +import Prelude (id, show) import Cardano.Prelude hiding (head, show) -import Data.Aeson (ToJSON) import Data.Aeson.Text (encodeToLazyText) import Data.List (dropWhileEnd) import Data.Text qualified as T @@ -19,6 +18,69 @@ import Cardano.Util import Cardano.Analysis.API +-- | Explain the poor human a little bit of what was going on: +data Anchor + = Anchor + { aRuns :: [Text] + , aFilters :: ([FilterName], [ChainFilter]) + , aSlots :: Maybe (DataDomain SlotNo) + , aBlocks :: Maybe (DataDomain BlockNo) + , aVersion :: LocliVersion + , aWhen :: UTCTime + } + +runAnchor :: Run -> UTCTime -> ([FilterName], [ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe 
(DataDomain BlockNo) -> Anchor +runAnchor Run{..} = tagsAnchor [tag metadata] + +tagsAnchor :: [Text] -> UTCTime -> ([FilterName], [ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor +tagsAnchor aRuns aWhen aFilters aSlots aBlocks = + Anchor { aVersion = getLocliVersion, .. } + +renderAnchor :: Anchor -> Text +renderAnchor a = mconcat + [ "runs: ", renderAnchorRuns a, ", " + , renderAnchorNoRuns a + ] + +renderAnchorRuns :: Anchor -> Text +renderAnchorRuns Anchor{..} = mconcat + [ T.intercalate ", " aRuns ] + +renderAnchorFiltersAndDomains :: Anchor -> Text +renderAnchorFiltersAndDomains a@Anchor{..} = mconcat + [ "filters: ", case fst aFilters of + [] -> "unfiltered" + xs -> T.intercalate ", " (unFilterName <$> xs) + , renderAnchorDomains a] + +renderAnchorDomains :: Anchor -> Text +renderAnchorDomains Anchor{..} = mconcat $ + maybe [] ((:[]) . renderDomain "slot" (showText . unSlotNo)) aSlots + <> + maybe [] ((:[]) . renderDomain "block" (showText . unBlockNo)) aBlocks + where renderDomain :: Text -> (a -> Text) -> DataDomain a -> Text + renderDomain ty r DataDomain{..} = mconcat + [ ", ", ty + , " range: raw(", renderIntv r ddRaw, ", " + , showText ddRawCount, " total)" + , " filtered(", maybe "none" + (renderIntv r) ddFiltered, ", " + , showText ddFilteredCount, " total), " + , "filtered ", T.take 4 . showText $ ((/) @Double `on` fromIntegral) + ddFilteredCount ddRawCount + ] + +renderAnchorNoRuns :: Anchor -> Text +renderAnchorNoRuns a@Anchor{..} = mconcat + [ renderAnchorFiltersAndDomains a + , ", ", renderProgramAndVersion aVersion + , ", analysed at ", renderAnchorDate a + ] + +-- Rounds time to seconds. +renderAnchorDate :: Anchor -> Text +renderAnchorDate = showText . posixSecondsToUTCTime . secondsToNominalDiffTime . fromIntegral @Int . round . utcTimeToPOSIXSeconds . 
aWhen + justifyHead, justifyData, justifyCentile, justifyProp :: Int -> Text -> Text justifyHead w = T.center w ' ' justifyData w = T.justifyLeft w ' ' @@ -28,6 +90,24 @@ justifyProp w = T.center w ' ' renderCentiles :: Int -> [Centile] -> [Text] renderCentiles wi = fmap (T.take wi . T.pack . printf "%f" . unCentile) +renderScalar :: a -> Field ISelect I a -> Text +renderScalar v Field{..} = + let wi = width fWidth + packWi = T.pack.take wi + showDt = packWi.dropWhileEnd (== 's').show + showInt = T.pack.printf "%d" + showW64 = T.pack.printf "%d" + in case fSelect of + IInt (($v)->x) -> showInt x + IWord64M (($v)->x) -> smaybe "---" showW64 x + IWord64 (($v)->x) -> showW64 x + IFloat (($v)->x) -> packWi $ printf "%F" x + IDeltaTM (($v)->x) -> smaybe "---" showDt x + IDeltaT (($v)->x) -> showDt x + IDate (($v)->x) -> packWi $ take 10 $ show x + ITime (($v)->x) -> packWi $ take 8 $ drop 11 $ show x + IText (($v)->x) -> T.take wi . T.dropWhileEnd (== 's') $ x + renderFieldCentiles :: a p -> (forall v. Divisible v => CDF p v -> [[v]]) -> Field DSelect p a -> [[Text]] renderFieldCentiles x cdfProj Field{..} = case fSelect of @@ -53,11 +133,12 @@ renderFloatStr w = justifyData w'. T.take w' . T.pack . stripLeadingZero '0':xs@('.':_) -> xs xs -> xs -renderSummary :: RenderFormat -> Anchor -> Summary -> [Text] -renderSummary AsJSON _ x = (:[]) . LT.toStrict $ encodeToLazyText x -renderSummary AsGnuplot _ _ = error "renderSummary: output not supported: gnuplot" -renderSummary AsPretty _ _ = error "renderSummary: output not supported: pretty" -renderSummary _ a Summary{..} = +renderSummary :: forall f a. (a ~ Summary f, TimelineFields a, ToJSON a) + => RenderFormat -> Anchor -> (Field ISelect I a -> Bool) -> a -> [Text] +renderSummary AsJSON _ _ x = (:[]) . 
LT.toStrict $ encodeToLazyText x +renderSummary AsGnuplot _ _ _ = error "renderSummary: output not supported: gnuplot" +renderSummary AsPretty _ _ _ = error "renderSummary: output not supported: pretty" +renderSummary _ a fieldSelr summ = render $ Props { oProps = [ ("TITLE", renderAnchorRuns a ) @@ -69,10 +150,12 @@ renderSummary _ a Summary{..} = , oBody = (:[]) $ Table { tColHeaders = ["Value"] - , tExtended = False - , tApexHeader = Just "Property" - , tColumns = [kvs <&> snd] - , tRowHeaders = kvs <&> fst + , tExtended = True + , tApexHeader = Just "Parameter" + , tColumns = --transpose $ + [fields' <&> renderScalar summ] + -- , tColumns = [kvs <&> snd] + , tRowHeaders = fields' <&> fShortDesc , tSummaryHeaders = [] , tSummaryValues = [] , tFormula = [] @@ -80,17 +163,11 @@ renderSummary _ a Summary{..} = } } where - kvs = [ ("Date", showText $ sumWhen) - , ("Machines", showText $ sumLogStreams) - , ("Log objects", showText $ sumLogObjects) - , ("Slots considered", showText $ ddFilteredCount sumDomainSlots) - , ("Blocks considered", showText $ ddFilteredCount sumDomainBlocks) - , ("Blocks dropped", showText $ sumBlocksRejected) - ] + fields' :: [Field ISelect I a] + fields' = filter fieldSelr timelineFields - -renderTimeline :: forall (a :: Type). TimelineFields a => Run -> (Field ISelect I a -> Bool) -> [TimelineComments a] -> [a] -> [Text] -renderTimeline run flt comments xs = +renderTimeline :: forall (a :: Type). TimelineFields a => (Field ISelect I a -> Bool) -> [TimelineComments a] -> [a] -> [Text] +renderTimeline flt comments xs = concatMap (uncurry fLine) $ zip xs [(0 :: Int)..] where fLine :: a -> Int -> [Text] @@ -103,37 +180,25 @@ renderTimeline run flt comments xs = : concat (fmap (rtCommentary l) comments)) entry :: a -> Text - entry v = renderLineDist $ - \Field{..} -> - let wi = width fWidth - showDt = T.pack.take wi.printf "%-*s" (width fWidth).dropWhileEnd (== 's').show - showW64 = T.pack . 
printf "%*d" wi - in - case fSelect of - IInt (($v)->x) -> T.pack $ printf "%*d" wi x - IWord64M (($v)->x) -> smaybe "---" showW64 x - IWord64 (($v)->x) -> showW64 x - IFloat (($v)->x) -> T.pack $ take wi $ printf "%*F" (width fWidth - 2) x - IDeltaTM (($v)->x) -> smaybe "---" showDt x - IDeltaT (($v)->x) -> showDt x - IText (($v)->x) -> T.take wi . T.dropWhileEnd (== 's') $ x - - fields :: [Field ISelect I a] - fields = filter flt $ timelineFields run + entry = renderLineDist . renderScalar + + fields' :: [Field ISelect I a] + fields' = filter flt timelineFields head1, head2 :: Maybe Text - head1 = if all ((== 0) . T.length . fHead1) fields then Nothing + head1 = if all ((== 0) . T.length . fHead1) fields' then Nothing else Just (renderLineHead (uncurry T.take . ((+1).width.fWidth&&&fHead1))) - head2 = if all ((== 0) . T.length . fHead2) fields then Nothing + head2 = if all ((== 0) . T.length . fHead2) fields' then Nothing else Just (renderLineHead (uncurry T.take . ((+1).width.fWidth&&&fHead2))) -- Different strategies: fields are forcefully separated, -- whereas heads can use the extra space renderLineHead = mconcat . renderLine' justifyHead (toEnum.(+ 1).width.fWidth) + renderLineDist :: (Field ISelect I a -> Text) -> Text renderLineDist = T.intercalate " " . renderLine' justifyData fWidth renderLine' :: (Int -> Text -> Text) -> (Field ISelect I a -> Width) -> (Field ISelect I a -> Text) -> [Text] - renderLine' jfn wfn rfn = fields + renderLine' jfn wfn rfn = fields' <&> \f -> jfn (width $ wfn f) (rfn f) mapRenderCDF :: forall p a. CDFFields a p @@ -142,15 +207,15 @@ mapRenderCDF :: forall p a. 
CDFFields a p -> a p -> [[Text]] mapRenderCDF fieldSelr centiSelr fSampleProps x = - fields -- list of fields + fields' -- list of fields <&> renderFieldCentiles x cdfSamplesProps -- for each field, list of per-sample lists of properties & (transpose [renderCentiles 6 centiles] :) & transpose -- for each sample, list of per-field lists of properties & fmap (fmap $ T.intercalate " ") where -- Pick relevant fields: - fields :: [Field DSelect p a] - fields = filter fieldSelr cdfFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields -- Pick relevant centiles: subsetCenti :: CDF p b -> CDF p b @@ -220,15 +285,15 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr _c2a centileSelr AsOrg x = , oConstants = [] , oBody = (:[]) $ Table - { tColHeaders = fields <&> fId + { tColHeaders = fields' <&> fId , tExtended = True , tApexHeader = Just "centile" - , tColumns = fields <&> fmap (T.intercalate ":") . renderFieldCentilesWidth x cdfSamplesProps + , tColumns = fields' <&> fmap (T.intercalate ":") . renderFieldCentilesWidth x cdfSamplesProps , tRowHeaders = percSpecs <&> T.take 6 . T.pack . printf "%.4f" . unCentile , tSummaryHeaders = ["avg", "samples"] - , tSummaryValues = [ fields <&> + , tSummaryValues = [ fields' <&> \f@Field{..} -> mapField x (T.take (width fWidth + 1) . T.pack . printf "%f" . cdfAverageVal) f - , fields <&> + , fields' <&> \f@Field{} -> mapField x (T.pack . printf "%d" . cdfSize) f ] & transpose , tFormula = [] @@ -239,8 +304,8 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr _c2a centileSelr AsOrg x = cdfSamplesProps :: Divisible c => CDF p c -> [[c]] cdfSamplesProps = fmap (pure . unliftCDFVal cdfIx . snd) . cdfSamples . restrictCDF - fields :: [Field DSelect p a] - fields = filterFields fieldSelr + fields' :: [Field DSelect p a] + fields' = filterFields fieldSelr restrictCDF :: forall c. 
CDF p c -> CDF p c restrictCDF = maybe id subsetCDF centileSelr @@ -265,20 +330,20 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr aspect _centileSelr AsReport x = , tExtended = True , tApexHeader = Just "metric" , tColumns = transpose $ - fields <&> + fields' <&> fmap (T.take 6 . T.pack . printf "%f") . mapFieldWithKey x (snd hdrsProjs) - , tRowHeaders = fields <&> fShortDesc + , tRowHeaders = fields' <&> fShortDesc , tSummaryHeaders = [] , tSummaryValues = [] , tFormula = [] , tConstants = [("nSamples", - fields <&> mapField x (T.pack . show . cdfSize) & head)] + fields' <&> mapField x (T.pack . show . cdfSize) & head)] } } where - fields :: [Field DSelect p a] - fields = filter fieldSelr cdfFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields hdrsProjs :: forall v. (Divisible v) => ([Text], Field DSelect p a -> CDF p v -> [Double]) hdrsProjs = aspectColHeadersAndProjections aspect @@ -289,7 +354,7 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr aspect _centileSelr AsReport x = OfOverallDataset -> (,) ["average", "CoV", "min", "max", "stddev", "range", "precision", "size"] - \Field{..} c@CDF{cdfRange=(cdfMin, cdfMax), ..} -> + \Field{..} c@CDF{cdfRange=Interval cdfMin cdfMax, ..} -> let avg = cdfAverageVal c & toDouble in [ avg , cdfStddev / avg @@ -306,7 +371,7 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr aspect _centileSelr AsReport x = (\Field{..} -> cdfArity (error "Cannot do inter-CDF statistics on plain CDFs") - (\CDF{cdfAverage=cdfAvg@CDF{cdfRange=(minAvg, maxAvg),..}} -> + (\CDF{cdfAverage=cdfAvg@CDF{cdfRange=Interval minAvg maxAvg,..}} -> let avg = cdfAverageVal cdfAvg & toDouble in [ avg , cdfStddev / avg @@ -326,15 +391,15 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = <> sizeAvg where head1, head2 :: Maybe Text - head1 = if all ((== 0) . T.length . fHead1) fields then Nothing + head1 = if all ((== 0) . T.length . fHead1) fields' then Nothing else Just (renderLineHead1 (uncurry T.take . ((+1) . width . 
fWidth &&& fHead1))) - head2 = if all ((== 0) . T.length . fHead2) fields then Nothing + head2 = if all ((== 0) . T.length . fHead2) fields' then Nothing else Just (renderLineHead2 (uncurry T.take . ((+1) . width . fWidth &&& fHead2))) renderLineHead1 = mconcat . (" ":) . renderLine' justifyHead (toEnum . (+ 1) . width . fWidth) renderLineHead2 = mconcat . (" %tile":) . renderLine' justifyHead (toEnum . (+ 1) . width . fWidth) pLines :: [Text] - pLines = fields + pLines = fields' <&> -- fmap (T.intercalate " ") . fmap T.concat . @@ -343,8 +408,8 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = & transpose & fmap (T.intercalate " ") - fields :: [Field DSelect p a] - fields = filter fieldSelr cdfFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields centiles :: [Centile] centiles = mapSomeFieldCDF @@ -363,12 +428,12 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = (\f -> flip (renderField justifyData fWidth) f $ const $ mapSomeFieldCDF (fit (fWidth f) .T.pack . printf "%F" . cdfAverageVal) x (fSelect f)) - <$> fields + <$> fields' , (justifyProp 6 "size" :) $ (\f -> flip (renderField justifyHead fWidth) f $ const $ mapSomeFieldCDF (fit (fWidth f) . T.pack . show . cdfSize) x (fSelect f)) - <$> fields + <$> fields' ] fit :: Width -> Text -> Text @@ -380,7 +445,7 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = else T.take w t renderLine' :: (Int -> Text -> Text) -> (Field DSelect p a -> Width) -> (Field DSelect p a -> Text) -> [Text] - renderLine' jfn wfn rfn = renderField jfn wfn rfn <$> fields + renderLine' jfn wfn rfn = renderField jfn wfn rfn <$> fields' renderField :: forall f. 
(Int -> Text -> Text) -> (f -> Width) -> (f -> Text) -> f -> Text renderField jfn wfn rend f = jfn (width $ wfn f) (rend f) diff --git a/bench/locli/src/Cardano/Report.hs b/bench/locli/src/Cardano/Report.hs index b96f4c6c0c0..288026f7220 100644 --- a/bench/locli/src/Cardano/Report.hs +++ b/bench/locli/src/Cardano/Report.hs @@ -9,11 +9,9 @@ where import Cardano.Prelude -import Data.Aeson (FromJSON (..), ToJSON (..), object) import Data.ByteString qualified as BS import Data.HashMap.Lazy qualified as HM import Data.Map.Strict qualified as Map -import Data.List (last) import Data.Text qualified as T import Data.Text.Lazy qualified as LT import Data.Time.Clock @@ -31,30 +29,30 @@ newtype Author = Author { unAuthor :: Text } deriving newtype (FromJSON, T newtype Revision = Revision { unRevision :: Int } deriving newtype (FromJSON, ToJSON) newtype ShortId = ShortId { unShortId :: Text } deriving newtype (FromJSON, ToJSON) -data Report - = Report - { rAuthor :: !Author - , rDate :: !UTCTime - , rRevision :: !Revision - , rLocliVersion :: !LocliVersion - , rTarget :: !Version +data ReportMeta + = ReportMeta + { rmAuthor :: !Author + , rmDate :: !UTCTime + , rmRevision :: !Revision + , rmLocliVersion :: !LocliVersion + , rmTarget :: !Version } -instance ToJSON Report where - toJSON Report{..} = object - [ "author" .= rAuthor - , "date" .= rDate - , "revision" .= rRevision - , "locli" .= rLocliVersion - , "target" .= rTarget +instance ToJSON ReportMeta where + toJSON ReportMeta{..} = object + [ "author" .= rmAuthor + , "date" .= rmDate + , "revision" .= rmRevision + , "locli" .= rmLocliVersion + , "target" .= rmTarget ] -getReport :: Version -> Maybe Revision -> IO Report -getReport rTarget mrev = do - rAuthor <- (getUserEntryForName =<< getLoginName) <&> Author . T.pack . 
userGecos - rDate <- getCurrentTime - let rRevision = fromMaybe (Revision 1) mrev - rLocliVersion = getLocliVersion - pure Report{..} +getReport :: Version -> Maybe Revision -> IO ReportMeta +getReport rmTarget mrev = do + rmAuthor <- (getUserEntryForName =<< getLoginName) <&> Author . T.pack . userGecos + rmDate <- getCurrentTime + let rmRevision = fromMaybe (Revision 1) mrev + rmLocliVersion = getLocliVersion + pure ReportMeta{..} data Workload = WValue @@ -68,26 +66,36 @@ instance ToJSON Workload where WPlutusLoopSECP -> "Plutus SECP loop" data Section where - STable :: CDFFields a p => - { sData :: !(a p) - , sFieldSelector :: !(Field DSelect p a -> Bool) - , sDataRef :: !Text - , sOrgTableSrc :: !Text - , sTitle :: !Text + STable :: + { sData :: !(a p) + , sFields :: !FSelect + , sNameCol :: !Text + , sValueCol :: !Text + , sDataRef :: !Text + , sOrgFile :: !Text + , sTitle :: !Text } -> Section -defaultReportSections :: MachPerf (CDF I) -> BlockProp I -> [Section] -defaultReportSections machPerf blockProp = - [ STable machPerf mtFieldsReport "perf" "clusterperf.report.org" +summaryReportSection :: SummaryOne -> Section +summaryReportSection summ = + STable summ (ISel @SummaryOne $ iFields sumFieldsReport) "Parameter" "Value" "summary" "summary.org" + "Overall run parameters" + +analysesReportSections :: MachPerf (CDF I) -> BlockProp I -> [Section] +analysesReportSections mp bp = + [ STable mp (DSel @MachPerf $ dFields mtFieldsReport) "metric" "average" "perf" "clusterperf.report.org" "Resource Usage" - , STable blockProp bpFieldSelectForger "forge" "blockprop.forger.org" + , STable bp (DSel @BlockProp $ dFields bpFieldsControl) "metric" "average" "control" "blockprop.control.org" + "Anomaly control" + + , STable bp (DSel @BlockProp $ dFields bpFieldsForger) "metric" "average" "forge" "blockprop.forger.org" "Forging" - , STable blockProp bpFieldSelectPeers "peers" "blockprop.peers.org" + , STable bp (DSel @BlockProp $ dFields bpFieldsPeers) "metric" "average" 
"peers" "blockprop.peers.org" "Individual peer propagation" - , STable blockProp bpFieldSelectEndToEnd "end2end" "blockprop.endtoend.org" + , STable bp (DSel @BlockProp $ dFields bpFieldsEndToEnd) "metric" "average" "end2end" "blockprop.endtoend.org" "End-to-end propagation" ] @@ -95,12 +103,12 @@ defaultReportSections machPerf blockProp = -- Representation of a run, structured for template generator's needs. -- -liftTmplRun :: Run -> TmplRun -liftTmplRun Run{generatorProfile=GeneratorProfile{..}, ..} = +liftTmplRun :: Summary a -> TmplRun +liftTmplRun Summary{sumGenerator=GeneratorProfile{..} + ,sumMeta=meta@Metadata{..}} = TmplRun - { trMeta = metadata - , trShortId = ShortId (batch metadata) - , trManifest = manifest metadata & unsafeShortenManifest 5 + { trMeta = meta + , trManifest = manifest & unsafeShortenManifest 5 , trWorkload = case ( plutusMode & fromMaybe False , plutusLoopScript & fromMaybe "" & FS.takeFileName & FS.dropExtension ) of @@ -114,7 +122,6 @@ liftTmplRun Run{generatorProfile=GeneratorProfile{..}, ..} = data TmplRun = TmplRun { trMeta :: !Metadata - , trShortId :: !ShortId , trWorkload :: !Workload , trManifest :: !Manifest } @@ -123,7 +130,6 @@ instance ToJSON TmplRun where toJSON TmplRun{trManifest=Manifest{..},..} = object [ "meta" .= trMeta - , "shortId" .= trShortId , "workload" .= trWorkload , "branch" .= mNodeBranch , "ver" .= mNodeApproxVer @@ -145,19 +151,25 @@ liftTmplSection = STable{..} -> TmplTable { tsTitle = sTitle + , tsNameCol = sNameCol + , tsValueCol = sValueCol , tsDataRef = sDataRef - , tsOrgTableSrc = sOrgTableSrc - , tsRowPrecs = fs <&> fromEnum . 
fPrecision + , tsOrgFile = sOrgFile + , tsRowPrecs = fs <&> fromEnum , tsVars = [ ("nSamples", "Sample count") ] } - where fs = filterFields sFieldSelector + where fs = case sFields of + ISel sel -> filter sel timelineFields <&> fPrecision + DSel sel -> filter sel cdfFields <&> fPrecision data TmplSection = TmplTable { tsTitle :: !Text + , tsNameCol :: !Text + , tsValueCol :: !Text , tsDataRef :: !Text - , tsOrgTableSrc :: !Text + , tsOrgFile :: !Text , tsRowPrecs :: ![Int] , tsVars :: ![(Text, Text)] -- map from Org constant name to description } @@ -165,8 +177,10 @@ data TmplSection instance ToJSON TmplSection where toJSON TmplTable{..} = object [ "title" .= tsTitle + , "nameCol" .= tsNameCol + , "valueCol" .= tsValueCol , "dataRef" .= tsDataRef - , "orgFile" .= tsOrgTableSrc + , "orgFile" .= tsOrgFile -- Yes, strange as it is, this is the encoding to ease iteration in ED-E. , "rowPrecs" .= tsRowPrecs , "vars" .= Map.fromList (zip tsVars ([0..] <&> flip T.replicate ">" . (length tsVars -)) @@ -177,9 +191,9 @@ instance ToJSON TmplSection where ] generate :: InputDir -> Maybe TextInputFile - -> (ClusterPerf, BlockPropOne, Run) -> [(ClusterPerf, BlockPropOne, Run)] + -> (SummaryOne, ClusterPerf, BlockPropOne) -> [(SummaryOne, ClusterPerf, BlockPropOne)] -> IO (ByteString, Text) -generate (InputDir ede) mReport (cp, bp, base) rest = do +generate (InputDir ede) mReport (summ, cp, bp) rest = do ctx <- getReport (last restTmpls & trManifest & mNodeApproxVer) Nothing tmplRaw <- BS.readFile (maybe defaultReportPath unTextInputFile mReport) tmpl <- parseWith defaultSyntax (includeFile ede) "report" tmplRaw @@ -187,11 +201,8 @@ generate (InputDir ede) mReport (cp, bp, base) rest = do \x -> renderWith fenv x (env ctx baseTmpl restTmpls) where - baseTmpl = liftTmplRun base - restTmpls = fmap (liftTmplRun. thd3) rest - - sections :: [Section] - sections = defaultReportSections cp bp + baseTmpl = liftTmplRun summ + restTmpls = fmap (liftTmplRun. 
fst3) rest defaultReportPath = ede <> "/report.ede" fenv = HM.fromList @@ -200,6 +211,7 @@ generate (InputDir ede) mReport (cp, bp, base) rest = do [ "report" .= rc , "base" .= b , "runs" .= rs - , "sections" .= (liftTmplSection <$> sections) + , "summary" .= liftTmplSection (summaryReportSection summ) + , "analyses" .= (liftTmplSection <$> analysesReportSections cp bp) , "dictionary" .= metricDictionary ] diff --git a/bench/locli/src/Cardano/Unlog/LogObject.hs b/bench/locli/src/Cardano/Unlog/LogObject.hs index 299b14f1e88..46f116fe805 100644 --- a/bench/locli/src/Cardano/Unlog/LogObject.hs +++ b/bench/locli/src/Cardano/Unlog/LogObject.hs @@ -14,7 +14,6 @@ import Prelude (head, id, show, unzip3) import Cardano.Prelude hiding (Text, head, show) import Control.Monad (fail) -import Data.Aeson (FromJSON(..), ToJSON(..), Value(..), Object, (.:), (.:?)) import Data.Aeson qualified as AE import Data.Aeson.KeyMap qualified as KeyMap import Data.Aeson.Types (Parser) diff --git a/bench/locli/src/Cardano/Unlog/Resources.hs b/bench/locli/src/Cardano/Unlog/Resources.hs index 2fef0b5e2b5..428526684cc 100644 --- a/bench/locli/src/Cardano/Unlog/Resources.hs +++ b/bench/locli/src/Cardano/Unlog/Resources.hs @@ -15,8 +15,6 @@ module Cardano.Unlog.Resources import Cardano.Prelude -import Data.Time.Clock (UTCTime) - import Data.Accum import Data.CDF import Cardano.Util diff --git a/bench/locli/src/Cardano/Util.hs b/bench/locli/src/Cardano/Util.hs index 02d401202b4..7afe8deaec2 100644 --- a/bench/locli/src/Cardano/Util.hs +++ b/bench/locli/src/Cardano/Util.hs @@ -1,9 +1,17 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveFunctor #-} {-# OPTIONS_GHC -Wno-orphans #-} {-# OPTIONS_GHC -Wno-incomplete-patterns #-} {- HLINT ignore "Use list literal pattern" -} module Cardano.Util ( module Prelude + , module Util + , module Data.Aeson + , module Data.IntervalMap.FingerTree + , module Data.SOP.Strict , module Data.List.Split + , module Data.Time.Clock + , module Data.Time.Clock.POSIX , 
module Data.Tuple.Extra , module Cardano.Ledger.BaseTypes , module Control.Arrow @@ -16,20 +24,26 @@ module Cardano.Util ) where -import Prelude (String, error) +import Prelude (String, error, head, last) import Cardano.Prelude +import Util hiding (fst3, snd3, third3, uncurry3, firstM, secondM) +import Data.Aeson (FromJSON (..), ToJSON (..), Object, Value (..), (.:), (.:?), withObject, object) +import Data.Aeson qualified as AE import Data.Tuple.Extra hiding ((&&&), (***)) import Control.Arrow ((&&&), (***)) import Control.Applicative ((<|>)) import Control.Concurrent.Async (forConcurrently, forConcurrently_, mapConcurrently, mapConcurrently_) import Control.DeepSeq qualified as DS import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT) -import Data.Aeson (ToJSON, encode) import Data.ByteString.Lazy.Char8 qualified as LBS +import Data.IntervalMap.FingerTree (Interval (..), low, high, point) import Data.List (span) import Data.List.Split (chunksOf) import Data.Text qualified as T +import Data.SOP.Strict +import Data.Time.Clock (NominalDiffTime, UTCTime (..), diffUTCTime) +import Data.Time.Clock.POSIX import Data.Vector (Vector) import Data.Vector qualified as Vec import GHC.Base (build) @@ -42,6 +56,29 @@ import Ouroboros.Consensus.Util.Time import Cardano.Ledger.BaseTypes (StrictMaybe (..), fromSMaybe) +-- * Data.IntervalMap.FingerTree.Interval +-- +deriving instance FromJSON a => (FromJSON (Interval a)) +deriving instance Functor Interval +deriving instance ToJSON a => (ToJSON (Interval a)) +deriving instance NFData a => (NFData (Interval a)) + +unionIntv, intersectIntv :: Ord a => [Interval a] -> Interval a +unionIntv xs = Interval (low lo) (high hi) + where lo = minimumBy (compare `on` low) xs + hi = maximumBy (compare `on` high) xs +intersectIntv xs = Interval (low lo) (high hi) + where lo = maximumBy (compare `on` low) xs + hi = minimumBy (compare `on` high) xs + +renderIntv :: (a -> Text) -> Interval a -> Text +renderIntv f (Interval lo hi) = f lo 
<> "-" <> f hi + +intvDurationSec :: Interval UTCTime -> NominalDiffTime +intvDurationSec = uncurry diffUTCTime . (high &&& low) + +-- * SMaybe +-- type SMaybe a = StrictMaybe a instance Alternative StrictMaybe where @@ -104,14 +141,6 @@ mapConcurrentlyPure f = mapConcurrently (evaluate . DS.force . f) -mapAndUnzip :: (a -> (b, c)) -> [a] -> ([b], [c]) -mapAndUnzip _ [] = ([], []) -mapAndUnzip f (x:xs) - = let (r1, r2) = f x - (rs1, rs2) = mapAndUnzip f xs - in - (r1:rs1, r2:rs2) - mapHead :: (a -> a) -> [a] -> [a] mapHead f (x:xs) = f x:xs mapHead _ [] = error "mapHead: partial" @@ -144,7 +173,7 @@ progress key = putStr . T.pack . \case R x -> printf "{ \"%s\": %s }\n" key x Q x -> printf "{ \"%s\": \"%s\" }\n" key x L xs -> printf "{ \"%s\": \"%s\" }\n" key (Cardano.Prelude.intercalate "\", \"" xs) - J x -> printf "{ \"%s\": %s }\n" key (LBS.unpack $ encode x) + J x -> printf "{ \"%s\": %s }\n" key (LBS.unpack $ AE.encode x) -- Dumping to files -- @@ -170,3 +199,15 @@ norm2Tuple ((a, b), c) = (a, (b, c)) {-# INLINE showText #-} showText :: Show a => a -> Text showText = T.pack . show + +roundUTCTimeSec, roundUTCTimeDay :: UTCTime -> UTCTime +roundUTCTimeSec = + posixSecondsToUTCTime . fromIntegral @Integer . truncate . 
utcTimeToPOSIXSeconds +roundUTCTimeDay (UTCTime day _) = UTCTime day 0 + +utcTimeDeltaSec :: UTCTime -> UTCTime -> Int +utcTimeDeltaSec x y = diffUTCTime x y & round + +foldEmpty :: r -> ([a] -> r) -> [a] -> r +foldEmpty r _ [] = r +foldEmpty _ f l = f l diff --git a/bench/locli/src/Data/CDF.hs b/bench/locli/src/Data/CDF.hs index b585db6c984..748fedfd365 100644 --- a/bench/locli/src/Data/CDF.hs +++ b/bench/locli/src/Data/CDF.hs @@ -51,12 +51,10 @@ module Data.CDF , module Data.SOP.Strict ) where -import Prelude ((!!), head, show) +import Prelude ((!!), show) import Cardano.Prelude hiding (head, show) -import Data.Aeson (FromJSON(..), ToJSON(..)) import Data.SOP.Strict -import Data.Time.Clock (NominalDiffTime) import Data.Vector qualified as Vec import Statistics.Sample qualified as Stat @@ -158,7 +156,7 @@ data CDF p a = { cdfSize :: Int , cdfAverage :: p Double , cdfStddev :: Double - , cdfRange :: (a, a) + , cdfRange :: Interval a , cdfSamples :: [(Centile, p a)] } deriving (Functor, Generic) @@ -207,7 +205,7 @@ zeroCDF = { cdfSize = 0 , cdfAverage = liftCDFVal 0 cdfIx , cdfStddev = 0 - , cdfRange = (0, 0) + , cdfRange = Interval 0 0 , cdfSamples = mempty } @@ -218,7 +216,7 @@ cdf centiles (sort -> sorted) = { cdfSize = size , cdfAverage = I . fromDouble $ Stat.mean doubleVec , cdfStddev = Stat.stdDev doubleVec - , cdfRange = (mini, maxi) + , cdfRange = Interval mini maxi , cdfSamples = centiles <&> \spec -> @@ -257,7 +255,7 @@ liftCDFVal x = \case CDF2 -> CDF { cdfSize = 1 , cdfAverage = I $ toDouble x , cdfStddev = 0 - , cdfRange = (x, x) + , cdfRange = point x , cdfSamples = [] , .. } @@ -267,12 +265,12 @@ unliftCDFVal CDF2 CDF{cdfAverage=I cdfAverage} = (1 :: a) `divide` (1 / toDouble unliftCDFValExtra :: forall a p. 
Divisible a => CDFIx p -> p a -> [a] unliftCDFValExtra CDFI (I x) = [x] -unliftCDFValExtra i@CDF2 c@CDF{cdfRange=(mi, ma), ..} = [ mean - , mi - , ma - , mean - stddev - , mean + stddev - ] +unliftCDFValExtra i@CDF2 c@CDF{cdfRange=Interval mi ma, ..} = [ mean + , mi + , ma + , mean - stddev + , mean + stddev + ] where mean = unliftCDFVal i c stddev = (1 :: a) `divide` (1 / cdfStddev) @@ -304,7 +302,7 @@ data Combine p a = Combine { cWeightedAverages :: !([(Int, Double)] -> Double) , cStddevs :: !([Double] -> Double) - , cRanges :: !([(a, a)] -> (a, a)) + , cRanges :: !([Interval a] -> Interval a) , cWeightedSamples :: !([(Int, a)] -> a) , cCDF :: !([p a] -> Either CDFError (CDF I a)) } @@ -313,14 +311,11 @@ stdCombine1 :: forall a. (Divisible a) => [Centile] -> Combine I a stdCombine1 cs = Combine { cWeightedAverages = weightedAverage - , cRanges = outerRange + , cRanges = unionIntv , cStddevs = maximum -- it's an approximation , cWeightedSamples = weightedAverage , cCDF = Right . cdf cs . 
fmap unI } - where - outerRange xs = (,) (minimum $ fst <$> xs) - (maximum $ snd <$> xs) stdCombine2 :: Divisible a => [Centile] -> Combine (CDF I) a stdCombine2 cs = diff --git a/bench/locli/src/Data/DataDomain.hs b/bench/locli/src/Data/DataDomain.hs index 8331f5770e3..de623be288c 100644 --- a/bench/locli/src/Data/DataDomain.hs +++ b/bench/locli/src/Data/DataDomain.hs @@ -1,48 +1,56 @@ {-# LANGUAGE DeriveAnyClass #-} -module Data.DataDomain (module Data.DataDomain) where +{-# OPTIONS_GHC -Wno-orphans #-} +module Data.DataDomain + ( module Data.DataDomain + ) +where import Cardano.Prelude -import Data.Aeson (FromJSON, ToJSON) +import Cardano.Util + + +-- * DataDomain +-- data DataDomain a = DataDomain - { ddRawFirst :: !a - , ddRawLast :: !a - , ddFilteredFirst :: !(Maybe a) - , ddFilteredLast :: !(Maybe a) - , ddRawCount :: Int - , ddFilteredCount :: Int + { ddRaw :: !(Interval a) + , ddFiltered :: !(Maybe (Interval a)) + , ddRawCount :: !Int + , ddFilteredCount :: !Int } deriving (Generic, Show, ToJSON, FromJSON) deriving anyclass NFData -- Perhaps: Plutus.V1.Ledger.Slot.SlotRange = Interval Slot +dataDomainFilterRatio :: DataDomain a -> Double +dataDomainFilterRatio DataDomain{..} = + fromIntegral ddFilteredCount / fromIntegral ddRawCount + mkDataDomainInj :: a -> a -> (a -> Int) -> DataDomain a -mkDataDomainInj f l measure = DataDomain f l (Just f) (Just l) delta delta - where delta = measure l - measure f +mkDataDomainInj f l measure = + DataDomain (Interval f l) (Just (Interval f l)) delta delta + where delta = measure l - measure f mkDataDomain :: a -> a -> a -> a -> (a -> Int) -> DataDomain a mkDataDomain f l f' l' measure = - DataDomain f l (Just f') (Just l') (measure l - measure f) (measure l' - measure f') + DataDomain (Interval f l) (Just (Interval f' l')) + (measure l - measure f) (measure l' - measure f') -dataDomainsMergeInner :: Ord a => [DataDomain a] -> DataDomain a -dataDomainsMergeInner xs = +unionDataDomains :: Ord a => [DataDomain a] -> 
DataDomain a +unionDataDomains xs = DataDomain - { ddRawFirst = maximum $ xs <&> ddRawFirst - , ddRawLast = minimum $ xs <&> ddRawLast - , ddFilteredFirst = bool (Just . maximum $ xs & mapMaybe ddFilteredFirst) Nothing (null xs) - , ddFilteredLast = bool (Just . maximum $ xs & mapMaybe ddFilteredLast) Nothing (null xs) - , ddRawCount = sum $ xs <&> ddRawCount - , ddFilteredCount = sum $ xs <&> ddFilteredCount + { ddRaw = unionIntv $ xs <&> ddRaw + , ddFiltered = foldEmpty Nothing (Just . unionIntv) $ ddFiltered `mapMaybe` xs + , ddRawCount = sum $ xs <&> ddRawCount + , ddFilteredCount = sum $ xs <&> ddFilteredCount } -dataDomainsMergeOuter :: Ord a => [DataDomain a] -> DataDomain a -dataDomainsMergeOuter xs = +intersectDataDomains :: Ord a => [DataDomain a] -> DataDomain a +intersectDataDomains xs = DataDomain - { ddRawFirst = minimum $ xs <&> ddRawFirst - , ddRawLast = maximum $ xs <&> ddRawLast - , ddFilteredFirst = minimum $ xs <&> ddFilteredFirst - , ddFilteredLast = maximum $ xs <&> ddFilteredLast - , ddRawCount = sum $ xs <&> ddRawCount - , ddFilteredCount = sum $ xs <&> ddFilteredCount + { ddRaw = intersectIntv $ xs <&> ddRaw + , ddFiltered = foldEmpty Nothing (Just . 
intersectIntv) $ ddFiltered `mapMaybe` xs + , ddRawCount = sum $ xs <&> ddRawCount + , ddFilteredCount = sum $ xs <&> ddFilteredCount } diff --git a/bench/locli/test/Test/Analysis/CDF.hs b/bench/locli/test/Test/Analysis/CDF.hs index 4615e7009ca..f265a1fa0b8 100644 --- a/bench/locli/test/Test/Analysis/CDF.hs +++ b/bench/locli/test/Test/Analysis/CDF.hs @@ -2,7 +2,6 @@ {-# OPTIONS_GHC -Wno-missing-signatures #-} module Test.Analysis.CDF where -import Prelude (head) import Cardano.Prelude hiding (handle, head) import Hedgehog @@ -68,7 +67,7 @@ prop_CDF_I_2x2 = property $ cdfI_2x2 === { cdfSize = 2 , cdfAverage = I 0.5 , cdfStddev = 0.7071067811865476 - , cdfRange = (0.0,1.0) + , cdfRange = Interval 0.0 1.0 , cdfSamples = [(Centile 0.25,I 0.0) ,(Centile 0.75,I 1.0)]} @@ -81,20 +80,20 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 0.0 - , cdfRange = (1.0,1.0) + , cdfRange = Interval 1.0 1.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 1.0)]} , cdfStddev = 1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 3 , cdfAverage = I 0.0 , cdfStddev = 0.0 - , cdfRange = (0.0,0.0) + , cdfRange = Interval 0.0 0.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 0.0) @@ -104,7 +103,7 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 0.0 - , cdfRange = (1.0,1.0) + , cdfRange = Interval 1.0 1.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 1.0) @@ -114,7 +113,7 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 2.0 , cdfStddev = 0.0 - , cdfRange = (2.0,2.0) + , cdfRange = Interval 2.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 2.0) ,(Centile 0.5, I 2.0) @@ -128,20 +127,20 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 
1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 2.0)]} , cdfStddev = 1.0 - , cdfRange = (-1.0,3.0) + , cdfRange = Interval (-1.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 3 , cdfAverage = I 0.0 , cdfStddev = 1.0 - , cdfRange = (-1.0,1.0) + , cdfRange = Interval (-1.0) 1.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 0.0) @@ -151,7 +150,7 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) @@ -161,7 +160,7 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 2.0 , cdfStddev = 1.0 - , cdfRange = (1.0,3.0) + , cdfRange = Interval 1.0 3.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 2.0) @@ -175,20 +174,20 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = property $ cdf2_3x3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 2.0 - , cdfRange = (-1.0,3.0) + , cdfRange = Interval (-1.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 3.0)]} , cdfStddev = 1.0 - , cdfRange = (-3.0,5.0) + , cdfRange = Interval (-3.0) 5.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 9 , cdfAverage = I 0.0 , cdfStddev = 1.0 - , cdfRange = (-3.0,3.0) + , cdfRange = Interval (-3.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 0.0) @@ -198,7 +197,7 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = property $ cdf2_3x3x3sh === { cdfSize = 9 , cdfAverage = I 1.0 , cdfStddev = 1.0 - , cdfRange = (-2.0,4.0) + , cdfRange = Interval (-2.0) 4.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) @@ -208,8 +207,8 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = 
property $ cdf2_3x3x3sh === { cdfSize = 9 , cdfAverage = I 2.0 , cdfStddev = 1.0 - , cdfRange = (-1.0,5.0) - , cdfSamples = + , cdfRange = Interval (-1.0) 5.0 + , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 2.0) ,(Centile 0.8333333333333333, I 3.0)]})]} diff --git a/nix/workbench/analyse.sh b/nix/workbench/analyse.sh index cf2542475f9..f574c5dd576 100644 --- a/nix/workbench/analyse.sh +++ b/nix/workbench/analyse.sh @@ -129,7 +129,7 @@ case "$op" in compute-multi-propagation multi-propagation-json multi-propagation-org - multi-propagation-{forger,peers,endtoend} + multi-propagation-{control,forger,peers,endtoend} multi-propagation-gnuplot multi-propagation-full ) @@ -144,7 +144,7 @@ case "$op" in read-propagations propagation-json propagation-org - propagation-{forger,peers,endtoend} + propagation-{control,forger,peers,endtoend} propagation-gnuplot propagation-full @@ -176,7 +176,7 @@ case "$op" in compute-propagation propagation-json propagation-org - propagation-{forger,peers,endtoend} + propagation-{control,forger,peers,endtoend} propagation-gnuplot propagation-full @@ -211,7 +211,7 @@ case "$op" in compute-propagation propagation-json propagation-org - propagation-{forger,peers,endtoend} + propagation-{control,forger,peers,endtoend} propagation-gnuplot propagation-full ) @@ -220,6 +220,7 @@ case "$op" in ;; re-block-propagation | reblockprop | rebp ) + fail "re-block-propagation is broken: read-chain not implemented" local script=( read-chain chain-timeline @@ -227,7 +228,7 @@ case "$op" in compute-propagation propagation-json propagation-org - propagation-{forger,peers,endtoend} + propagation-{control,forger,peers,endtoend} propagation-gnuplot propagation-full ) @@ -355,28 +356,29 @@ case "$op" in v1=("${v0[@]/#logs/ 'unlog' --host-from-log-filename ${analysis_allowed_loanys[*]/#/--ok-loany } ${logfiles[*]/#/--log } }") v2=("${v1[@]/#context/ 'meta-genesis' --run-metafile \"$dir\"/meta.json --shelley-genesis \"$dir\"/genesis-shelley.json 
}") - v4=("${v2[@]/#read-chain/ 'read-chain' --chain \"$adir\"/chain.json}") - v5=("${v4[@]/#rebuild-chain/ 'rebuild-chain' ${filters[@]}}") - v6=("${v5[@]/#dump-chain/ 'dump-chain' --chain \"$adir\"/chain.json --chain-rejecta \"$adir\"/chain-rejecta.json}") - v7=("${v6[@]/#chain-timeline/ 'timeline-chain' --timeline \"$adir\"/chain.txt ${filter_reasons:+--filter-reasons} ${chain_errors:+--chain-errors}}") - v8=("${v7[@]/#collect-slots/ 'collect-slots' ${minus_logfiles[*]/#/--ignore-log }}") - v9=("${v8[@]/#filter-slots/ 'filter-slots' ${filters[@]}}") - va=("${v9[@]/#propagation-json/ 'render-propagation' --json \"$adir\"/blockprop.json --full}") - vb=("${va[@]/#propagation-org/ 'render-propagation' --org \"$adir\"/blockprop.org --full}") - vc=("${vb[@]/#propagation-forger/ 'render-propagation' --report \"$adir\"/blockprop.forger.org --forger}") - vd=("${vc[@]/#propagation-peers/ 'render-propagation' --report \"$adir\"/blockprop.peers.org --peers }") - ve=("${vd[@]/#propagation-endtoend/ 'render-propagation' --report \"$adir\"/blockprop.endtoend.org --end-to-end}") + v3=("${v2[@]/#read-chain/ 'read-chain' --chain \"$adir\"/chain.json}") + v4=("${v3[@]/#rebuild-chain/ 'rebuild-chain' ${filters[@]}}") + v5=("${v4[@]/#dump-chain/ 'dump-chain' --chain \"$adir\"/chain.json --chain-rejecta \"$adir\"/chain-rejecta.json}") + v6=("${v5[@]/#chain-timeline/ 'timeline-chain' --timeline \"$adir\"/chain.txt ${filter_reasons:+--filter-reasons} ${chain_errors:+--chain-errors}}") + v7=("${v6[@]/#collect-slots/ 'collect-slots' ${minus_logfiles[*]/#/--ignore-log }}") + v8=("${v7[@]/#filter-slots/ 'filter-slots' ${filters[@]}}") + v9=("${v8[@]/#propagation-json/ 'render-propagation' --json \"$adir\"/blockprop.json --full}") + va=("${v9[@]/#propagation-org/ 'render-propagation' --org \"$adir\"/blockprop.org --full}") + vb=("${va[@]/#propagation-control/ 'render-propagation' --org-report \"$adir\"/blockprop.control.org --control}") + vc=("${vb[@]/#propagation-forger/ 
'render-propagation' --org-report \"$adir\"/blockprop.forger.org --forger}") + vd=("${vc[@]/#propagation-peers/ 'render-propagation' --org-report \"$adir\"/blockprop.peers.org --peers }") + ve=("${vd[@]/#propagation-endtoend/ 'render-propagation' --org-report \"$adir\"/blockprop.endtoend.org --end-to-end}") vf=("${ve[@]/#propagation-gnuplot/ 'render-propagation' --gnuplot \"$adir\"/cdf/%s.cdf --full}") vg=("${vf[@]/#propagation-full/ 'render-propagation' --pretty \"$adir\"/blockprop-full.txt --full}") vh=("${vg[@]/#clusterperf-json/ 'render-clusterperf' --json \"$adir\"/clusterperf.json --full }") vi=("${vh[@]/#clusterperf-org/ 'render-clusterperf' --org \"$adir\"/clusterperf.org --full }") - vj=("${vi[@]/#clusterperf-report/ 'render-clusterperf' --report \"$adir\"/clusterperf.report.org --summary }") + vj=("${vi[@]/#clusterperf-report/ 'render-clusterperf' --org-report \"$adir\"/clusterperf.report.org --report }") vk=("${vj[@]/#clusterperf-gnuplot/ 'render-clusterperf' --gnuplot \"$adir\"/cdf/%s.cdf --full }") vl=("${vk[@]/#clusterperf-full/ 'render-clusterperf' --pretty \"$adir\"/clusterperf-full.txt --full }") vm=("${vl[@]/#read-clusterperfs/ 'read-clusterperfs' --clusterperf \"$adir\"/clusterperf.json }") vn=("${vm[@]/#read-propagations/ 'read-propagations' --prop \"$adir\"/blockprop.json }") vo=("${vn[@]/#summary-json/ 'render-summary' --json \"$adir\"/summary.json}") - vp=("${vo[@]/#summary-report/ 'render-summary' --report \"$adir\"/summary.org}") + vp=("${vo[@]/#summary-report/ 'render-summary' --org-report \"$adir\"/summary.org}") local ops_final=() for v in "${vp[@]}" do eval ops_final+=($v); done @@ -405,9 +407,8 @@ case "$op" in local props=( $(for adir in ${adirs[*]}; do echo --prop ${adir}/blockprop.json; done)) local cperfs=($(for adir in ${adirs[*]}; do echo --clusterperf ${adir}/clusterperf.json; done)) local compares=($(for adir in ${adirs[*]} - do echo --run-metafile ${adir}/../meta.json \ - --shelley-genesis ${adir}/../genesis-shelley.json \ - 
--perf ${adir}/clusterperf.json \ + do echo --summary ${adir}/summary.json \ + --perf ${adir}/clusterperf.json \ --prop ${adir}/blockprop.json done)) local run=$(for dir in ${dirs[*]}; do basename $dir; done | sort -r | head -n1 | cut -d. -f1-2)_$suffix @@ -420,18 +421,19 @@ case "$op" in v0=("$@") v1=(${v0[*]/#read-clusterperfs/ 'read-clusterperfs' ${cperfs[*]} }) v2=(${v1[*]/#read-propagations/ 'read-propagations' ${props[*]} }) - v3=(${v2[*]/#multi-clusterperf-json/ 'render-multi-clusterperf' --json $adir/'multi-clusterperf.json' --full $multi_aspect }) - v4=(${v3[*]/#multi-clusterperf-org/ 'render-multi-clusterperf' --org $adir/'multi-clusterperf.org' --full $multi_aspect }) - v5=(${v4[*]/#multi-clusterperf-report/ 'render-multi-clusterperf' --report $adir/'multi-clusterperf.report.org' --summary $multi_aspect }) + v3=(${v2[*]/#multi-clusterperf-json/ 'render-multi-clusterperf' --json $adir/'clusterperf.json' --full $multi_aspect }) + v4=(${v3[*]/#multi-clusterperf-org/ 'render-multi-clusterperf' --org $adir/'clusterperf.org' --full $multi_aspect }) + v5=(${v4[*]/#multi-clusterperf-report/ 'render-multi-clusterperf' --report $adir/'clusterperf.report.org' --report $multi_aspect }) v6=(${v5[*]/#multi-clusterperf-gnuplot/ 'render-multi-clusterperf' --gnuplot $adir/cdf/'%s.cdf' --full $multi_aspect }) - v7=(${v6[*]/#multi-clusterperf-full/ 'render-multi-clusterperf' --pretty $adir/'multi-clusterperf-full.txt' --full $multi_aspect }) - v8=(${v7[*]/#multi-propagation-json/ 'render-multi-propagation' --json $adir/'multi-blockprop.json' --full $multi_aspect }) - v9=(${v8[*]/#multi-propagation-org/ 'render-multi-propagation' --org $adir/'multi-blockprop.org' --full $multi_aspect }) - va=(${v9[*]/#multi-propagation-forger/ 'render-multi-propagation' --report $adir/'multi-blockprop-forger.org' --forger $multi_aspect }) - vb=(${va[*]/#multi-propagation-peers/ 'render-multi-propagation' --report $adir/'multi-blockprop-peers.org' --peers $multi_aspect }) - 
vc=(${vb[*]/#multi-propagation-endtoend/ 'render-multi-propagation' --report $adir/'multi-blockprop-endtoend.org' --end-to-end $multi_aspect }) + v7=(${v6[*]/#multi-clusterperf-full/ 'render-multi-clusterperf' --pretty $adir/'clusterperf-full.txt' --full $multi_aspect }) + v8=(${v7[*]/#multi-propagation-json/ 'render-multi-propagation' --json $adir/'blockprop.json' --full $multi_aspect }) + v9=(${v8[*]/#multi-propagation-org/ 'render-multi-propagation' --org $adir/'blockprop.org' --full $multi_aspect }) + va=(${v9[*]/#multi-propagation-control/ 'render-multi-propagation' --report $adir/'blockprop.control.org' --control $multi_aspect }) + va=(${va[*]/#multi-propagation-forger/ 'render-multi-propagation' --report $adir/'blockprop.forger.org' --forger $multi_aspect }) + vb=(${va[*]/#multi-propagation-peers/ 'render-multi-propagation' --report $adir/'blockprop.peers.org' --peers $multi_aspect }) + vc=(${vb[*]/#multi-propagation-endtoend/ 'render-multi-propagation' --report $adir/'blockprop.endtoend.org' --end-to-end $multi_aspect }) vd=(${vc[*]/#multi-propagation-gnuplot/ 'render-multi-propagation' --gnuplot $adir/cdf/'%s.cdf' --full $multi_aspect }) - ve=(${vd[*]/#multi-propagation-full/ 'render-multi-propagation' --pretty $adir/'multi-blockprop-full.txt' --full $multi_aspect }) + ve=(${vd[*]/#multi-propagation-full/ 'render-multi-propagation' --pretty $adir/'blockprop-full.txt' --full $multi_aspect }) vf=(${ve[*]/#compare/ 'compare' --ede nix/workbench/ede --report $adir/report-$run.org ${compares[*]} }) vg=(${vf[*]/#update/ 'compare' --ede nix/workbench/ede --report $adir/report-$run.org ${compares[*]} --template $adir/report-$run.ede }) local ops_final=(${vg[*]}) diff --git a/nix/workbench/ede/chart.ede b/nix/workbench/ede/chart.ede index 284931d768e..8d202508d1d 100644 --- a/nix/workbench/ede/chart.ede +++ b/nix/workbench/ede/chart.ede @@ -14,9 +14,9 @@ set yrange [*:*] set ylabel "{{ args.ylabel }}" {% endif %} eval cdfI_{{ (runs | length) + 1 }}("{{ args.metric
}}", "{{ args.title }}", \ - "{{ base.meta.tag }}/{{ base.shortId }}/{{ base.meta.era | toTitle }}", "{{ base.meta.tag }}", \ + "{{ base.meta.tag }}/{{ base.meta.batch }}/{{ base.meta.era | toTitle }}", "{{ base.meta.tag }}", \ {% for run in runs %} - "{{ run.value.meta.tag }}/{{ run.value.shortId }}/{{ run.value.meta.era | toTitle }}", "{{ run.value.meta.tag }}"{% if !run.last %},{% endif %} \ + "{{ run.value.meta.tag }}/{{ run.value.meta.batch }}/{{ run.value.meta.era | toTitle }}", "{{ run.value.meta.tag }}"{% if !run.last %},{% endif %} \ {% endfor %} ) #+end_src diff --git a/nix/workbench/ede/report.ede b/nix/workbench/ede/report.ede index ce3cd6e6b76..6f6f7891853 100644 --- a/nix/workbench/ede/report.ede +++ b/nix/workbench/ede/report.ede @@ -1,4 +1,4 @@ -#+CONSTANTS:{% for sec in sections %} {{ sec.value.dataRef }}={{ sec.value.orgFile }} {% endfor %} +#+CONSTANTS: {{ summary.dataRef }}={{ summary.orgFile }}{% for sec in analyses %} {{ sec.value.dataRef }}={{ sec.value.orgFile }} {% endfor %} #+CONSTANTS: base=../{{ base.meta.tag }}/analysis {% for run in runs %} #+CONSTANTS: run{{ run.index }}=../{{ run.value.meta.tag }}/analysis @@ -20,16 +20,16 @@ We compare {% for run in runs %}{%if !run.first%}{%if !run.last%}, {%else%} and {%endif%}{%endif%}{{ run.value.ver }}/{{ run.value.meta.era | toTitle }}{% endfor %} relative to ={{ base.ver }}=/{{ base.meta.era | toTitle }}, under {{ base.workload }} workload. 
-{% include "manifest.ede" %} +{% include "table.ede" with table = summary %} ***** Revision history - rev 1, {{ report.date }}: initial release *** Analysis -{% for sec in sections %} +{% for sec in analyses %} ***** {{ sec.value.title }} -{% include "table.ede" with table = sec.value %} +{% include "tablevars-delta-pct.ede" with table = sec.value %} {% endfor %} diff --git a/nix/workbench/ede/table.ede b/nix/workbench/ede/table.ede index 5976aa9093b..bd4b59c911e 100644 --- a/nix/workbench/ede/table.ede +++ b/nix/workbench/ede/table.ede @@ -1,13 +1,10 @@ -| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ | Δ%{% endfor %} | -|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| +| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }}{% endfor %} | +|---------------------------+-----{% for run in runs %}-+-----{% endfor %}-| {% for i in table.rowPrecs %} -| | {% for run in runs %} | | | {% endfor %} | +| | {% for run in runs %} | {% endfor %} | {% endfor %} -|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| -{% for var in table.vars %} -| | {% for run in runs %} | | | {% endfor %} | -{% endfor %} -#+TBLFM: $2=remote(file:$base/${{ table.dataRef }},@@#$average); N f-3::$1=remote(file:$base/${{ table.dataRef }},@@#$metric){% for var in table.vars %}::@{{ var.value.angles }}$1=string("{{ var.value.name }}")::@{{ var.value.angles }}$2=remote(file:$base/${{ table.dataRef }},${{var.key}}){% endfor %} +|---------------------------+-----{% for run in runs %}-+-----{% endfor %}-| +#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }}))::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})) {% for run in runs %} -#+TBLFM: ${{ run.index * 3 }}=remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#$average); N f-3::${{ run.index * 3 + 1 }}=${{ run.index * 3 }}-$2; N f-3::${{ run.index * 3 + 2 }}=if($2 == 0, 
string("nan"), round(100*${{ run.index * 3 + 1 }}/$2)){% for var in table.vars %}::@{{ var.value.angles }}${{ run.index * 3 }}=remote(file:$base/${{ table.dataRef }},${{var.key}})::@{{ var.value.angles }}${{ run.index * 3 + 1 }}=string("")::@{{ var.value.angles }}${{ run.index * 3 + 2 }}=string(""){% endfor %} +#+TBLFM: ${{ 2 + run.index }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})) {% endfor %} diff --git a/nix/workbench/ede/tablevars-delta-pct.ede b/nix/workbench/ede/tablevars-delta-pct.ede new file mode 100644 index 00000000000..6febfedb270 --- /dev/null +++ b/nix/workbench/ede/tablevars-delta-pct.ede @@ -0,0 +1,13 @@ +| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ | Δ%{% endfor %} | +|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| +{% for i in table.rowPrecs %} +| | {% for run in runs %} | | | {% endfor %} | +{% endfor %} +|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| +{% for var in table.vars %} +| | {% for run in runs %} | | | {% endfor %} | +{% endfor %} +#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})){% for var in table.vars %}::@{{ var.value.angles }}$1=string("{{ var.value.name }}")::@{{ var.value.angles }}$2='(identity remote(file:$base/${{ table.dataRef }},${{var.key}})){% endfor %} +{% for run in runs %} +#+TBLFM: ${{ run.index * 3 }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::${{ run.index * 3 + 1 }}=${{ run.index * 3 }}-$2; N f-3::${{ run.index * 3 + 2 }}=if($2 == 0, string("nan"), round(100*${{ run.index * 3 + 1 }}/$2)){% for var in table.vars %}::@{{ var.value.angles }}${{ run.index * 3 }}='(identity remote(file:$base/${{ table.dataRef }},${{var.key}}))::@{{ var.value.angles }}${{ run.index * 3 + 1 
}}=string("")::@{{ var.value.angles }}${{ run.index * 3 + 2 }}=string(""){% endfor %} +{% endfor %} diff --git a/nix/workbench/ede/tablevars-delta.ede b/nix/workbench/ede/tablevars-delta.ede new file mode 100644 index 00000000000..79a5d5f58c6 --- /dev/null +++ b/nix/workbench/ede/tablevars-delta.ede @@ -0,0 +1,13 @@ +| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ{% endfor %} | +|---------------------------+-----{% for run in runs %}-+------+-----{% endfor %}-| +{% for i in table.rowPrecs %} +| | {% for run in runs %} | | {% endfor %} | +{% endfor %} +|---------------------------+-----{% for run in runs %}-+------+-----{% endfor %}-| +{% for var in table.vars %} +| | {% for run in runs %} | | {% endfor %} | +{% endfor %} +#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})){% for var in table.vars %}::@{{ var.value.angles }}$1=string("{{ var.value.name }}")::@{{ var.value.angles }}$2='(identity remote(file:$base/${{ table.dataRef }},${{var.key}})){% endfor %} +{% for run in runs %} +#+TBLFM: ${{ run.index * 2 }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::${{ run.index * 2 + 1 }}=${{ run.index * 2 }}-$2; N f-3::@{{ var.value.angles }}${{ run.index * 2 }}='(identity remote(file:$base/${{ table.dataRef }},${{var.key}}))::@{{ var.value.angles }}${{ run.index * 2 + 1 }}=string(""){% endfor %} +{% endfor %}