Merge pull request #1195 from input-output-hk/lc/cbor-dataset
CBOR Dataset
locallycompact authored Dec 4, 2023
2 parents 66d10b8 + 4afa89b commit ec342cc
Showing 9 changed files with 56 additions and 32,624 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-nix.yaml
@@ -140,7 +140,7 @@ jobs:
 options: '-o $(pwd)/../benchmarks/ledger-bench.html'
 - package: hydra-cluster
 bench: bench-e2e
-options: 'datasets datasets/3-nodes.json datasets/1-node.json --output-directory $(pwd)/../benchmarks --timeout 1000s'
+options: 'datasets datasets/3-nodes.cbor datasets/1-node.cbor --output-directory $(pwd)/../benchmarks --timeout 1000s'
 - package: plutus-merkle-tree
 bench: on-chain-cost
 options: '$(pwd)/../benchmarks'
26 changes: 17 additions & 9 deletions hydra-cluster/bench/Main.hs
@@ -8,14 +8,17 @@ import Test.Hydra.Prelude
 import Bench.EndToEnd (bench)
 import Bench.Options (Options (..), benchOptionsParser)
 import Bench.Summary (Summary (..), markdownReport, textReport)
-import Data.Aeson (eitherDecodeFileStrict', encodeFile)
+import Cardano.Binary (decodeFull, serialize)
+import Data.Aeson (eitherDecodeFileStrict')
+import Data.ByteString (hPut)
+import Data.ByteString.Base16 qualified as Base16
+import Data.ByteString.Lazy qualified as LBS
 import Hydra.Cardano.Api (
 ShelleyBasedEra (..),
 ShelleyGenesis (..),
 fromLedgerPParams,
 )
-import Hydra.Generator (generateConstantUTxODataset)
+import Hydra.Generator (Dataset, generateConstantUTxODataset)
 import Options.Applicative (
 execParser,
 )
@@ -38,7 +41,7 @@ main =
 play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId tmpDir
 DatasetOptions{datasetFiles, outputDirectory, timeoutSeconds, startingNodeId} -> do
 benchDir <- createSystemTempDirectory "bench"
-datasets <- mapM (eitherDecodeFileStrict' >=> either die pure) datasetFiles
+datasets <- mapM loadDataset datasetFiles
 let targets = zip datasets $ (benchDir </>) . show <$> [1 .. length datasets]
 forM_ (snd <$> targets) (createDirectoryIfMissing True)
 run outputDirectory timeoutSeconds startingNodeId targets
@@ -51,11 +54,11 @@ main =
 Right shelleyGenesis ->
 pure $ fromLedgerPParams ShelleyBasedEraShelley (sgProtocolParams shelleyGenesis)
 dataset <- generateConstantUTxODataset pparams (fromIntegral clusterSize) numberOfTxs
-saveDataset benchDir dataset
+saveDataset (benchDir </> "dataset.cbor") dataset
 run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]

 replay outputDirectory timeoutSeconds startingNodeId benchDir = do
-dataset <- either die pure =<< eitherDecodeFileStrict' (benchDir </> "dataset.json")
+dataset <- loadDataset $ benchDir </> "dataset.cbor"
 putStrLn $ "Using UTxO and Transactions from: " <> benchDir
 run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]

@@ -75,10 +78,15 @@ main =
 [] -> benchmarkSucceeded outputDirectory summaries
 errs -> mapM_ (\(_, dir, exc) -> benchmarkFailedWith dir exc) errs >> exitFailure

-saveDataset tmpDir dataset = do
-let txsFile = tmpDir </> "dataset.json"
-putStrLn $ "Writing dataset to: " <> txsFile
-encodeFile txsFile dataset
+loadDataset :: FilePath -> IO Dataset
+loadDataset f = do
+putStrLn $ "Reading dataset from: " <> f
+readFileBS f >>= either (die . show) pure . (decodeFull . LBS.fromStrict . Base16.decodeLenient)
+
+saveDataset :: FilePath -> Dataset -> IO ()
+saveDataset f dataset = do
+putStrLn $ "Writing dataset to: " <> f
+writeFileBS f $ Base16.encode $ LBS.toStrict $ serialize dataset

 data BenchmarkFailed
 = TestFailed HUnitFailure
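In short, the benchmark dataset on disk changes from Aeson-encoded JSON to CBOR (cardano-binary's serialize / decodeFull), hex-encoded with base16-bytestring so the committed file stays plain text. Below is a minimal, self-contained sketch of that round trip; it is only an illustration of the encoding scheme, not the project's code: a list of Word64 stands in for Hydra.Generator's Dataset (which carries its own ToCBOR/FromCBOR instances), and example.cbor is a hypothetical path mirroring the dataset.cbor file above.

{-# LANGUAGE ImportQualifiedPost #-}

-- Standalone sketch of the hex-encoded CBOR round trip used by the bench executable.
import Cardano.Binary (DecoderError, FromCBOR, ToCBOR, decodeFull, serialize)
import Data.ByteString (ByteString)
import Data.ByteString qualified as BS
import Data.ByteString.Base16 qualified as Base16
import Data.ByteString.Lazy qualified as LBS
import Data.Word (Word64)

-- Serialise to CBOR, then hex-encode so the on-disk file is plain ASCII.
encodeHexCbor :: ToCBOR a => a -> ByteString
encodeHexCbor = Base16.encode . LBS.toStrict . serialize

-- Reverse the steps: lenient hex decode, then a full CBOR decode.
decodeHexCbor :: FromCBOR a => ByteString -> Either DecoderError a
decodeHexCbor = decodeFull . LBS.fromStrict . Base16.decodeLenient

main :: IO ()
main = do
  let original = [1, 2, 3] :: [Word64] -- stand-in for a Dataset value
      file = "example.cbor" -- hypothetical path, mirroring "dataset.cbor" above
  BS.writeFile file (encodeHexCbor original)
  bytes <- BS.readFile file
  case decodeHexCbor bytes of
    Left err -> putStrLn ("decoding failed: " <> show err)
    Right roundTripped -> print (roundTripped == original) -- prints True: the round trip is lossless

Hex-encoding doubles the size compared to raw CBOR bytes, but it keeps the committed dataset files text-only, which is presumably why they are stored as Base16 rather than raw binary.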
1 change: 1 addition & 0 deletions hydra-cluster/datasets/1-node.cbor

Large diffs are not rendered by default.

(Diffs for the remaining changed files are not shown here.)
