-
Notifications
You must be signed in to change notification settings - Fork 188
feat: state migration benchmark #6922
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
LesnyRumcajs
wants to merge
3
commits into
main
Choose a base branch
from
state-migration-benchmark
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
Show all changes
3 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,159 @@ | ||
| // Copyright 2019-2026 ChainSafe Systems | ||
| // SPDX-License-Identifier: Apache-2.0, MIT | ||
|
|
||
| use crate::blocks::Tipset; | ||
| use crate::daemon::bundle::load_actor_bundles; | ||
| use crate::db::{ | ||
| car::{AnyCar, ManyCar}, | ||
| db_engine::{Db, DbConfig}, | ||
| }; | ||
| use crate::networks::{ChainConfig, Height, NetworkChain}; | ||
| use crate::state_migration::run_state_migrations; | ||
| use crate::utils::db::car_util::load_car; | ||
| use anyhow::Context as _; | ||
| use clap::{Args, ValueEnum}; | ||
| use fvm_ipld_blockstore::Blockstore; | ||
| use std::{path::PathBuf, sync::Arc}; | ||
|
|
||
/// Read-side layout for the snapshot during the benchmark.
// NOTE: these `///` comments double as the clap `--backend` help text, so they
// are user-facing and should not be reworded casually.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
enum Backend {
    /// Attach the snapshot CAR as a read-only overlay on top of the temporary
    /// ParityDb. Migration reads hit the CAR layer.
    Car,
    /// Ingest the snapshot into the temporary ParityDb before timing the
    /// migration, so migration reads go through the writable db the same way
    /// a long-running daemon would.
    Db,
}
|
|
||
/// Runs a single state migration against the head of a snapshot, using a
/// throwaway on-disk ParityDb as the writable backing store so that timings
/// reflect the real production I/O path. The temporary ParityDb is removed
/// when the command exits.
// NOTE: the field `///` comments below become `--help` output via clap's
// derive; keep them accurate for end users.
#[derive(Debug, Args)]
pub struct MigrateCommand {
    /// Path to the snapshot CAR file (plain `.car` or zstd-compressed `.car.zst`).
    #[arg(long, required = true)]
    snapshot: PathBuf,
    /// Migration height to run (e.g. `GoldenWeek`, `Xxx`). The migration will
    /// be invoked as if the chain had reached that height's configured epoch
    /// for the network detected from the snapshot's genesis.
    #[arg(long, required = true)]
    height: Height,
    /// Storage layout to benchmark against.
    #[arg(long, value_enum, default_value_t = Backend::Car)]
    backend: Backend,
}
|
|
||
impl MigrateCommand {
    /// Executes the benchmark: sets up a temporary on-disk ParityDb, arranges
    /// the snapshot according to the chosen [`Backend`], loads actor bundles,
    /// and hands off to [`bench`] which times the migration itself.
    ///
    /// # Errors
    /// Fails if the snapshot cannot be opened/attached, the network cannot be
    /// detected from the snapshot's genesis, the height has no configured
    /// epoch on that network, or the migration itself fails / does not run.
    pub async fn run(self) -> anyhow::Result<()> {
        let Self {
            snapshot,
            height,
            backend,
        } = self;

        // On-disk ParityDb so the benchmark reflects production I/O rather than
        // the in-memory fast path.
        // The TempDir guard lives until the end of this function, at which
        // point the directory (and the db inside it) is removed.
        let temp_dir = tempfile::Builder::new()
            .prefix("forest-migrate-")
            .tempdir()?;
        let paritydb_path = temp_dir.path().join("paritydb");
        let paritydb = Db::open(&paritydb_path, &DbConfig::default())?;
        tracing::info!("Using temporary ParityDb at {}", paritydb_path.display());

        match backend {
            Backend::Db => {
                // The snapshot is about to be consumed into the writable db, so
                // identify the network first and hold on to the head tipset.
                // The inner scope drops the read-only `AnyCar` handle before the
                // snapshot file is re-opened for import below.
                let (head, network) = {
                    let car = AnyCar::try_from(snapshot.as_path()).with_context(|| {
                        format!("failed to open snapshot {}", snapshot.display())
                    })?;
                    let head = car.heaviest_tipset()?;
                    let network = detect_network(&head, &car)?;
                    (head, network)
                };
                let chain_config = ChainConfig::from_chain(&network);
                // Fail fast before the expensive import if the requested
                // height has no epoch configured on this network.
                ensure_epoch(&chain_config, height, &network)?;

                tracing::info!("Importing snapshot into temporary ParityDb…");
                let import_start = std::time::Instant::now();
                let file = tokio::fs::File::open(&snapshot)
                    .await
                    .with_context(|| format!("failed to open {}", snapshot.display()))?;
                load_car(&paritydb, tokio::io::BufReader::new(file)).await?;
                // Import time is reported separately; it is NOT part of the
                // migration timing done in `bench`.
                tracing::info!(
                    "Snapshot imported in {}",
                    humantime::format_duration(import_start.elapsed())
                );
                let store = Arc::new(paritydb);
                // Actor bundles must be present in the store before migrating
                // (presumably so migration code can resolve builtin actor
                // code CIDs — TODO confirm against `run_state_migrations`).
                load_actor_bundles(&*store, &network).await?;
                bench(&store, &chain_config, &network, head, height)
            }
            Backend::Car => {
                // Layer the CAR read-only on top of the writable ParityDb:
                // migration reads hit the CAR, writes land in the ParityDb.
                let store = Arc::new(ManyCar::new(paritydb));
                store
                    .read_only_file(&snapshot)
                    .with_context(|| format!("failed to attach snapshot {}", snapshot.display()))?;
                let head = store.heaviest_tipset()?;
                let network = detect_network(&head, &store)?;
                let chain_config = ChainConfig::from_chain(&network);
                ensure_epoch(&chain_config, height, &network)?;
                // Bundles go to the writable layer, not the read-only CAR.
                load_actor_bundles(store.writer(), &network).await?;
                bench(&store, &chain_config, &network, head, height)
            }
        }
    }
}
|
|
||
| fn detect_network(head: &Tipset, store: &impl Blockstore) -> anyhow::Result<NetworkChain> { | ||
| let genesis = head.genesis(store)?; | ||
| NetworkChain::from_genesis(genesis.cid()).context( | ||
| "snapshot genesis does not match any known mainnet/calibnet/butterflynet genesis; custom devnets are not supported", | ||
| ) | ||
| } | ||
|
|
||
| fn ensure_epoch( | ||
| chain_config: &ChainConfig, | ||
| height: Height, | ||
| network: &NetworkChain, | ||
| ) -> anyhow::Result<()> { | ||
| let epoch = chain_config.epoch(height); | ||
| anyhow::ensure!( | ||
| epoch > 0, | ||
| "no epoch configured for height {height} on {network}" | ||
| ); | ||
| Ok(()) | ||
| } | ||
|
|
||
| fn bench<DB: Blockstore + Send + Sync>( | ||
| store: &Arc<DB>, | ||
| chain_config: &ChainConfig, | ||
| network: &NetworkChain, | ||
| head: Tipset, | ||
| height: Height, | ||
| ) -> anyhow::Result<()> { | ||
| let epoch = chain_config.epoch(height); | ||
| let parent_state = *head.parent_state(); | ||
| tracing::info!( | ||
| "Running {height} migration on {network} (epoch {epoch}); head epoch {head_epoch}, parent state {parent_state}", | ||
| head_epoch = head.epoch(), | ||
| ); | ||
|
|
||
| let start = std::time::Instant::now(); | ||
| let new_state = run_state_migrations(epoch, chain_config, store, &parent_state)?; | ||
| let elapsed = start.elapsed(); | ||
|
|
||
| match new_state { | ||
| Some(new_state) => tracing::info!( | ||
| "Migration completed: {parent_state} -> {new_state} in {elapsed}", | ||
| elapsed = humantime::format_duration(elapsed), | ||
| ), | ||
| None => anyhow::bail!( | ||
| "No migration ran. Check that the mapping for height {height} is registered for {network} in `get_migrations` and that the snapshot's head is compatible." | ||
| ), | ||
| } | ||
| Ok(()) | ||
| } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The parameter `db` could already be a `BlockstoreWithWriteBuffer`; we should somehow avoid wrapping it in recursive `BlockstoreWithWriteBuffer` layers.