Skip to content

Commit 6f7f973

Browse files
authored
feat: add state to prepare snapshot command (#83)
* feat(snapshot): store state when preparing snapshot This allows the prepare snapshot command to resume from the same point after a restart. * feat(snapshot): add progress tracing when importing * chore: remove redundant line
1 parent 2635d3a commit 6f7f973

File tree

5 files changed

+68
-8
lines changed

5 files changed

+68
-8
lines changed

src/main.rs

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,11 @@ use eyre::Result;
1818
use processor::snapshot::{
1919
exporter::SnapshotExporter, importer::SnapshotImporter, SnapshotBuilder,
2020
};
21-
use state_reconstruct_fetcher::{constants::storage, l1_fetcher::L1Fetcher, types::CommitBlock};
21+
use state_reconstruct_fetcher::{
22+
constants::{ethereum, storage},
23+
l1_fetcher::{L1Fetcher, L1FetcherOptions},
24+
types::CommitBlock,
25+
};
2226
use tikv_jemallocator::Jemalloc;
2327
use tokio::sync::mpsc;
2428
use tracing_subscriber::{filter::LevelFilter, EnvFilter};
@@ -153,10 +157,20 @@ async fn main() -> Result<()> {
153157
l1_fetcher_options,
154158
db_path,
155159
} => {
156-
let fetcher_options = l1_fetcher_options.into();
157-
let fetcher = L1Fetcher::new(fetcher_options, None)?;
158160
let processor = SnapshotBuilder::new(db_path);
159161

162+
let mut fetcher_options: L1FetcherOptions = l1_fetcher_options.into();
163+
if let Ok(Some(batch_number)) = processor.get_last_l1_batch_number() {
164+
if batch_number > ethereum::GENESIS_BLOCK {
165+
tracing::info!(
166+
"Found a preexisting snapshot db, continuing from L1 block: {batch_number}"
167+
);
168+
fetcher_options.start_block = batch_number + 1;
169+
}
170+
}
171+
172+
let fetcher = L1Fetcher::new(fetcher_options, None)?;
173+
160174
let (tx, rx) = mpsc::channel::<CommitBlock>(5);
161175
let processor_handle = tokio::spawn(async move {
162176
processor.run(rx).await;

src/processor/snapshot/database.rs

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ pub const FACTORY_DEPS: &str = "factory_deps";
1414
const METADATA: &str = "metadata";
1515

1616
const LAST_REPEATED_KEY_INDEX: &str = "LAST_REPEATED_KEY_INDEX";
17+
const LAST_L1_BATCH_NUMBER: &str = "LAST_L1_BATCH_NUMBER";
1718

1819
#[allow(clippy::enum_variant_names)]
1920
#[derive(Error, Debug)]
@@ -118,6 +119,27 @@ impl SnapshotDB {
118119
.map_err(Into::into)
119120
}
120121

122+
pub fn get_last_l1_batch_number(&self) -> Result<Option<u64>> {
123+
// Unwrapping column family handle here is safe because presence of
124+
// those CFs is ensured in construction of this DB.
125+
let metadata = self.cf_handle(METADATA).unwrap();
126+
let batch = self.get_cf(metadata, LAST_L1_BATCH_NUMBER)?.map(|bytes| {
127+
u64::from_be_bytes([
128+
bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
129+
])
130+
});
131+
132+
Ok(batch)
133+
}
134+
135+
pub fn set_last_l1_batch_number(&self, batch_number: u64) -> Result<()> {
136+
// Unwrapping column family handle here is safe because presence of
137+
// those CFs is ensured in construction of this DB.
138+
let metadata = self.cf_handle(METADATA).unwrap();
139+
self.put_cf(metadata, LAST_L1_BATCH_NUMBER, batch_number.to_be_bytes())
140+
.map_err(Into::into)
141+
}
142+
121143
pub fn get_storage_log(&self, key: &[u8]) -> Result<Option<SnapshotStorageLog>> {
122144
// Unwrapping column family handle here is safe because presence of
123145
// those CFs is ensured in construction of this DB.

src/processor/snapshot/exporter.rs

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ use std::{
44
};
55

66
use bytes::BytesMut;
7+
use ethers::types::U64;
78
use eyre::Result;
89
use flate2::{write::GzEncoder, Compression};
910
use prost::Message;
@@ -38,7 +39,17 @@ impl SnapshotExporter {
3839
}
3940

4041
pub fn export_snapshot(&self, chunk_size: u64) -> Result<()> {
41-
let mut header = SnapshotHeader::default();
42+
let l1_batch_number = U64::from(
43+
self.database
44+
.get_last_l1_batch_number()?
45+
.expect("snapshot db contains no L1 batch number"),
46+
);
47+
48+
let mut header = SnapshotHeader {
49+
l1_batch_number,
50+
..Default::default()
51+
};
52+
4253
self.export_storage_logs(chunk_size, &mut header)?;
4354
self.export_factory_deps(&mut header)?;
4455

@@ -146,7 +157,6 @@ impl SnapshotExporter {
146157
};
147158

148159
chunk.storage_logs.push(pb);
149-
header.l1_batch_number = entry.l1_batch_number_of_initial_write;
150160
}
151161
} else {
152162
has_more = false;
@@ -159,7 +169,7 @@ impl SnapshotExporter {
159169
buf.reserve(chunk_len - buf.capacity());
160170
}
161171

162-
let path = PathBuf::new().join(&self.basedir).join(format!(
172+
let path = &self.basedir.join(format!(
163173
"snapshot_l1_batch_{}_storage_logs_part_{:0>4}.proto.gzip",
164174
header.l1_batch_number, chunk_id
165175
));

src/processor/snapshot/mod.rs

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,11 @@ impl SnapshotBuilder {
5353

5454
Self { database }
5555
}
56+
57+
// Gets the next L1 batch number to be processed for use in state recovery.
58+
pub fn get_last_l1_batch_number(&self) -> Result<Option<u64>> {
59+
self.database.get_last_l1_batch_number()
60+
}
5661
}
5762

5863
#[async_trait]
@@ -109,6 +114,10 @@ impl Processor for SnapshotBuilder {
109114
})
110115
.expect("failed to save factory dep");
111116
}
117+
118+
if let Some(number) = block.l1_block_number {
119+
let _ = self.database.set_last_l1_batch_number(number);
120+
};
112121
}
113122
}
114123
}

src/processor/tree/tree_wrapper.rs

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,9 @@ impl TreeWrapper {
131131
) -> Result<()> {
132132
let mut tree_entries = Vec::new();
133133

134-
for chunk in &chunks {
134+
for (i, chunk) in chunks.iter().enumerate() {
135+
tracing::info!("Importing chunk {}/{}...", i + 1, chunks.len());
136+
135137
for log in &chunk.storage_logs {
136138
let key = U256::from_big_endian(log.storage_key());
137139
let index = log.enumeration_index();
@@ -146,15 +148,18 @@ impl TreeWrapper {
146148
.add_key(&key)
147149
.expect("cannot add key");
148150
}
151+
152+
tracing::info!("Chunk {} was successfully imported!", i + 1);
149153
}
150154

155+
tracing::info!("Extending merkle tree with imported storage logs...");
151156
let num_tree_entries = tree_entries.len();
152157
self.tree.extend(tree_entries);
153158

154159
tracing::info!("Successfully imported snapshot containing {num_tree_entries} storage logs!",);
155160

156161
let snapshot = self.snapshot.lock().await;
157-
snapshot.set_latest_l1_block_number(l1_batch_number.as_u64())?;
162+
snapshot.set_latest_l1_block_number(l1_batch_number.as_u64() + 1)?;
158163

159164
Ok(())
160165
}

0 commit comments

Comments
 (0)