From 6259f80a4f214e648f2c2b110a3f492628ee1d0f Mon Sep 17 00:00:00 2001 From: jackzhhuang Date: Thu, 12 Dec 2024 14:00:02 +0800 Subject: [PATCH] fix fmt --- chain/src/chain.rs | 14 +- flexidag/src/blockdag.rs | 139 +++++----- .../src/consensusdb/consensus_reachability.rs | 7 +- flexidag/src/consensusdb/db.rs | 2 +- flexidag/tests/tests.rs | 243 ++++-------------- 5 files changed, 117 insertions(+), 288 deletions(-) diff --git a/chain/src/chain.rs b/chain/src/chain.rs index bd4ecaf5ac..9f6197de1c 100644 --- a/chain/src/chain.rs +++ b/chain/src/chain.rs @@ -659,19 +659,11 @@ impl BlockChain { self.storage.save_block_info(block_info.clone())?; self.storage.save_table_infos(txn_table_infos)?; - let genesis_header = self - .storage - .get_block_header_by_hash(self.genesis_hash)? - .ok_or_else(|| format_err!("failed to get genesis because it is none"))?; let result = match verified_block.ghostdata { - Some(trusted_ghostdata) => self.dag.commit_trusted_block( - header.to_owned(), - genesis_header.parent_hash(), - Arc::new(trusted_ghostdata), - ), - None => self + Some(trusted_ghostdata) => self .dag - .commit(header.to_owned(), genesis_header.parent_hash()), + .commit_trusted_block(header.to_owned(), Arc::new(trusted_ghostdata)), + None => self.dag.commit(header.to_owned()), }; match result { anyhow::Result::Ok(_) => info!("finish to commit dag block: {:?}", block_id), diff --git a/flexidag/src/blockdag.rs b/flexidag/src/blockdag.rs index 60c9d1a0db..16e4ed9d3a 100644 --- a/flexidag/src/blockdag.rs +++ b/flexidag/src/blockdag.rs @@ -5,7 +5,7 @@ use crate::consensusdb::consenses_state::{ }; use crate::consensusdb::prelude::{FlexiDagStorageConfig, StoreError}; use crate::consensusdb::schemadb::{ - GhostdagStoreReader, ReachabilityStore, StagingReachabilityStore, REINDEX_ROOT_KEY, + GhostdagStoreReader, ReachabilityStore, StagingReachabilityStore, }; use crate::consensusdb::{ prelude::FlexiDagStorage, @@ -15,14 +15,13 @@ use crate::consensusdb::{ }, }; use crate::ghostdag::protocol::GhostdagManager; +use crate::process_key_already_error; use crate::prune::pruning_point_manager::PruningPointManagerT; -use crate::{process_key_already_error, reachability}; use anyhow::{bail, ensure, Ok}; use rocksdb::WriteBatch; use starcoin_config::temp_dir; use starcoin_crypto::{HashValue as Hash, HashValue}; use starcoin_logger::prelude::{debug, info, warn}; -use starcoin_storage::batch::WriteBatchWithColumn; use starcoin_types::block::BlockHeader; use starcoin_types::{ blockhash::{BlockHashes, KType}, @@ -118,7 +117,7 @@ impl BlockDAG { .write() .insert(origin, BlockHashes::new(vec![]))?; - self.commit(genesis, origin)?; + self.commit(genesis)?; self.save_dag_state( genesis_id, DagState { @@ -154,7 +153,6 @@ impl BlockDAG { pub fn commit_trusted_block( &mut self, header: BlockHeader, - origin: HashValue, trusted_ghostdata: Arc, ) -> anyhow::Result<()> { info!( @@ -212,12 +210,25 @@ impl BlockDAG { } }; + // Create a DB batch writer + let mut batch = WriteBatch::default(); + + // lock the dag data to write in batch + // the cache will be written at the same time + // when the batch is written before flush to the disk and + // if the writing process abort the starcoin process will/should restart. 
+        let mut stage = StagingReachabilityStore::new(
+            self.storage.db.clone(),
+            self.storage.reachability_store.upgradable_read(),
+        );
+
         // Store ghostdata
-        process_key_already_error(
-            self.storage
-                .ghost_dag_store
-                .insert(header.id(), ghostdata.clone()),
-        )?;
+        process_key_already_error(self.storage.ghost_dag_store.insert_batch(
+            &mut batch,
+            header.id(),
+            ghostdata.clone(),
+        ))
+        .expect("failed to insert ghostdata in batch");
 
         // Update reachability store
         debug!(
@@ -225,81 +236,50 @@ impl BlockDAG {
             header.id(),
             header.number()
         );
-        let reachability_store = self.storage.reachability_store.clone();
         let mut merge_set = ghostdata
             .unordered_mergeset_without_selected_parent()
             .filter(|hash| self.storage.reachability_store.read().has(*hash).unwrap())
             .collect::<Vec<_>>()
             .into_iter();
-        let add_block_result = {
-            let mut reachability_writer = reachability_store.write();
-            inquirer::add_block(
-                reachability_writer.deref_mut(),
-                header.id(),
-                ghostdata.selected_parent,
-                &mut merge_set,
-            )
-        };
-        match add_block_result {
-            Result::Ok(_) => (),
-            Err(reachability::ReachabilityError::DataInconsistency) => {
-                let _future_covering_set = reachability_store
-                    .read()
-                    .get_future_covering_set(header.id())?;
-                info!(
-                    "the key {:?} was already processed, original error message: {:?}",
-                    header.id(),
-                    reachability::ReachabilityError::DataInconsistency
-                );
-            }
-            Err(reachability::ReachabilityError::StoreError(StoreError::KeyNotFound(msg))) => {
-                if msg == *REINDEX_ROOT_KEY.to_string() {
-                    info!(
-                        "the key {:?} was already processed, original error message: {:?}",
-                        header.id(),
-                        reachability::ReachabilityError::StoreError(StoreError::KeyNotFound(
-                            REINDEX_ROOT_KEY.to_string()
-                        ))
-                    );
-                    info!("now set the reindex key to origin: {:?}", origin);
-                    // self.storage.reachability_store.set_reindex_root(origin)?;
-                    self.set_reindex_root(origin)?;
-                    bail!(
-                        "failed to add a block when committing, e: {:?}",
-                        reachability::ReachabilityError::StoreError(StoreError::KeyNotFound(msg))
-                    );
-                } else {
-                    bail!(
-                        "failed to add a block when committing, e: {:?}",
-                        reachability::ReachabilityError::StoreError(StoreError::KeyNotFound(msg))
-                    );
-                }
-            }
-            Err(reachability::ReachabilityError::StoreError(StoreError::InvalidInterval(_, _))) => {
-                self.set_reindex_root(origin)?;
-                bail!("failed to add a block when committing for invalid interval",);
-            }
-            Err(e) => {
-                bail!("failed to add a block when committing, e: {:?}", e);
-            }
-        }
-        process_key_already_error(
-            self.storage
-                .relations_store
-                .write()
-                .insert(header.id(), BlockHashes::new(parents)),
-        )?;
+        inquirer::add_block(
+            &mut stage,
+            header.id(),
+            ghostdata.selected_parent,
+            &mut merge_set,
+        )
+        .expect("failed to add reachability in stage batch");
+
+        process_key_already_error(self.storage.relations_store.write().insert_batch(
+            &mut batch,
+            header.id(),
+            BlockHashes::new(parents),
+        ))
+        .expect("failed to insert relations in batch");
+
         // Store header store
         process_key_already_error(self.storage.header_store.insert(
            header.id(),
            Arc::new(header),
            1,
-        ))?;
+        ))
+        .expect("failed to insert header in batch");
+
+        // the read lock is upgraded to a write lock,
+        // then the batch is written,
+        // and finally the lock is released
+        stage
+            .commit(&mut batch)
+            .expect("failed to write the stage reachability in batch");
+
+        // write the data just one time
+        self.storage
+            .write_batch(batch)
+            .expect("failed to write dag data in batch");
 
         Ok(())
     }
 
-    pub fn commit(&mut self, header: BlockHeader, origin: HashValue) -> anyhow::Result<()> {
+    pub
fn commit(&mut self, header: BlockHeader) -> anyhow::Result<()> { info!( "start to commit header: {:?}, number: {:?}", header.id(), @@ -329,7 +309,10 @@ impl BlockDAG { // Create a DB batch writer let mut batch = WriteBatch::default(); - // lock the dag data to write in batch + // lock the dag data to write in batch, read lock. + // the cache will be written at the same time + // when the batch is written before flush to the disk and + // if the writing process abort the starcoin process will/should restart. let mut stage = StagingReachabilityStore::new( self.storage.db.clone(), self.storage.reachability_store.upgradable_read(), @@ -364,10 +347,6 @@ impl BlockDAG { ) .expect("failed to add block in batch"); - stage - .commit(&mut batch) - .expect("failed to write the stage reachability in batch"); - process_key_already_error(self.storage.relations_store.write().insert_batch( &mut batch, header.id(), @@ -384,6 +363,14 @@ impl BlockDAG { )) .expect("failed to insert header in batch"); + // the read lock will be updated to the write lock + // and then write the batch + // and then release the lock + stage + .commit(&mut batch) + .expect("failed to write the stage reachability in batch"); + + // write the data just one time self.storage .write_batch(batch) .expect("failed to write dag data in batch"); diff --git a/flexidag/src/consensusdb/consensus_reachability.rs b/flexidag/src/consensusdb/consensus_reachability.rs index 59691a5311..22cfa64336 100644 --- a/flexidag/src/consensusdb/consensus_reachability.rs +++ b/flexidag/src/consensusdb/consensus_reachability.rs @@ -3,10 +3,7 @@ use super::{ prelude::{BatchDbWriter, CachedDbAccess, CachedDbItem, DirectDbWriter, StoreError}, }; use starcoin_crypto::HashValue as Hash; -use starcoin_storage::{ - batch::WriteBatchData, - storage::{InnerStore, RawDBStorage, WriteOp}, -}; +use starcoin_storage::storage::{InnerStore, RawDBStorage}; use crate::{ consensusdb::schema::{KeyCodec, ValueCodec}, @@ -15,7 +12,7 @@ use crate::{ }; use starcoin_types::blockhash::{self, BlockHashMap, BlockHashes}; -use parking_lot::{RwLockUpgradableReadGuard, RwLockWriteGuard}; +use parking_lot::RwLockUpgradableReadGuard; use rocksdb::WriteBatch; use std::{collections::hash_map::Entry::Vacant, sync::Arc}; diff --git a/flexidag/src/consensusdb/db.rs b/flexidag/src/consensusdb/db.rs index 98413207c6..96c846c534 100644 --- a/flexidag/src/consensusdb/db.rs +++ b/flexidag/src/consensusdb/db.rs @@ -8,7 +8,7 @@ use super::{ }, }; use parking_lot::RwLock; -use rocksdb::{FlushOptions, WriteBatch, DB}; +use rocksdb::WriteBatch; use starcoin_config::{RocksdbConfig, StorageConfig}; pub(crate) use starcoin_storage::db_storage::DBStorage; use starcoin_storage::storage::RawDBStorage; diff --git a/flexidag/tests/tests.rs b/flexidag/tests/tests.rs index cec18c210f..a82d38903a 100644 --- a/flexidag/tests/tests.rs +++ b/flexidag/tests/tests.rs @@ -38,7 +38,6 @@ fn test_dag_commit() -> Result<()> { .build(); let mut parents_hash = vec![genesis.id()]; - let origin = dag.init_with_genesis(genesis.clone())?; for _ in 0..10 { let header_builder = BlockHeaderBuilder::random(); @@ -46,7 +45,7 @@ fn test_dag_commit() -> Result<()> { .with_parents_hash(parents_hash.clone()) .build(); parents_hash = vec![header.id()]; - dag.commit(header.to_owned(), origin)?; + dag.commit(header.to_owned())?; let ghostdata = dag.ghostdata_by_hash(header.id()).unwrap().unwrap(); println!("{:?},{:?}", header, ghostdata); } @@ -92,15 +91,14 @@ fn test_dag_1() -> Result<()> { let genesis_id = genesis.id(); let mut dag = 
BlockDAG::create_for_testing().unwrap(); let expect_selected_parented = [block5.id(), block3.id(), block3_1.id(), genesis_id]; - let origin = dag.init_with_genesis(genesis.clone()).unwrap(); - - dag.commit(block1, origin)?; - dag.commit(block2, origin)?; - dag.commit(block3_1, origin)?; - dag.commit(block3, origin)?; - dag.commit(block4, origin)?; - dag.commit(block5, origin)?; - dag.commit(block6, origin)?; + + dag.commit(block1)?; + dag.commit(block2)?; + dag.commit(block3_1)?; + dag.commit(block3)?; + dag.commit(block4)?; + dag.commit(block5)?; + dag.commit(block6)?; let mut count = 0; while latest_id != genesis_id && count < 4 { let ghostdata = dag @@ -130,9 +128,8 @@ async fn test_with_spawn() { .with_parents_hash(vec![genesis.id()]) .build(); let mut dag = BlockDAG::create_for_testing().unwrap(); - let real_origin = dag.init_with_genesis(genesis.clone()).unwrap(); - dag.commit(block1.clone(), real_origin).unwrap(); - dag.commit(block2.clone(), real_origin).unwrap(); + dag.commit(block1.clone()).unwrap(); + dag.commit(block2.clone()).unwrap(); let block3 = BlockHeaderBuilder::random() .with_difficulty(3.into()) .with_parents_hash(vec![block1.id(), block2.id()]) @@ -144,7 +141,7 @@ async fn test_with_spawn() { let handle = tokio::task::spawn_blocking(move || { let mut count = 10; loop { - match dag_clone.commit(block_clone.clone(), real_origin) { + match dag_clone.commit(block_clone.clone()) { std::result::Result::Ok(_) => break, Err(e) => { debug!("failed to commit error: {:?}, i: {:?}", e, i); @@ -261,8 +258,7 @@ fn test_dag_genesis_fork() { .with_parents_hash(parents_hash.clone()) .build(); parents_hash = vec![header.id()]; - dag.commit(header.to_owned(), genesis.parent_hash()) - .unwrap(); + dag.commit(header.to_owned()).unwrap(); let _ghostdata = dag.ghostdata_by_hash(header.id()).unwrap().unwrap(); } @@ -285,8 +281,7 @@ fn test_dag_genesis_fork() { .with_parents_hash(old_parents_hash.clone()) .build(); old_parents_hash = vec![header.id()]; - dag.commit(header.to_owned(), genesis.parent_hash()) - .unwrap(); + dag.commit(header.to_owned()).unwrap(); let ghostdata = dag.ghostdata_by_hash(header.id()).unwrap().unwrap(); println!("add a old header: {:?}, tips: {:?}", header, ghostdata); } @@ -298,8 +293,7 @@ fn test_dag_genesis_fork() { .with_parents_hash(parents_hash.clone()) .build(); parents_hash = vec![header.id()]; - dag.commit(header.to_owned(), genesis.parent_hash()) - .unwrap(); + dag.commit(header.to_owned()).unwrap(); let ghostdata = dag.ghostdata_by_hash(header.id()).unwrap().unwrap(); println!("add a forked header: {:?}, tips: {:?}", header, ghostdata); } @@ -308,8 +302,7 @@ fn test_dag_genesis_fork() { parents_hash.append(&mut old_parents_hash); let header = header_builder.with_parents_hash(parents_hash).build(); // parents_hash = vec![header.id()]; - dag.commit(header.to_owned(), genesis.parent_hash()) - .unwrap(); + dag.commit(header.to_owned()).unwrap(); let ghostdata = dag.ghostdata_by_hash(header.id()).unwrap().unwrap(); println!("add a forked header: {:?}, tips: {:?}", header, ghostdata); } @@ -359,10 +352,10 @@ fn test_dag_multiple_commits() -> anyhow::Result<()> { .build(); parents_hash = vec![header.id()]; parent_hash = header.id(); - dag.commit(header.to_owned(), genesis.parent_hash())?; + dag.commit(header.to_owned())?; if header.number() == 6 { - dag.commit(header.to_owned(), genesis.parent_hash())?; - dag.commit(header.to_owned(), genesis.parent_hash())?; + dag.commit(header.to_owned())?; + dag.commit(header.to_owned())?; } let ghostdata = 
dag.ghostdata(&parents_hash).unwrap(); println!("add a header: {:?}, tips: {:?}", header, ghostdata); @@ -710,7 +703,6 @@ fn add_and_print_with_ghostdata( number: BlockNumber, parent: Hash, parents: Vec, - origin: Hash, dag: &mut BlockDAG, ghostdata: GhostdagData, ) -> anyhow::Result { @@ -721,7 +713,7 @@ fn add_and_print_with_ghostdata( .with_number(number) .build(); let start = Instant::now(); - dag.commit_trusted_block(header.to_owned(), origin, Arc::new(ghostdata))?; + dag.commit_trusted_block(header.to_owned(), Arc::new(ghostdata))?; let duration = start.elapsed(); println!( "commit header: {:?}, number: {:?}, duration: {:?}", @@ -741,7 +733,6 @@ fn add_and_print_with_pruning_point( number: BlockNumber, parent: Hash, parents: Vec, - origin: Hash, pruning_point: Hash, dag: &mut BlockDAG, ) -> anyhow::Result { @@ -753,7 +744,7 @@ fn add_and_print_with_pruning_point( .with_pruning_point(pruning_point) .build(); let start = Instant::now(); - dag.commit(header.to_owned(), origin)?; + dag.commit(header.to_owned())?; let duration = start.elapsed(); println!( "commit header: {:?}, number: {:?}, duration: {:?}", @@ -773,10 +764,9 @@ fn add_and_print( number: BlockNumber, parent: Hash, parents: Vec, - origin: Hash, dag: &mut BlockDAG, ) -> anyhow::Result { - add_and_print_with_pruning_point(number, parent, parents, origin, Hash::zero(), dag) + add_and_print_with_pruning_point(number, parent, parents, Hash::zero(), dag) } #[test] @@ -795,32 +785,18 @@ fn test_dag_mergeset() -> anyhow::Result<()> { let mut parents_hash = vec![genesis.id()]; let mut parent_hash = genesis.id(); - let mut header = add_and_print( - 2, - parent_hash, - parents_hash, - genesis.parent_hash(), - &mut dag, - )? - .id(); - let red = add_and_print(3, header, vec![header], genesis.parent_hash(), &mut dag)?.id(); + let mut header = add_and_print(2, parent_hash, parents_hash, &mut dag)?.id(); + let red = add_and_print(3, header, vec![header], &mut dag)?.id(); parents_hash = vec![genesis.id()]; parent_hash = genesis.id(); - header = add_and_print( - 2, - parent_hash, - parents_hash, - genesis.parent_hash(), - &mut dag, - )? 
- .id(); - header = add_and_print(3, header, vec![header], genesis.parent_hash(), &mut dag)?.id(); - header = add_and_print(4, header, vec![header], genesis.parent_hash(), &mut dag)?.id(); + header = add_and_print(2, parent_hash, parents_hash, &mut dag)?.id(); + header = add_and_print(3, header, vec![header], &mut dag)?.id(); + header = add_and_print(4, header, vec![header], &mut dag)?.id(); let blue = header; - header = add_and_print(5, blue, vec![blue, red], genesis.parent_hash(), &mut dag)?.id(); + header = add_and_print(5, blue, vec![blue, red], &mut dag)?.id(); let ghostdata = dag.ghostdata(&[header, red])?; println!( @@ -846,13 +822,7 @@ fn test_big_data_commit() -> anyhow::Result<()> { // one let mut parent = genesis.clone(); for i in 0..count { - let new = add_and_print( - i + 1, - parent.id(), - vec![parent.id()], - genesis.parent_hash(), - &mut dag, - )?; + let new = add_and_print(i + 1, parent.id(), vec![parent.id()], &mut dag)?; parent = new; } // let last_one = parent; @@ -896,69 +866,25 @@ fn test_prune() -> anyhow::Result<()> { dag.init_with_genesis(genesis.clone()).unwrap(); - let block1 = add_and_print( - 1, - genesis.id(), - vec![genesis.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block1 = add_and_print(1, genesis.id(), vec![genesis.id()], &mut dag)?; - let block_main_2 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_3 = add_and_print( - 3, - block_main_2.id(), - vec![block_main_2.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_3_1 = add_and_print( - 3, - block_main_2.id(), - vec![block_main_2.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_main_2 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; + let block_main_3 = add_and_print(3, block_main_2.id(), vec![block_main_2.id()], &mut dag)?; + let block_main_3_1 = add_and_print(3, block_main_2.id(), vec![block_main_2.id()], &mut dag)?; let block_main_4 = add_and_print( 4, block_main_3.id(), vec![block_main_3.id(), block_main_3_1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_5 = add_and_print( - 5, - block_main_4.id(), - vec![block_main_4.id()], - genesis.parent_hash(), &mut dag, )?; + let block_main_5 = add_and_print(5, block_main_4.id(), vec![block_main_4.id()], &mut dag)?; - let block_red_2 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_red_2_1 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_red_2 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; + let block_red_2_1 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; let block_red_3 = add_and_print( 3, block_red_2.id(), vec![block_red_2.id(), block_red_2_1.id()], - genesis.parent_hash(), &mut dag, )?; @@ -1024,22 +950,9 @@ fn test_prune() -> anyhow::Result<()> { // test the pruning logic - let block_main_6 = add_and_print( - 6, - block_main_5.id(), - tips.clone(), - genesis.parent_hash(), - &mut dag, - )?; - let block_main_6_1 = - add_and_print(6, block_main_5.id(), tips, genesis.parent_hash(), &mut dag)?; - let block_fork = add_and_print( - 4, - block_red_3.id(), - vec![block_red_3.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_main_6 = add_and_print(6, block_main_5.id(), tips.clone(), &mut dag)?; + let block_main_6_1 = add_and_print(6, block_main_5.id(), tips, &mut dag)?; + let block_fork = add_and_print(4, block_red_3.id(), vec![block_red_3.id()], &mut 
dag)?; dag.save_dag_state( genesis.id(), @@ -1081,69 +994,25 @@ fn test_verification_blue_block() -> anyhow::Result<()> { dag.init_with_genesis(genesis.clone()).unwrap(); - let block1 = add_and_print( - 1, - genesis.id(), - vec![genesis.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block1 = add_and_print(1, genesis.id(), vec![genesis.id()], &mut dag)?; - let block_main_2 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_3 = add_and_print( - 3, - block_main_2.id(), - vec![block_main_2.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_3_1 = add_and_print( - 3, - block_main_2.id(), - vec![block_main_2.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_main_2 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; + let block_main_3 = add_and_print(3, block_main_2.id(), vec![block_main_2.id()], &mut dag)?; + let block_main_3_1 = add_and_print(3, block_main_2.id(), vec![block_main_2.id()], &mut dag)?; let block_main_4 = add_and_print( 4, block_main_3.id(), vec![block_main_3.id(), block_main_3_1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_main_5 = add_and_print( - 5, - block_main_4.id(), - vec![block_main_4.id()], - genesis.parent_hash(), &mut dag, )?; + let block_main_5 = add_and_print(5, block_main_4.id(), vec![block_main_4.id()], &mut dag)?; - let block_red_2 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_red_2_1 = add_and_print( - 2, - block1.id(), - vec![block1.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_red_2 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; + let block_red_2_1 = add_and_print(2, block1.id(), vec![block1.id()], &mut dag)?; let block_red_3 = add_and_print( 3, block_red_2.id(), vec![block_red_2.id(), block_red_2_1.id()], - genesis.parent_hash(), &mut dag, )?; @@ -1215,7 +1084,6 @@ fn test_verification_blue_block() -> anyhow::Result<()> { 6, block_main_5.id(), vec![block_main_5.id(), block_red_3.id()], - genesis.parent_hash(), &mut dag, )?; assert_eq!( @@ -1240,25 +1108,12 @@ fn test_verification_blue_block() -> anyhow::Result<()> { 6, block_main_5.id(), vec![block_main_5.id(), block_red_3.id()], - genesis.parent_hash(), &mut dag, makeup_ghostdata.clone(), )?; - let block_from_normal = add_and_print( - 7, - normal_block.id(), - vec![normal_block.id()], - genesis.parent_hash(), - &mut dag, - )?; - let block_from_makeup = add_and_print( - 7, - makeup_block.id(), - vec![makeup_block.id()], - genesis.parent_hash(), - &mut dag, - )?; + let block_from_normal = add_and_print(7, normal_block.id(), vec![normal_block.id()], &mut dag)?; + let block_from_makeup = add_and_print(7, makeup_block.id(), vec![makeup_block.id()], &mut dag)?; let ghostdag_data_from_normal = dag .ghostdata_by_hash(block_from_normal.id())? @@ -1288,7 +1143,6 @@ fn test_verification_blue_block() -> anyhow::Result<()> { 8, together_mine.selected_parent, vec![block_from_normal.id(), block_from_makeup.id()], - genesis.parent_hash(), &mut dag, )?; let together_ghost_data = dag.storage.ghost_dag_store.get_data(mine_together.id())?; @@ -1300,7 +1154,6 @@ fn test_verification_blue_block() -> anyhow::Result<()> { 8, together_mine.selected_parent, vec![block_from_normal.id(), block_from_makeup.id()], - genesis.parent_hash(), &mut dag, )?; let together_ghost_data = dag.storage.ghost_dag_store.get_data(mine_together.id())?;
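
Note on the commit flow this patch introduces: both commit() and commit_trusted_block() now stage every write (ghostdata, relations, header, reachability) into a single rocksdb::WriteBatch while holding an upgradable read lock on the reachability data, and only upgrade to a write lock for the short step that publishes the cache update and flushes the batch via write_batch. The sketch below illustrates that lock-then-batch-then-publish pattern in isolation; DagCache, KvBatch and persist() are hypothetical stand-ins for the real StagingReachabilityStore, rocksdb::WriteBatch and FlexiDagStorage::write_batch, so treat it as a minimal model of the idea rather than the project's actual code.

// Minimal sketch of the lock-then-batch-then-publish flow, assuming only the
// parking_lot crate. Names below are illustrative stand-ins, not starcoin APIs.
use parking_lot::{RwLock, RwLockUpgradableReadGuard};
use std::collections::HashMap;

// Stand-in for rocksdb::WriteBatch: an ordered list of key/value puts.
type KvBatch = Vec<(Vec<u8>, Vec<u8>)>;

#[derive(Default)]
struct DagCache {
    // In-memory view that must stay consistent with what the batch persists.
    entries: HashMap<Vec<u8>, Vec<u8>>,
}

struct Store {
    cache: RwLock<DagCache>,
}

impl Store {
    fn commit_block(&self, key: Vec<u8>, value: Vec<u8>) {
        // 1. Take an upgradable read lock: concurrent readers keep going,
        //    other writers wait, and we keep the right to upgrade later.
        let guard = self.cache.upgradable_read();

        // 2. Stage every write into one batch (in the real code: ghostdata,
        //    relations, header and reachability data for the new block).
        let mut batch: KvBatch = Vec::new();
        batch.push((key.clone(), value.clone()));

        // 3. Upgrade to a write lock only for the short publish step.
        let mut cache = RwLockUpgradableReadGuard::upgrade(guard);

        // 4. Publish the cache update and flush the batch together, so
        //    readers never observe a half-committed block.
        cache.entries.insert(key, value);
        persist(batch); // stand-in for self.storage.write_batch(batch)
    }
}

// Stand-in for a single RocksDB write of the whole batch.
fn persist(batch: KvBatch) {
    for (key, value) in batch {
        println!("persist {} value bytes under a {}-byte key", value.len(), key.len());
    }
}

fn main() {
    let store = Store {
        cache: RwLock::new(DagCache::default()),
    };
    store.commit_block(b"block-id".to_vec(), b"ghostdata".to_vec());
}

The point of upgrading the lock late is that the in-memory state and the disk write change inside the same short critical section; the comments in the diff note that if the process aborts between the cache update and the flush, a restart is expected to bring the two back in sync.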