
Commit

Cargo fmt
sanlee42 committed Sep 20, 2023
1 parent e361e0c commit c23f1b5
Showing 14 changed files with 392 additions and 328 deletions.
38 changes: 23 additions & 15 deletions chain/src/chain.rs
@@ -13,7 +13,7 @@ use starcoin_chain_api::{
verify_block, ChainReader, ChainWriter, ConnectBlockError, EventWithProof, ExcludedTxns,
ExecutedBlock, MintedUncleNumber, TransactionInfoWithProof, VerifiedBlock, VerifyBlockField,
};
use starcoin_consensus::Consensus;
use starcoin_consensus::{BlockDAG, Consensus, FlexiDagStorage};
use starcoin_crypto::hash::PlainCryptoHash;
use starcoin_crypto::HashValue;
use starcoin_executor::VMMetrics;
@@ -22,8 +22,8 @@ use starcoin_open_block::OpenedBlock;
use starcoin_state_api::{AccountStateReader, ChainStateReader, ChainStateWriter};
use starcoin_statedb::ChainStateDB;
use starcoin_storage::flexi_dag::SyncFlexiDagSnapshot;
use starcoin_storage::Store;
use starcoin_storage::storage::CodecKVStore;
use starcoin_storage::Store;
use starcoin_time_service::TimeService;
use starcoin_types::block::BlockIdAndNumber;
use starcoin_types::contract_event::ContractEventInfo;
@@ -64,19 +64,21 @@ pub struct BlockChain {
epoch: Epoch,
vm_metrics: Option<VMMetrics>,
dag_accumulator: Option<MerkleAccumulator>,
dag: BlockDAG,
}

impl BlockChain {
pub fn new(
time_service: Arc<dyn TimeService>,
head_block_hash: HashValue,
storage: Arc<dyn Store>,
dag_store: FlexiDagStorage,
vm_metrics: Option<VMMetrics>,
) -> Result<Self> {
let head = storage
.get_block_by_hash(head_block_hash)?
.ok_or_else(|| format_err!("Can not find block by hash {:?}", head_block_hash))?;
Self::new_with_uncles(time_service, head, None, storage, vm_metrics)
Self::new_with_uncles(time_service, head, None, storage, vm_metrics, dag_store)
}

fn new_with_uncles(
@@ -85,6 +87,7 @@ impl BlockChain {
uncles: Option<HashMap<HashValue, MintedUncleNumber>>,
storage: Arc<dyn Store>,
vm_metrics: Option<VMMetrics>,
dag_store: FlexiDagStorage,
) -> Result<Self> {
let block_info = storage
.get_block_info(head_block.id())?
@@ -108,7 +111,11 @@ impl BlockChain {
)),
None => None,
};
let dag_snapshot_tips = storage.get_accumulator_snapshot_storage().get(head_id)?.map(|snapshot| snapshot.child_hashes);
let dag_snapshot_tips = storage
.get_accumulator_snapshot_storage()
.get(head_id)?
.map(|snapshot| snapshot.child_hashes);
let dag = BlockDAG::new(genesis, 16, dag_store);
let mut chain = Self {
genesis_hash: genesis,
time_service,
@@ -123,11 +130,7 @@ impl BlockChain {
storage.as_ref(),
),
status: ChainStatusWithBlock {
status: ChainStatus::new(
head_block.header.clone(),
block_info,
dag_snapshot_tips,
),
status: ChainStatus::new(head_block.header.clone(), block_info, dag_snapshot_tips),
head: head_block,
},
statedb: chain_state,
Expand All @@ -136,6 +139,7 @@ impl BlockChain {
epoch,
vm_metrics,
dag_accumulator,
dag,
};
watch(CHAIN_WATCH_NAME, "n1251");
match uncles {
@@ -638,21 +642,25 @@ impl BlockChain {
);
Ok(())
}

pub fn dag_parents_in_tips(&self, dag_parents: Vec<HashValue>) -> Result<bool> {
Ok(dag_parents.into_iter().all(|parent| {
match &self.status.status.tips_hash {
Ok(dag_parents
.into_iter()
.all(|parent| match &self.status.status.tips_hash {
Some(tips) => tips.contains(&parent),
None => false,
}
}))
}))
}

pub fn is_head_of_dag_accumulator(&self, next_tips: Vec<HashValue>) -> Result<bool> {
let key = Self::calculate_dag_accumulator_key(next_tips)?;
let next_tips_info = self.storage.get_dag_accumulator_info(key)?;

return Ok(next_tips_info == self.dag_accumulator.as_ref().map(|accumulator| accumulator.get_info()));
return Ok(next_tips_info
== self
.dag_accumulator
.as_ref()
.map(|accumulator| accumulator.get_info()));
}
}
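The hunks above thread a FlexiDagStorage handle into BlockChain::new and new_with_uncles and keep a BlockDAG instance on the chain itself. A minimal caller-side sketch of the new constructor, following the parameter order shown in this diff, might look like the following; how dag_store is opened (the helper name and path) is an assumption for illustration, not taken from this commit.

// Hedged sketch: constructing a BlockChain with the new FlexiDagStorage parameter.
// `open_dag_storage` is a hypothetical helper standing in for however the node
// actually creates its FlexiDagStorage; only the argument order of BlockChain::new
// (time_service, head hash, storage, dag_store, vm_metrics) comes from this diff.
let dag_store: FlexiDagStorage = open_dag_storage("./flexidag")?;
let chain = BlockChain::new(
    time_service.clone(),
    head_block_hash,
    storage.clone(),
    dag_store,
    None, // vm_metrics
)?;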

39 changes: 9 additions & 30 deletions consensus/src/dag/blockdag.rs
@@ -80,20 +80,22 @@ impl BlockDAG {
self.relations_store
.insert(Hash::new(ORIGIN), BlockHashes::new(vec![]))
.unwrap();
// let _ = self.commit_header(&self.genesis.clone())?;
self.commit_header(&self.genesis.clone())?;
Ok(())
}

pub fn commit_header_inner(
&mut self,
ghostdag_data: &GhostdagData,
header: &Header,
) -> anyhow::Result<()> {
pub fn commit_header(&mut self, header: &Header) -> anyhow::Result<()> {
// Generate ghostdag data
let parents_hash = header.parents_hash();
let ghostdag_data = if header.hash() != self.genesis.hash() {
self.ghostdag_manager.ghostdag(parents_hash)
} else {
self.ghostdag_manager.genesis_ghostdag_data()
};
// Store ghostdata
self.ghostdag_store
.insert(header.hash(), Arc::new(ghostdag_data.clone()))?;
.insert(header.hash(), Arc::new(ghostdag_data.clone()))
.unwrap();

// Update reachability store
let mut reachability_store = self.reachability_store.clone();
@@ -118,29 +120,6 @@ impl BlockDAG {
Ok(())
}

pub fn commit_header(&mut self, header: &Header) -> anyhow::Result<ColoringOutput> {
let ghostdag_data = if header.hash() != self.genesis.hash() {
self.ghostdag_manager.ghostdag(header.parents_hash())
} else {
self.ghostdag_manager.genesis_ghostdag_data()
};

match self.commit_header_inner(&ghostdag_data, header) {
anyhow::Result::Ok(()) => (),
Err(error) => {
let error_result = error.downcast::<StoreError>()?;
match error_result {
StoreError::KeyAlreadyExists(_) => (), // if the header existed already, we check its color
_ => {
return anyhow::Result::Err(error_result.into());
}
}
}
}
Ok(self
.ghostdag_manager
.check_blue_candidate(&ghostdag_data, header.hash()))
}
fn is_in_dag(&self, _hash: Hash) -> anyhow::Result<bool> {
return Ok(true);
}
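With the old commit_header/commit_header_inner pair collapsed into a single commit_header that derives the ghostdag data itself (and no longer returns a ColoringOutput), callers now just pass the header. A rough usage sketch follows, assuming the constructor call seen in chain.rs (BlockDAG::new(genesis, 16, dag_store)); the init method name is inferred from the init body shown above and is an assumption.

// Hedged sketch of the simplified flow; `init_with_genesis` is assumed from the
// init body above, and k = 16 mirrors the chain.rs call site.
let mut dag = BlockDAG::new(genesis, 16, dag_store);
dag.init_with_genesis()?;         // inserts the ORIGIN relation and commits the genesis header
dag.commit_header(&new_header)?;  // ghostdag data is now generated inside commit_header

Note that the removed variant swallowed StoreError::KeyAlreadyExists when a header had already been committed; with the new version a duplicate commit presumably surfaces as an error, so callers that re-commit headers may need to handle that case themselves.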
4 changes: 2 additions & 2 deletions scripts/import_snapshot.sh
@@ -40,7 +40,7 @@ function import_snapshot() {
from_dir=$2
to_dir=$3

download "$net" "$from_dir"
#download "$net" "$from_dir"

./starcoin_db_exporter apply-snapshot -i "$from_dir"/snapshot -n "$net" -o "$to_dir"
case_status=$?
@@ -66,4 +66,4 @@ case $net in
echo "$net not supported"
usage
;;
esac
esac
5 changes: 4 additions & 1 deletion storage/src/lib.rs
@@ -740,7 +740,10 @@ impl SyncFlexiDagStore for Storage {

// for block chain
new_tips.iter().try_fold((), |_, block_id| {
if let Some(t) = self.flexi_dag_storage.get_hashes_by_hash(block_id.clone())? {
if let Some(t) = self
.flexi_dag_storage
.get_hashes_by_hash(block_id.clone())?
{
if t != snapshot {
bail!("the key {} should not exists", block_id);
}
23 changes: 16 additions & 7 deletions sync/src/block_connector/block_connector_service.rs
@@ -4,9 +4,9 @@
use crate::block_connector::{ExecuteRequest, ResetRequest, WriteBlockChainService};
use crate::sync::{CheckSyncEvent, SyncService};
use crate::tasks::{BlockConnectedEvent, BlockDiskCheckEvent};
use anyhow::{format_err, Result, Ok};
use anyhow::{format_err, Ok, Result};
use network_api::PeerProvider;
use starcoin_chain_api::{ConnectBlockError, WriteableChainService, ChainReader};
use starcoin_chain_api::{ChainReader, ConnectBlockError, WriteableChainService};
use starcoin_config::{NodeConfig, G_CRATE_VERSION};
use starcoin_consensus::BlockDAG;
use starcoin_executor::VMMetrics;
@@ -16,11 +16,13 @@ use starcoin_service_registry::{
ActorService, EventHandler, ServiceContext, ServiceFactory, ServiceHandler,
};
use starcoin_storage::{BlockStore, Storage};
use starcoin_sync_api::{PeerNewBlock, NewBlockChainRequest};
use starcoin_sync_api::{NewBlockChainRequest, PeerNewBlock};
use starcoin_txpool::TxPoolService;
use starcoin_types::block::ExecutedBlock;
use starcoin_types::sync_status::SyncStatus;
use starcoin_types::system_events::{MinedBlock, SyncStatusChangeEvent, SystemShutdown, NewHeadBlock};
use starcoin_types::system_events::{
MinedBlock, NewHeadBlock, SyncStatusChangeEvent, SystemShutdown,
};
use std::sync::{Arc, Mutex};
use sysinfo::{DiskExt, System, SystemExt};

@@ -222,7 +224,9 @@ impl EventHandler<Self, PeerNewBlock> for BlockConnectorService {
match connect_error {
ConnectBlockError::FutureBlock(block) => {
//TODO cache future block
if let std::result::Result::Ok(sync_service) = ctx.service_ref::<SyncService>() {
if let std::result::Result::Ok(sync_service) =
ctx.service_ref::<SyncService>()
{
info!(
"BlockConnector try connect future block ({:?},{}), peer_id:{:?}, notify Sync service check sync.",
block.id(),
@@ -285,8 +289,13 @@ impl ServiceHandler<Self, NewBlockChainRequest> for BlockConnectorService {
msg: NewBlockChainRequest,
ctx: &mut ServiceContext<BlockConnectorService>,
) -> Result<()> {
let (new_branch, dag_parents, next_tips) = self.chain_service.switch_new_main(msg.new_head_block)?;
ctx.broadcast(NewHeadBlock(Arc::new(new_branch.head_block()), Some(dag_parents), Some(next_tips)));
let (new_branch, dag_parents, next_tips) =
self.chain_service.switch_new_main(msg.new_head_block)?;
ctx.broadcast(NewHeadBlock(
Arc::new(new_branch.head_block()),
Some(dag_parents),
Some(next_tips),
));
Ok(())
}
}
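The new NewBlockChainRequest handler above switches the main chain and then broadcasts a NewHeadBlock carrying the DAG parents and next tips. A hedged sketch of how another service might issue the request, assuming the usual starcoin_service_registry send flow (only the new_head_block field name is taken from this diff):

// Hedged sketch: asking the connector to switch to a new main chain head.
// `service_ref` / `send` follow the registry patterns used elsewhere in this file;
// error handling is simplified for illustration.
let connector = ctx.service_ref::<BlockConnectorService>()?;
connector
    .send(NewBlockChainRequest { new_head_block: target_block_id })
    .await??;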
2 changes: 1 addition & 1 deletion sync/src/block_connector/mod.rs
@@ -54,4 +54,4 @@ pub struct BlockConnectedRequest {

impl ServiceRequest for BlockConnectedRequest {
type Response = anyhow::Result<()>;
}
}
5 changes: 2 additions & 3 deletions sync/src/block_connector/test_write_dag_block_chain.rs
@@ -48,8 +48,7 @@ pub fn gen_dag_blocks(
}
}

let result = writeable_block_chain_service
.execute_dag_block_pool();
let result = writeable_block_chain_service.execute_dag_block_pool();
let result = result.unwrap();
match result {
super::write_block_chain::ConnectOk::Duplicate(block)
@@ -159,7 +158,7 @@ async fn test_block_chain_switch_main() {
.get_main()
.current_header()
.id(),
last_block.unwrap()
last_block.unwrap()
);

last_block = gen_fork_dag_block_chain(
