Skip to content

Commit

Permalink
Merge pull request #4250 from EthanYuan/text-optimize
Browse files Browse the repository at this point in the history
chore: CKB Text Optimization
  • Loading branch information
zhangsoledad authored Dec 8, 2023
2 parents 98a583d + b8c2698 commit a00ba13
Show file tree
Hide file tree
Showing 78 changed files with 407 additions and 381 deletions.
12 changes: 6 additions & 6 deletions block-filter/src/filter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,13 +78,13 @@ impl BlockFilter {
let tip_header = snapshot.get_tip_header().expect("tip stored");
let start_number = match snapshot.get_latest_built_filter_data_block_hash() {
Some(block_hash) => {
debug!("Latest built block hash {:#x}", block_hash);
debug!("Hash of the latest created block {:#x}", block_hash);
if snapshot.is_main_chain(&block_hash) {
let header = snapshot
.get_block_header(&block_hash)
.expect("header stored");
debug!(
"Latest built block is main chain, start from {}",
"Latest created block on the main chain, starting from {}",
header.number() + 1
);
header.number() + 1
Expand All @@ -99,7 +99,7 @@ impl BlockFilter {
.expect("parent header stored");
}
debug!(
"Latest built filter data block is fork chain, start from {}",
"Block with the latest built filter data on the forked chain, starting from {}",
header.number()
);
header.number()
Expand All @@ -126,7 +126,7 @@ impl BlockFilter {
let db = self.shared.store();
if db.get_block_filter_hash(&header.hash()).is_some() {
debug!(
"Filter data for block {:#x} already exist, skip build",
"Filter data for block {:#x} already exists. Skip building.",
header.hash()
);
return;
Expand All @@ -144,8 +144,8 @@ impl BlockFilter {
let (filter_data, missing_out_points) = build_filter_data(provider, &transactions);
for out_point in missing_out_points {
warn!(
"Can't find input cell for out_point: {:#x}, \
should only happen in test, skip adding to filter",
"Unable to find the input cell for the out_point: {:#x}. \
This should only happen during testing; skip adding it to the filter.",
out_point
);
}
Expand Down
20 changes: 10 additions & 10 deletions chain/src/chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -345,13 +345,13 @@ impl ChainService {
let block_number = block.number();
let block_hash = block.hash();

debug!("begin processing block: {}-{}", block_number, block_hash);
debug!("Begin processing block: {}-{}", block_number, block_hash);
if block_number < 1 {
warn!("receive 0 number block: 0-{}", block_hash);
warn!("Receive 0 number block: 0-{}", block_hash);
}

self.insert_block(block, switch).map(|ret| {
debug!("finish processing block");
debug!("Finish processing block");
ret
})
}
Expand Down Expand Up @@ -444,7 +444,7 @@ impl ChainService {

let current_total_difficulty = shared_snapshot.total_difficulty().to_owned();
debug!(
"difficulty current = {:#x}, cannon = {:#x}",
"Current difficulty = {:#x}, cannon = {:#x}",
current_total_difficulty, cannon_total_difficulty,
);

Expand All @@ -453,7 +453,7 @@ impl ChainService {

if new_best_block {
debug!(
"new best block found: {} => {:#x}, difficulty diff = {:#x}",
"Newly found best block: {} => {:#x}, difficulty diff = {:#x}",
block.header().number(),
block.header().hash(),
&cannon_total_difficulty - &current_total_difficulty
Expand Down Expand Up @@ -506,7 +506,7 @@ impl ChainService {
fork.detached_proposal_id().clone(),
new_snapshot,
) {
error!("notify update_tx_pool_for_reorg error {}", e);
error!("Notify update_tx_pool_for_reorg error {}", e);
}
}

Expand Down Expand Up @@ -535,7 +535,7 @@ impl ChainService {
if tx_pool_controller.service_started() {
let block_ref: &BlockView = &block;
if let Err(e) = tx_pool_controller.notify_new_uncle(block_ref.as_uncle()) {
error!("notify new_uncle error {}", e);
error!("Notify new_uncle error {}", e);
}
}
}
Expand Down Expand Up @@ -576,7 +576,7 @@ impl ChainService {
let proposal_start =
cmp::max(1, (new_tip + 1).saturating_sub(proposal_window.farthest()));

debug!("reload_proposal_table [{}, {}]", proposal_start, common);
debug!("Reload proposal table [{}, {}]", proposal_start, common);
for bn in proposal_start..=common {
let blk = self
.shared
Expand Down Expand Up @@ -930,13 +930,13 @@ impl ChainService {

fn print_error(&self, b: &BlockView, err: &Error) {
error!(
"block verify error, block number: {}, hash: {}, error: {:?}",
"Block verify error. Block number: {}, hash: {}, error: {:?}",
b.header().number(),
b.header().hash(),
err
);
if log_enabled!(ckb_logger::Level::Trace) {
trace!("block {}", b.data());
trace!("Block {}", b.data());
}
}

Expand Down
2 changes: 1 addition & 1 deletion ckb-bin/src/helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ pub fn deadlock_detection() {
use ckb_util::parking_lot::deadlock;
use std::{thread, time::Duration};

info!("deadlock_detection enable");
info!("deadlock_detection enabled");
let dead_lock_jh = thread::spawn({
let ticker = ckb_channel::tick(Duration::from_secs(10));
let stop_rx = new_crossbeam_exit_rx();
Expand Down
4 changes: 2 additions & 2 deletions ckb-bin/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,9 @@ pub fn run_app(version: Version) -> Result<(), ExitCode> {
handle.drop_guard();

tokio::task::block_in_place(|| {
info!("waiting all tokio tasks exit...");
info!("Waiting for all tokio tasks to exit...");
handle_stop_rx.blocking_recv();
info!("all tokio tasks and threads have exited, ckb shutdown");
info!("All tokio tasks and threads have exited. CKB shutdown");
});
}

Expand Down
30 changes: 16 additions & 14 deletions ckb-bin/src/subcommand/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,13 @@ pub fn init(args: InitArgs) -> Result<(), ExitCode> {
}

if args.chain != "dev" && !args.customize_spec.is_unset() {
eprintln!("Customizing consensus parameters for chain spec only works for dev chains.");
eprintln!("Customizing consensus parameters for the chain spec only works for dev chains.");
return Err(ExitCode::Failure);
}

let exported = Resource::exported_in(&args.root_dir);
if !args.force && exported {
eprintln!("Config files already exist, use --force to overwrite.");
eprintln!("Config files already exist; use --force to overwrite.");

if args.interactive {
let input = prompt("Overwrite config files now? ");
Expand Down Expand Up @@ -103,15 +103,15 @@ pub fn init(args: InitArgs) -> Result<(), ExitCode> {
);
} else if *default_code_hash != *hash {
eprintln!(
"WARN: the default secp256k1 code hash is `{default_code_hash}`, you are using `{hash}`.\n\
It will require `ckb run --ba-advanced` to enable this block assembler"
"WARN: The default secp256k1 code hash is `{default_code_hash}`, but you are using `{hash}`.\n\
To enable this block assembler, use `ckb run --ba-advanced`."
);
} else if args.block_assembler_args.len() != 1
|| args.block_assembler_args[0].len() != SECP256K1_BLAKE160_SIGHASH_ALL_ARG_LEN
{
eprintln!(
"WARN: the block assembler arg is not a valid secp256k1 pubkey hash.\n\
It will require `ckb run --ba-advanced` to enable this block assembler"
"WARN: The block assembler arg is not a valid secp256k1 pubkey hash.\n\
To enable this block assembler, use `ckb run --ba-advanced`."
);
}
}
Expand All @@ -129,7 +129,7 @@ pub fn init(args: InitArgs) -> Result<(), ExitCode> {
)
}
None => {
eprintln!("WARN: mining feature is disabled because of lacking the block assembler config options");
eprintln!("WARN: Mining feature is disabled because the block assembler config options are missing.");
format!(
"# secp256k1_blake160_sighash_all example:\n\
# [block_assembler]\n\
Expand Down Expand Up @@ -175,7 +175,7 @@ pub fn init(args: InitArgs) -> Result<(), ExitCode> {
let target_file = specs_dir.join(format!("{}.toml", args.chain));

if spec_file == "-" {
println!("create specs/{}.toml from stdin", args.chain);
println!("Create specs/{}.toml from stdin", args.chain);
let mut encoded_content = String::new();
io::stdin().read_to_string(&mut encoded_content)?;
let base64_config =
Expand All @@ -185,30 +185,32 @@ pub fn init(args: InitArgs) -> Result<(), ExitCode> {
let spec_content = base64_engine.encode(encoded_content.trim());
fs::write(target_file, spec_content)?;
} else {
println!("cp {} specs/{}.toml", spec_file, args.chain);
println!("copy {} to specs/{}.toml", spec_file, args.chain);
fs::copy(spec_file, target_file)?;
}
} else if args.chain == "dev" {
println!("create {SPEC_DEV_FILE_NAME}");
println!("Create {SPEC_DEV_FILE_NAME}");
let bundled = Resource::bundled(SPEC_DEV_FILE_NAME.to_string());
let kvs = args.customize_spec.key_value_pairs();
let context_spec =
TemplateContext::new("customize", kvs.iter().map(|(k, v)| (*k, v.as_str())));
bundled.export(&context_spec, &args.root_dir)?;
}

println!("create {CKB_CONFIG_FILE_NAME}");
println!("Create {CKB_CONFIG_FILE_NAME}");
Resource::bundled_ckb_config().export(&context, &args.root_dir)?;
println!("create {MINER_CONFIG_FILE_NAME}");
println!("Create {MINER_CONFIG_FILE_NAME}");
Resource::bundled_miner_config().export(&context, &args.root_dir)?;
println!("create {DB_OPTIONS_FILE_NAME}");
println!("Create {DB_OPTIONS_FILE_NAME}");
Resource::bundled_db_options().export(&context, &args.root_dir)?;

let genesis_hash = AppConfig::load_for_subcommand(args.root_dir, cli::CMD_INIT)?
.chain_spec()?
.build_genesis()
.map_err(|err| {
eprintln!("couldn't build genesis from generated chain spec, since {err}");
eprintln!(
"Couldn't build the genesis block from the generated chain spec, since {err}"
);
ExitCode::Failure
})?
.hash();
Expand Down
10 changes: 5 additions & 5 deletions ckb-bin/src/subcommand/migrate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,16 @@ pub fn migrate(args: MigrateArgs) -> Result<(), ExitCode> {

{
let read_only_db = migrate.open_read_only_db().map_err(|e| {
eprintln!("migrate error {e}");
eprintln!("Migration error {e}");
ExitCode::Failure
})?;

if let Some(db) = read_only_db {
let db_status = migrate.check(&db);
if matches!(db_status, Ordering::Greater) {
eprintln!(
"The database is created by a higher version CKB executable binary, \n\
so that the current CKB executable binary couldn't open this database.\n\
"The database was created by a newer version of the CKB executable binary\n\
and cannot be opened by the current binary.\n\
Please download the latest CKB executable binary."
);
return Err(ExitCode::Failure);
Expand Down Expand Up @@ -50,7 +50,7 @@ pub fn migrate(args: MigrateArgs) -> Result<(), ExitCode> {
> ",
);
if input.trim().to_lowercase() != "yes" {
eprintln!("The migration was declined since the user didn't confirm.");
eprintln!("Migration was declined since the user didn't confirm.");
return Err(ExitCode::Failure);
}
} else {
Expand All @@ -62,7 +62,7 @@ pub fn migrate(args: MigrateArgs) -> Result<(), ExitCode> {
}

let bulk_load_db_db = migrate.open_bulk_load_db().map_err(|e| {
eprintln!("migrate error {e}");
eprintln!("Migration error {e}");
ExitCode::Failure
})?;

Expand Down
16 changes: 8 additions & 8 deletions ckb-bin/src/subcommand/replay.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,13 @@ pub fn replay(args: ReplayArgs, async_handle: Handle) -> Result<(), ExitCode> {

if !args.tmp_target.is_dir() {
eprintln!(
"replay error: {:?}",
"Replay error: {:?}",
"The specified path does not exist or is not a directory"
);
return Err(ExitCode::Failure);
}
let tmp_db_dir = tempfile::tempdir_in(args.tmp_target).map_err(|err| {
eprintln!("replay error: {err:?}");
eprintln!("Replay error: {err:?}");
ExitCode::Failure
})?;
{
Expand All @@ -58,7 +58,7 @@ pub fn replay(args: ReplayArgs, async_handle: Handle) -> Result<(), ExitCode> {
}
}
tmp_db_dir.close().map_err(|err| {
eprintln!("replay error: {err:?}");
eprintln!("Replay error: {err:?}");
ExitCode::Failure
})?;

Expand All @@ -72,7 +72,7 @@ fn profile(shared: Shared, mut chain: ChainService, from: Option<u64>, to: Optio
.map(|v| std::cmp::min(v, tip_number))
.unwrap_or(tip_number);
process_range_block(&shared, &mut chain, 1..from);
println!("start profiling, re-process blocks {from}..{to}:");
println!("Start profiling; re-process blocks {from}..{to}:");
let now = std::time::Instant::now();
let tx_count = process_range_block(&shared, &mut chain, from..=to);
let duration = std::time::Instant::now().saturating_duration_since(now);
Expand Down Expand Up @@ -136,7 +136,7 @@ fn sanity_check(shared: Shared, mut chain: ChainService, full_verification: bool
let header = block.header();
if let Err(e) = chain.process_block(Arc::new(block), switch) {
eprintln!(
"replay sanity-check error: {:?} at block({}-{})",
"Replay sanity-check error: {:?} at block({}-{})",
e,
header.number(),
header.hash(),
Expand All @@ -152,19 +152,19 @@ fn sanity_check(shared: Shared, mut chain: ChainService, full_verification: bool

if cursor != tip_header {
eprintln!(
"sanity-check break at block({}-{}), expect tip({}-{})",
"Sanity-check break at block({}-{}); expect tip({}-{})",
cursor.number(),
cursor.hash(),
tip_header.number(),
tip_header.hash(),
);
} else {
println!(
"sanity-check pass, tip({}-{})",
"Sanity-check pass, tip({}-{})",
tip_header.number(),
tip_header.hash()
);
}

println!("replay finishing, please wait...");
println!("Finishing replay; please wait...");
}
4 changes: 2 additions & 2 deletions ckb-bin/src/subcommand/reset_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ pub fn reset_data(args: ResetDataArgs) -> Result<(), ExitCode> {

for dir in target_dirs.iter() {
if dir.exists() {
println!("deleting {}", dir.display());
println!("Deleting {}", dir.display());
if let Some(e) = fs::remove_dir_all(dir).err() {
eprintln!("{e}");
errors_count += 1;
Expand All @@ -60,7 +60,7 @@ pub fn reset_data(args: ResetDataArgs) -> Result<(), ExitCode> {

for file in target_files.iter() {
if file.exists() {
println!("deleting {}", file.display());
println!("Deleting {}", file.display());
if let Some(e) = fs::remove_file(file).err() {
eprintln!("{e}");
errors_count += 1;
Expand Down
8 changes: 4 additions & 4 deletions db-migration/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,15 +62,15 @@ impl Migrations {
}
}
};
debug!("current database version [{}]", db_version);
debug!("Current database version [{}]", db_version);

let latest_version = self
.migrations
.values()
.last()
.unwrap_or_else(|| panic!("should have at least one version"))
.version();
debug!("latest database version [{}]", latest_version);
debug!("Latest database version [{}]", latest_version);

db_version.as_str().cmp(latest_version)
}
Expand Down Expand Up @@ -176,8 +176,8 @@ impl Migrations {
if m.version() < v.as_str() {
error!(
"Database downgrade detected. \
The database schema version is newer than client schema version,\
please upgrade to the newer version"
The database schema version is more recent than the client schema version. \
Please upgrade to the latest client version."
);
return Err(internal_error(
"Database downgrade is not supported".to_string(),
Expand Down
2 changes: 1 addition & 1 deletion error/src/internal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use thiserror::Error;

/// An error with no reason.
#[derive(Error, Debug, Clone, Copy)]
#[error("no reason is provided")]
#[error("No reason provided")]
pub struct SilentError;

/// An error with only a string as the reason.
Expand Down
Loading

0 comments on commit a00ba13

Please sign in to comment.