34 changes: 34 additions & 0 deletions .github/workflows/redbit.yml

@@ -16,7 +16,37 @@ permissions:
  contents: write

jobs:
  check_skip:
    runs-on: ubuntu-latest
    outputs:
      skip: ${{ steps.check.outputs.skip }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 2

      - name: Determine skip token
        id: check
        run: |
          SKIP=false

          # push event: github.event.head_commit.message exists
          if [ "${{ github.event_name }}" = "push" ]; then
            MSG="${{ github.event.head_commit.message }}"
            if echo "$MSG" | grep -qiE '\[skip ci\]|\[ci skip\]'; then
              SKIP=true
            fi
          else
            MSG=$(git log -1 --pretty=%B)
            if echo "$MSG" | grep -qiE '\[skip ci\]|\[ci skip\]'; then
              SKIP=true
            fi
          fi

          # expose the result so needs.check_skip.outputs.skip resolves below
          echo "skip=$SKIP" >> "$GITHUB_OUTPUT"

  test:
    needs: check_skip
    if: ${{ needs.check_skip.outputs.skip != 'true' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
@@ -55,6 +85,8 @@ jobs:
        run: |
          cargo test --package demo --features integration,ci
          cargo test --package btc --features integration,ci
          cargo test --package ltc --features integration,ci
          cargo test --package bch --features integration,ci
          cargo test --package cardano --features integration,ci
          cargo test --package ergo --features integration,ci

@@ -78,6 +110,8 @@ jobs:
        run: |
          RUST_LOG=error cargo bench -q --package demo --bench demo_benchmark -- --quiet
          RUST_LOG=error cargo bench -q --package btc --bench btc_benchmark -- --quiet
          RUST_LOG=error cargo bench -q --package ltc --bench ltc_benchmark -- --quiet
          RUST_LOG=error cargo bench -q --package bch --bench bch_benchmark -- --quiet
          RUST_LOG=error cargo bench -q --package cardano --bench cardano_benchmark -- --quiet
          RUST_LOG=error cargo bench -q --package ergo --bench ergo_benchmark -- --quiet

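The `check_skip` gate reduces to a case-insensitive grep over the latest commit message. A standalone sketch for trying the pattern outside the workflow (not part of this diff):

```
MSG=$(git log -1 --pretty=%B)
if echo "$MSG" | grep -qiE '\[skip ci\]|\[ci skip\]'; then
  echo "skip=true"
else
  echo "skip=false"
fi
```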
2 changes: 2 additions & 0 deletions Cargo.toml

@@ -22,6 +22,8 @@ members = [
"chains/btc",
"chains/cardano",
"chains/ergo",
"chains/ltc",
"chains/bch",
]
default-members = ["macros", "redbit", "chain"]

53 changes: 53 additions & 0 deletions chains/bch/Cargo.toml
@@ -0,0 +1,53 @@
[package]
edition = "2024"
name = "bch"
version.workspace = true

[features]
default = ["chain"]
integration = []
bench = []
ci = []
chain = ["redbit/chain"]
tracing = ["chain/tracing"]

[[bin]]
name = "bch"
path = "src/main.rs"

[lib]
doctest = false

[dev-dependencies]
criterion = { version = "0.7.0", features = ["async_tokio", "html_reports"] }
axum-test = "17.3.0"

[[bench]]
name = "bch_benchmark"
harness = false

[dependencies]
redbit = { path = "../../redbit" }
chain = { path = "../../chain" }
tokio = { version = "1.45.1", features = ["full", "tracing"] }
tokio-stream = { version = "0.1.17", features = ["sync"] }
tokio-util = "0.7.16"
config = "0.15.11"
bitcoin = "0.32.0"
async-trait = "0.1.81"
futures = "0.3.30"
serde = { version = "1.0.219", features = ["derive"] }
bs58 = { version = "0.5", features = ["check"] }
axum = { version = "0.8.4", features = ["default", "macros"] }
thiserror = "2.0.12"
chrono = "0.4.38"
anyhow = "1.0.80"
bincode = "2.0.1"
serde_with = { version = "3.14.0", features = ["hex", "chrono_0_4", "base64", "time_0_3"] }
serde_json = "1.0.142"
hex = "0.4.3"
reqwest = { version = "0.12.23", default-features = false, features = [
"json",
"blocking",
] }
bitcoincore-rpc = "0.19.0"
37 changes: 37 additions & 0 deletions chains/bch/README.md
@@ -0,0 +1,37 @@
### Bitcoin Cash

Download Bitcoin Cash Node:
```
wget https://github.com/bitcoin-cash-node/bitcoin-cash-node/releases/download/v28.0.1/bitcoin-cash-node-28.0.1-x86_64-linux-gnu.tar.gz
```
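
Extract it (the tarball's top-level directory name and the `/opt/bitcoincash` layout are assumptions chosen to match the paths used below):
```
tar -xzf bitcoin-cash-node-28.0.1-x86_64-linux-gnu.tar.gz
sudo mkdir -p /opt/bitcoincash
sudo cp -r bitcoin-cash-node-28.0.1/* /opt/bitcoincash/
```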

Configure it:
```
$ cat /opt/bitcoincash/bitcoin.conf
server=1
rest=1
rpcbind=127.0.0.1
rpcallowip=127.0.0.1
rpcuser=foo
rpcpassword=bar
disablewallet=1
maxconnections=32
rpcport=7332
port=7333
```

Export RPC credentials:
```
export BITCOINCASH__rpc_user="foo"
export BITCOINCASH__rpc_password="bar"
```

Start the node:
```
/opt/bitcoincash/bin/bitcoind -datadir=/opt/bitcoincash -conf=/opt/bitcoincash/bitcoin.conf -daemon=0 -printtoconsole
```

Start syncing:
```
cargo run --release
```
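
Once the indexer is running, `config/settings.toml` enables an HTTP API on `0.0.0.0:3035`; a quick reachability check (the root path here is only a hypothetical smoke test, not a documented route):
```
curl -i http://127.0.0.1:3035/
```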
96 changes: 96 additions & 0 deletions chains/bch/benches/bch_benchmark.rs
@@ -0,0 +1,96 @@
use bch::block_provider::BchBlockProvider;
use bch::model_v1::{BlockChain, Input, Utxo};
use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput};
use redbit::{info, Durability, StorageOwner, WriteTxContext};
use std::{sync::Arc, time::Duration};

fn criterion_benchmark(c: &mut Criterion) {
let rt = tokio::runtime::Runtime::new().unwrap();
let (storage_owner, storage) = rt
.block_on(StorageOwner::temp("bch_benchmark", 1, true))
.expect("Failed to create temporary storage");

let chain = BlockChain::new(Arc::clone(&storage));

// Load canned Bitcoin Cash blocks from JSON (Core-like format with tx hex)
let (_b_small, small_block) = BchBlockProvider::block_from_file("small", 223, 1);
let (_b_avg, avg_block) = BchBlockProvider::block_from_file("avg", 274_015, 161);
let (_b_huge, huge_block) = BchBlockProvider::block_from_file("huge", 274_017, 678);

info!("Initiating processing");
let processed_small_block =
BchBlockProvider::process_block_pure(&small_block).expect("Failed to process small_block");
let processed_avg_block =
BchBlockProvider::process_block_pure(&avg_block).expect("Failed to process avg_block");
let processed_huge_block =
BchBlockProvider::process_block_pure(&huge_block).expect("Failed to process huge_block");

info!("Validating to avoid unexpected write amplification");
redbit::utils::assert_sorted(&processed_small_block.transactions, "Txs", |tx| &tx.id);
for (idx, tx) in processed_small_block.transactions.iter().enumerate() {
redbit::utils::assert_sorted(&tx.inputs, &format!("Tx[{idx}].inputs"), |inp: &Input| &inp.id);
redbit::utils::assert_sorted(&tx.utxos, &format!("Tx[{idx}].utxos"), |u: &Utxo| &u.id);
}

info!("Initiating indexing");
let mut group = c.benchmark_group("bch_chain");
group.throughput(Throughput::Elements(1));
group.warm_up_time(Duration::from_millis(50));
group.measurement_time(Duration::from_millis(300));
group.sample_size(10);
group.bench_function(BenchmarkId::from_parameter("small_block_processing"), |bencher| {
bencher.iter(|| BchBlockProvider::process_block_pure(&small_block).expect("Failed to process small_block"));
});
group.sample_size(10);
group.bench_function(BenchmarkId::from_parameter("avg_block_processing"), |bencher| {
bencher.iter(|| BchBlockProvider::process_block_pure(&avg_block).expect("Failed to process avg_block"));
});
group.sample_size(10);
group.bench_function(BenchmarkId::from_parameter("huge_block_processing"), |bencher| {
bencher.iter(|| BchBlockProvider::process_block_pure(&huge_block).expect("Failed to process huge_block"));
});

group.sample_size(10);
let indexing_context = chain.new_indexing_ctx().expect("Failed to create indexing context");
group.bench_function(BenchmarkId::from_parameter("small_block_persistence"), |bencher| {
bencher.iter_batched_ref(
|| vec![processed_small_block.clone()],
|blocks| {
chain
.store_blocks(&indexing_context, std::mem::take(blocks), Durability::None)
.expect("Failed to persist block");
},
BatchSize::LargeInput,
);
});
group.sample_size(10);
group.bench_function(BenchmarkId::from_parameter("avg_block_persistence"), |bencher| {
bencher.iter_batched_ref(
|| vec![processed_avg_block.clone()],
|blocks| {
chain
.store_blocks(&indexing_context, std::mem::take(blocks), Durability::None)
.expect("Failed to persist block");
},
BatchSize::LargeInput,
);
});
group.sample_size(10);
group.bench_function(BenchmarkId::from_parameter("huge_block_persistence"), |bencher| {
bencher.iter_batched_ref(
|| vec![processed_huge_block.clone()],
|blocks| {
chain
.store_blocks(&indexing_context, std::mem::take(blocks), Durability::None)
.expect("Failed to persist block");
},
BatchSize::LargeInput,
);
});
indexing_context.stop_writing().unwrap();
drop(storage_owner);
group.finish();
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
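
To run this benchmark locally, mirror the CI invocation from the workflow above:

```
RUST_LOG=error cargo bench -q --package bch --bench bch_benchmark -- --quiet
```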
1 change: 1 addition & 0 deletions chains/bch/blocks/avg_block.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions chains/bch/blocks/huge_block.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions chains/bch/blocks/small_block.json
@@ -0,0 +1 @@
{"hash":"00000000b9938ade2272609da8a83179d919aa80774c5f2577168dfb1eb51a8f","confirmations":280273,"size":216,"height":223,"version":1,"versionHex":"00000001","merkleroot":"2038fd408bb42f6c8a0484c82e6ad2d11756cf7d6b47716ed663fa2addeeb1b4","tx":[{"txid":"2038fd408bb42f6c8a0484c82e6ad2d11756cf7d6b47716ed663fa2addeeb1b4","hash":"2038fd408bb42f6c8a0484c82e6ad2d11756cf7d6b47716ed663fa2addeeb1b4","version":1,"size":135,"locktime":0,"vin":[{"coinbase":"04ffff001d02a300","sequence":4294967295}],"vout":[{"value":50.00000000,"n":0,"scriptPubKey":{"asm":"04840625cc27ae2cf9792d2a42ad091e729ee6ae09784126ca676453d9196bfcc88bf7c8958a8bebbae043f7863d2f9254d81222de0263ae1283a3ec28cf9e41bd OP_CHECKSIG","hex":"4104840625cc27ae2cf9792d2a42ad091e729ee6ae09784126ca676453d9196bfcc88bf7c8958a8bebbae043f7863d2f9254d81222de0263ae1283a3ec28cf9e41bdac","reqSigs":1,"type":"pubkey","addresses":["bitcoincash:qzj43sqr8kzkzgkpestcvjlf73qq9zm4vuluus68cd"]}}],"hex":"01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a300ffffffff0100f2052a01000000434104840625cc27ae2cf9792d2a42ad091e729ee6ae09784126ca676453d9196bfcc88bf7c8958a8bebbae043f7863d2f9254d81222de0263ae1283a3ec28cf9e41bdac00000000"}],"time":1231771213,"mediantime":1231766966,"nonce":2776679704,"bits":"1d00ffff","difficulty":1,"chainwork":"000000000000000000000000000000000000000000000000000000e000e000e0","nTx":1,"previousblockhash":"000000004da68466ee873c7095c766baf62df93a16df579350e01e7f78911616","nextblockhash":"000000003c29d5cb24584ddb079dd51cf9ecd02a96de0aecf05f8c2bcfa709f2"}
4 changes: 4 additions & 0 deletions chains/bch/config/bch.toml
@@ -0,0 +1,4 @@
api_host = "http://127.0.0.1:7332"
fetching_parallelism = "high" # low / mild / high
rpc_user = "" # change this either in .env file or export is as env var
rpc_password = ""
Comment on lines +3 to +4

Copilot AI (Nov 18, 2025):

Security concern: The configuration file contains empty RPC credentials with an instruction to change them, but this could lead to accidental deployment with empty credentials. Consider either providing secure placeholder values or making these required fields that fail on startup if empty.

Suggested change:

- rpc_user = "" # change this in a .env file or export it as an env var
- rpc_password = ""
+ rpc_user = "CHANGEME_USER" # MUST change this in .env file or export as env var before deployment
+ rpc_password = "CHANGEME_PASSWORD" # MUST change this in .env file or export as env var before deployment
16 changes: 16 additions & 0 deletions chains/bch/config/settings.toml
@@ -0,0 +1,16 @@
[indexer]
name = "bch"
db_path = "/opt/.chain"
enable = true
node_sync_interval_s = 1 # 0 means only catching up but no periodic sync
fork_detection_heights = 100 # max 255, how many blocks from tip to check for forks
db_cache_size_gb = "mild" # off / tiny / low / mild / high / ultra
min_entity_batch_size = 1_000_000 # sum of inputs + outputs
max_entity_buffer_kb_size = 8192 # total size of fetched blocks in KB to buffer
non_durable_batches = 50 # how many batches to commit non-durably before forcing a durable commit
processing_parallelism = "high" # off / tiny / low / mild / high / ultra
validation_from_height = 0

[http]
enable = true
bind_address = "0.0.0.0:3035"