diff --git a/.github/workflows/benchmark-prs.yml b/.github/workflows/benchmark-prs.yml
index 3d3aa4bd77..af1b3ce0fe 100644
--- a/.github/workflows/benchmark-prs.yml
+++ b/.github/workflows/benchmark-prs.yml
@@ -9,386 +9,386 @@ env:
NODE_DATA_PATH: /home/runner/.local/share/safe/node
jobs:
- benchmark-cli:
- name: Compare sn_cli benchmarks to main
- # right now only ubuntu, running on multiple systems would require many pushes...\
- # perhaps this can be done with one consolidation action in the future, pulling down all results and pushing
- # once to the branch..
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - uses: dtolnay/rust-toolchain@stable
- with:
- components: rustfmt, clippy
-
- - uses: Swatinem/rust-cache@v2
- continue-on-error: true
-
- ########################
- ### Setup ###
- ########################
- - run: cargo install cargo-criterion
-
- - name: install ripgrep
- run: sudo apt-get -y install ripgrep
-
- - name: Download 95mb file to be uploaded with the safe client
- shell: bash
- run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip
-
- # As normal user won't care much about initial client startup,
- # but be more alerted on communication speed during transmission.
- # Meanwhile the criterion testing code includes the client startup as well,
- # it will be better to execute bench test with `local-discovery`,
- # to make the measurement results reflect speed improvement or regression more accurately.
- - name: Build sn bins
- run: cargo build --release --bin safe --bin safenode --features local-discovery
- timeout-minutes: 30
-
- - name: Build faucet bin
- run: cargo build --release --bin faucet --features local-discovery --features gifting --no-default-features
- timeout-minutes: 30
-
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- env:
- SN_LOG: "all"
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ubuntu-latest
- build: true
-
- - name: Check SAFE_PEERS was set
- shell: bash
- run: echo "The SAFE_PEERS variable has been set to $SAFE_PEERS"
-
- #########################
- ### Upload large file ###
- #########################
-
- - name: Fund cli wallet
- shell: bash
- run: target/release/safe --log-output-dest=data-dir wallet get-faucet 127.0.0.1:8000
- env:
- SN_LOG: "all"
-
- - name: Start a client instance to compare memory usage
- shell: bash
- run: target/release/safe --log-output-dest=data-dir files upload the-test-data.zip --retry-strategy quick
- env:
- SN_LOG: "all"
-
- - name: Cleanup uploaded_files folder to avoid pollute download benchmark
- shell: bash
- run: rm -rf $CLIENT_DATA_PATH/uploaded_files
-
- ###########################
- ### Client Mem Analysis ###
- ###########################
-
- - name: Check client memory usage
- shell: bash
- run: |
- client_peak_mem_limit_mb="1024" # mb
- client_avg_mem_limit_mb="512" # mb
-
- peak_mem_usage=$(
- rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/{print $2}' |
- sort -n |
- tail -n 1
- )
- echo "Peak memory usage: $peak_mem_usage MB"
- if (( $(echo "$peak_mem_usage > $client_peak_mem_limit_mb" | bc -l) )); then
- echo "Client peak memory usage exceeded threshold: $client_peak_mem_limit_mb MB"
- exit 1
- fi
-
- total_mem=$(
- rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/ {sum += $2} END {printf "%.0f\n", sum}'
- )
- num_of_times=$(
- rg "\"memory_used_mb\"" $CLIENT_DATA_PATH/logs --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "num_of_times: $num_of_times"
- echo "Total memory is: $total_mem"
- average_mem=$(($total_mem/$(($num_of_times))))
- echo "Average memory is: $average_mem"
-
- if (( $(echo "$average_mem > $client_avg_mem_limit_mb" | bc -l) )); then
- echo "Client average memory usage exceeded threshold: $client_avg_mem_limit_mb MB"
- exit 1
- fi
- # Write the client memory usage to a file
- echo '[
- {
- "name": "client-peak-memory-usage-during-upload",
- "value": '$peak_mem_usage',
- "unit": "MB"
- },
- {
- "name": "client-average-memory-usage-during-upload",
- "value": '$average_mem',
- "unit": "MB"
- }
- ]' > client_memory_usage.json
-
- - name: check client_memory_usage.json
- shell: bash
- run: cat client_memory_usage.json
-
- - name: Alert for client memory usage
- uses: benchmark-action/github-action-benchmark@v1
- with:
- name: "Memory Usage of Client during uploading large file"
- tool: "customSmallerIsBetter"
- output-file-path: client_memory_usage.json
- # Where the previous data file is stored
- external-data-json-path: ./cache/client-mem-usage.json
- # Workflow will fail when an alert happens
- fail-on-alert: true
- # GitHub API token to make a commit comment
- github-token: ${{ secrets.GITHUB_TOKEN }}
- # Enable alert commit comment
- comment-on-alert: true
- # 200% regression will result in alert
- alert-threshold: "200%"
- # Enable Job Summary for PRs
- summary-always: true
-
- ########################
- ### Benchmark ###
- ########################
- - name: Bench `safe` cli
- shell: bash
- # Criterion outputs the actual bench results to stderr "2>&1 tee output.txt" takes stderr,
- # passes to tee which displays it in the terminal and writes to output.txt
- run: |
- cargo criterion --features=local-discovery --message-format=json 2>&1 -p sn_cli | tee -a output.txt
- cat output.txt | rg benchmark-complete | jq -s 'map({
- name: (.id | split("/"))[-1],
- unit: "MiB/s",
- value: ((if .throughput[0].unit == "KiB/s" then (.throughput[0].per_iteration / (1024*1024*1024)) else (.throughput[0].per_iteration / (1024*1024)) end) / (.mean.estimate / 1e9))
- })' > files-benchmark.json
- timeout-minutes: 15
-
- - name: Confirming the number of files uploaded and downloaded during the benchmark test
- shell: bash
- run: |
- ls -l $CLIENT_DATA_PATH
- ls -l $CLIENT_DATA_PATH/uploaded_files
- ls -l $CLIENT_DATA_PATH/safe_files
-
- - name: Store benchmark result
- uses: benchmark-action/github-action-benchmark@v1
- with:
- # What benchmark tool the output.txt came from
- tool: "customBiggerIsBetter"
- output-file-path: files-benchmark.json
- # Where the previous data file is stored
- external-data-json-path: ./cache/benchmark-data.json
- # Workflow will fail when an alert happens
- fail-on-alert: true
- # GitHub API token to make a commit comment
- github-token: ${{ secrets.GITHUB_TOKEN }}
- # Enable alert commit comment
- comment-on-alert: true
- # 200% regression will result in alert
- alert-threshold: "200%"
- # Enable Job Summary for PRs
- summary-always: true
-
- - name: Start a client to carry out download to output the logs
- shell: bash
- run: target/release/safe --log-output-dest=data-dir files download --retry-strategy quick
-
- - name: Start a client to simulate criterion upload
- shell: bash
- run: |
- ls -l target/release
- target/release/safe --log-output-dest=data-dir files upload target/release/faucet --retry-strategy quick
-
- #########################
- ### Stop Network ###
- #########################
-
- - name: Stop the local network
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: safe_test_logs_benchmark
- platform: ubuntu-latest
- build: true
-
- - name: Upload Faucet folder
- uses: actions/upload-artifact@main
- with:
- name: faucet_folder
- path: /home/runner/.local/share/safe/test_faucet
- continue-on-error: true
- if: always()
-
- #########################
- ### Node Mem Analysis ###
- #########################
-
- # The large file uploaded will increase node's peak mem usage a lot
- - name: Check node memory usage
- shell: bash
- run: |
- node_peak_mem_limit_mb="250" # mb
- peak_mem_usage=$(
- rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/{print $2}' |
- sort -n |
- tail -n 1
- )
-
- echo "Memory usage: $peak_mem_usage MB"
- if (( $(echo "$peak_mem_usage > $node_peak_mem_limit_mb" | bc -l) )); then
- echo "Node memory usage exceeded threshold: $peak_mem_usage MB"
- exit 1
- fi
- # Write the node memory usage to a file
- echo '[
- {
- "name": "node-memory-usage-through-safe-benchmark",
- "value": '$peak_mem_usage',
- "unit": "MB"
- }
- ]' > node_memory_usage.json
-
- - name: check node_memory_usage.json
- shell: bash
- run: cat node_memory_usage.json
-
- - name: Alert for node memory usage
- uses: benchmark-action/github-action-benchmark@v1
- with:
- tool: "customSmallerIsBetter"
- output-file-path: node_memory_usage.json
- # Where the previous data file is stored
- external-data-json-path: ./cache/node-mem-usage.json
- # Workflow will fail when an alert happens
- fail-on-alert: true
- # GitHub API token to make a commit comment
- github-token: ${{ secrets.GITHUB_TOKEN }}
- # Enable alert commit comment
- comment-on-alert: true
- # Comment on the PR
- comment-always: true
- # 200% regression will result in alert
- alert-threshold: "200%"
- # Enable Job Summary for PRs
- summary-always: true
-
- ###########################################
- ### Swarm_driver handling time Analysis ###
- ###########################################
-
- - name: Check swarm_driver handling time
- shell: bash
- run: |
- num_of_times=$(
- rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "Number of long cmd handling times: $num_of_times"
- total_long_handling_ms=$(
- rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
- awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
- )
- echo "Total cmd long handling time is: $total_long_handling_ms ms"
- average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
- echo "Average cmd long handling time is: $average_handling_ms ms"
- total_long_handling=$(($total_long_handling_ms))
- total_num_of_times=$(($num_of_times))
- num_of_times=$(
- rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "Number of long event handling times: $num_of_times"
- total_long_handling_ms=$(
- rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
- awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
- )
- echo "Total event long handling time is: $total_long_handling_ms ms"
- average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
- echo "Average event long handling time is: $average_handling_ms ms"
- total_long_handling=$(($total_long_handling_ms+$total_long_handling))
- total_num_of_times=$(($num_of_times+$total_num_of_times))
- average_handling_ms=$(($total_long_handling/$(($total_num_of_times))))
- echo "Total swarm_driver long handling times is: $total_num_of_times"
- echo "Total swarm_driver long handling duration is: $total_long_handling ms"
- echo "Total average swarm_driver long handling duration is: $average_handling_ms ms"
- total_num_of_times_limit_hits="30000" # hits
- total_long_handling_limit_ms="400000" # ms
- average_handling_limit_ms="20" # ms
- if (( $(echo "$total_num_of_times > $total_num_of_times_limit_hits" | bc -l) )); then
- echo "Swarm_driver long handling times exceeded threshold: $total_num_of_times hits"
- exit 1
- fi
- if (( $(echo "$total_long_handling > $total_long_handling_limit_ms" | bc -l) )); then
- echo "Swarm_driver total long handling duration exceeded threshold: $total_long_handling ms"
- exit 1
- fi
- if (( $(echo "$average_handling_ms > $average_handling_limit_ms" | bc -l) )); then
- echo "Swarm_driver average long handling time exceeded threshold: $average_handling_ms ms"
- exit 1
- fi
-
- # Write the node memory usage to a file
- echo '[
- {
- "name": "swarm_driver long handling times",
- "value": '$total_num_of_times',
- "unit": "hits"
- },
- {
- "name": "swarm_driver long handling total_time",
- "value": '$total_long_handling',
- "unit": "ms"
- },
- {
- "name": "swarm_driver average long handling time",
- "value": '$average_handling_ms',
- "unit": "ms"
- }
- ]' > swarm_driver_long_handlings.json
-
- - name: check swarm_driver_long_handlings.json
- shell: bash
- run: cat swarm_driver_long_handlings.json
-
- - name: Alert for swarm_driver long handlings
- uses: benchmark-action/github-action-benchmark@v1
- with:
- tool: "customSmallerIsBetter"
- output-file-path: swarm_driver_long_handlings.json
- # Where the previous data file is stored
- external-data-json-path: ./cache/swarm_driver_long_handlings.json
- # Workflow will fail when an alert happens
- fail-on-alert: true
- # GitHub API token to make a commit comment
- github-token: ${{ secrets.GITHUB_TOKEN }}
- # Enable alert commit comment
- comment-on-alert: true
- # Comment on the PR
- comment-always: true
- # 200% regression will result in alert
- alert-threshold: "200%"
- # Enable Job Summary for PRs
- summary-always: true
+ # benchmark-cli:
+ # name: Compare sn_cli benchmarks to main
+ # # right now only ubuntu, running on multiple systems would require many pushes...
+ # # perhaps this can be done with one consolidation action in the future, pulling down all results and pushing
+ # # once to the branch..
+ # runs-on: ubuntu-latest
+ # steps:
+ # - uses: actions/checkout@v4
+
+ # - uses: dtolnay/rust-toolchain@stable
+ # with:
+ # components: rustfmt, clippy
+
+ # - uses: Swatinem/rust-cache@v2
+ # continue-on-error: true
+
+ # ########################
+ # ### Setup ###
+ # ########################
+ # - run: cargo install cargo-criterion
+
+ # - name: install ripgrep
+ # run: sudo apt-get -y install ripgrep
+
+ # - name: Download 95mb file to be uploaded with the safe client
+ # shell: bash
+ # run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip
+
+ # # As normal user won't care much about initial client startup,
+ # # but be more alerted on communication speed during transmission.
+ # # Meanwhile the criterion testing code includes the client startup as well,
+ # # it will be better to execute bench test with `local`,
+ # # to make the measurement results reflect speed improvement or regression more accurately.
+ # - name: Build sn bins
+ # run: cargo build --release --bin safe --bin safenode --features local
+ # timeout-minutes: 30
+
+ # - name: Build faucet bin
+ # run: cargo build --release --bin faucet --features local --features gifting --no-default-features
+ # timeout-minutes: 30
+
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # env:
+ # SN_LOG: "all"
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ubuntu-latest
+ # build: true
+
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: echo "The SAFE_PEERS variable has been set to $SAFE_PEERS"
+
+ # #########################
+ # ### Upload large file ###
+ # #########################
+
+ # - name: Fund cli wallet
+ # shell: bash
+ # run: target/release/safe --log-output-dest=data-dir wallet get-faucet 127.0.0.1:8000
+ # env:
+ # SN_LOG: "all"
+
+ # - name: Start a client instance to compare memory usage
+ # shell: bash
+ # run: target/release/safe --log-output-dest=data-dir files upload the-test-data.zip --retry-strategy quick
+ # env:
+ # SN_LOG: "all"
+
+ # - name: Cleanup uploaded_files folder to avoid polluting the download benchmark
+ # shell: bash
+ # run: rm -rf $CLIENT_DATA_PATH/uploaded_files
+
+ # ###########################
+ # ### Client Mem Analysis ###
+ # ###########################
+
+ # - name: Check client memory usage
+ # shell: bash
+ # run: |
+ # client_peak_mem_limit_mb="1024" # mb
+ # client_avg_mem_limit_mb="512" # mb
+
+ # peak_mem_usage=$(
+ # rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
+ # awk -F':' '/"memory_used_mb":/{print $2}' |
+ # sort -n |
+ # tail -n 1
+ # )
+ # echo "Peak memory usage: $peak_mem_usage MB"
+ # if (( $(echo "$peak_mem_usage > $client_peak_mem_limit_mb" | bc -l) )); then
+ # echo "Client peak memory usage exceeded threshold: $client_peak_mem_limit_mb MB"
+ # exit 1
+ # fi
+
+ # total_mem=$(
+ # rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
+ # awk -F':' '/"memory_used_mb":/ {sum += $2} END {printf "%.0f\n", sum}'
+ # )
+ # num_of_times=$(
+ # rg "\"memory_used_mb\"" $CLIENT_DATA_PATH/logs --glob safe.* -c --stats |
+ # rg "(\d+) matches" |
+ # rg "\d+" -o
+ # )
+ # echo "num_of_times: $num_of_times"
+ # echo "Total memory is: $total_mem"
+ # average_mem=$(($total_mem/$(($num_of_times))))
+ # echo "Average memory is: $average_mem"
+
+ # if (( $(echo "$average_mem > $client_avg_mem_limit_mb" | bc -l) )); then
+ # echo "Client average memory usage exceeded threshold: $client_avg_mem_limit_mb MB"
+ # exit 1
+ # fi
+ # # Write the client memory usage to a file
+ # echo '[
+ # {
+ # "name": "client-peak-memory-usage-during-upload",
+ # "value": '$peak_mem_usage',
+ # "unit": "MB"
+ # },
+ # {
+ # "name": "client-average-memory-usage-during-upload",
+ # "value": '$average_mem',
+ # "unit": "MB"
+ # }
+ # ]' > client_memory_usage.json
+
+ # - name: check client_memory_usage.json
+ # shell: bash
+ # run: cat client_memory_usage.json
+
+ # - name: Alert for client memory usage
+ # uses: benchmark-action/github-action-benchmark@v1
+ # with:
+ # name: "Memory Usage of Client during uploading large file"
+ # tool: "customSmallerIsBetter"
+ # output-file-path: client_memory_usage.json
+ # # Where the previous data file is stored
+ # external-data-json-path: ./cache/client-mem-usage.json
+ # # Workflow will fail when an alert happens
+ # fail-on-alert: true
+ # # GitHub API token to make a commit comment
+ # github-token: ${{ secrets.GITHUB_TOKEN }}
+ # # Enable alert commit comment
+ # comment-on-alert: true
+ # # 200% regression will result in alert
+ # alert-threshold: "200%"
+ # # Enable Job Summary for PRs
+ # summary-always: true
+
+ # ########################
+ # ### Benchmark ###
+ # ########################
+ # - name: Bench `safe` cli
+ # shell: bash
+ # # Criterion outputs the actual bench results to stderr "2>&1 tee output.txt" takes stderr,
+ # # passes to tee which displays it in the terminal and writes to output.txt
+ # run: |
+ # cargo criterion --features=local --message-format=json 2>&1 -p sn_cli | tee -a output.txt
+ # cat output.txt | rg benchmark-complete | jq -s 'map({
+ # name: (.id | split("/"))[-1],
+ # unit: "MiB/s",
+ # value: ((if .throughput[0].unit == "KiB/s" then (.throughput[0].per_iteration / (1024*1024*1024)) else (.throughput[0].per_iteration / (1024*1024)) end) / (.mean.estimate / 1e9))
+ # })' > files-benchmark.json
+ # timeout-minutes: 15
+
+ # - name: Confirming the number of files uploaded and downloaded during the benchmark test
+ # shell: bash
+ # run: |
+ # ls -l $CLIENT_DATA_PATH
+ # ls -l $CLIENT_DATA_PATH/uploaded_files
+ # ls -l $CLIENT_DATA_PATH/safe_files
+
+ # - name: Store benchmark result
+ # uses: benchmark-action/github-action-benchmark@v1
+ # with:
+ # # What benchmark tool the output.txt came from
+ # tool: "customBiggerIsBetter"
+ # output-file-path: files-benchmark.json
+ # # Where the previous data file is stored
+ # external-data-json-path: ./cache/benchmark-data.json
+ # # Workflow will fail when an alert happens
+ # fail-on-alert: true
+ # # GitHub API token to make a commit comment
+ # github-token: ${{ secrets.GITHUB_TOKEN }}
+ # # Enable alert commit comment
+ # comment-on-alert: true
+ # # 200% regression will result in alert
+ # alert-threshold: "200%"
+ # # Enable Job Summary for PRs
+ # summary-always: true
+
+ # - name: Start a client to carry out download to output the logs
+ # shell: bash
+ # run: target/release/safe --log-output-dest=data-dir files download --retry-strategy quick
+
+ # - name: Start a client to simulate criterion upload
+ # shell: bash
+ # run: |
+ # ls -l target/release
+ # target/release/safe --log-output-dest=data-dir files upload target/release/faucet --retry-strategy quick
+
+ # #########################
+ # ### Stop Network ###
+ # #########################
+
+ # - name: Stop the local network
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: safe_test_logs_benchmark
+ # platform: ubuntu-latest
+ # build: true
+
+ # - name: Upload Faucet folder
+ # uses: actions/upload-artifact@main
+ # with:
+ # name: faucet_folder
+ # path: /home/runner/.local/share/safe/test_faucet
+ # continue-on-error: true
+ # if: always()
+
+ # #########################
+ # ### Node Mem Analysis ###
+ # #########################
+
+ # # The large file uploaded will increase node's peak mem usage a lot
+ # - name: Check node memory usage
+ # shell: bash
+ # run: |
+ # node_peak_mem_limit_mb="250" # mb
+ # peak_mem_usage=$(
+ # rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
+ # awk -F':' '/"memory_used_mb":/{print $2}' |
+ # sort -n |
+ # tail -n 1
+ # )
+
+ # echo "Memory usage: $peak_mem_usage MB"
+ # if (( $(echo "$peak_mem_usage > $node_peak_mem_limit_mb" | bc -l) )); then
+ # echo "Node memory usage exceeded threshold: $peak_mem_usage MB"
+ # exit 1
+ # fi
+ # # Write the node memory usage to a file
+ # echo '[
+ # {
+ # "name": "node-memory-usage-through-safe-benchmark",
+ # "value": '$peak_mem_usage',
+ # "unit": "MB"
+ # }
+ # ]' > node_memory_usage.json
+
+ # - name: check node_memory_usage.json
+ # shell: bash
+ # run: cat node_memory_usage.json
+
+ # - name: Alert for node memory usage
+ # uses: benchmark-action/github-action-benchmark@v1
+ # with:
+ # tool: "customSmallerIsBetter"
+ # output-file-path: node_memory_usage.json
+ # # Where the previous data file is stored
+ # external-data-json-path: ./cache/node-mem-usage.json
+ # # Workflow will fail when an alert happens
+ # fail-on-alert: true
+ # # GitHub API token to make a commit comment
+ # github-token: ${{ secrets.GITHUB_TOKEN }}
+ # # Enable alert commit comment
+ # comment-on-alert: true
+ # # Comment on the PR
+ # comment-always: true
+ # # 200% regression will result in alert
+ # alert-threshold: "200%"
+ # # Enable Job Summary for PRs
+ # summary-always: true
+
+ # ###########################################
+ # ### Swarm_driver handling time Analysis ###
+ # ###########################################
+
+ # - name: Check swarm_driver handling time
+ # shell: bash
+ # run: |
+ # num_of_times=$(
+ # rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
+ # rg "(\d+) matches" |
+ # rg "\d+" -o
+ # )
+ # echo "Number of long cmd handling times: $num_of_times"
+ # total_long_handling_ms=$(
+ # rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
+ # awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
+ # )
+ # echo "Total cmd long handling time is: $total_long_handling_ms ms"
+ # average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
+ # echo "Average cmd long handling time is: $average_handling_ms ms"
+ # total_long_handling=$(($total_long_handling_ms))
+ # total_num_of_times=$(($num_of_times))
+ # num_of_times=$(
+ # rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
+ # rg "(\d+) matches" |
+ # rg "\d+" -o
+ # )
+ # echo "Number of long event handling times: $num_of_times"
+ # total_long_handling_ms=$(
+ # rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
+ # awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
+ # )
+ # echo "Total event long handling time is: $total_long_handling_ms ms"
+ # average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
+ # echo "Average event long handling time is: $average_handling_ms ms"
+ # total_long_handling=$(($total_long_handling_ms+$total_long_handling))
+ # total_num_of_times=$(($num_of_times+$total_num_of_times))
+ # average_handling_ms=$(($total_long_handling/$(($total_num_of_times))))
+ # echo "Total swarm_driver long handling times is: $total_num_of_times"
+ # echo "Total swarm_driver long handling duration is: $total_long_handling ms"
+ # echo "Total average swarm_driver long handling duration is: $average_handling_ms ms"
+ # total_num_of_times_limit_hits="30000" # hits
+ # total_long_handling_limit_ms="400000" # ms
+ # average_handling_limit_ms="20" # ms
+ # if (( $(echo "$total_num_of_times > $total_num_of_times_limit_hits" | bc -l) )); then
+ # echo "Swarm_driver long handling times exceeded threshold: $total_num_of_times hits"
+ # exit 1
+ # fi
+ # if (( $(echo "$total_long_handling > $total_long_handling_limit_ms" | bc -l) )); then
+ # echo "Swarm_driver total long handling duration exceeded threshold: $total_long_handling ms"
+ # exit 1
+ # fi
+ # if (( $(echo "$average_handling_ms > $average_handling_limit_ms" | bc -l) )); then
+ # echo "Swarm_driver average long handling time exceeded threshold: $average_handling_ms ms"
+ # exit 1
+ # fi
+
+ # # Write the node memory usage to a file
+ # echo '[
+ # {
+ # "name": "swarm_driver long handling times",
+ # "value": '$total_num_of_times',
+ # "unit": "hits"
+ # },
+ # {
+ # "name": "swarm_driver long handling total_time",
+ # "value": '$total_long_handling',
+ # "unit": "ms"
+ # },
+ # {
+ # "name": "swarm_driver average long handling time",
+ # "value": '$average_handling_ms',
+ # "unit": "ms"
+ # }
+ # ]' > swarm_driver_long_handlings.json
+
+ # - name: check swarm_driver_long_handlings.json
+ # shell: bash
+ # run: cat swarm_driver_long_handlings.json
+
+ # - name: Alert for swarm_driver long handlings
+ # uses: benchmark-action/github-action-benchmark@v1
+ # with:
+ # tool: "customSmallerIsBetter"
+ # output-file-path: swarm_driver_long_handlings.json
+ # # Where the previous data file is stored
+ # external-data-json-path: ./cache/swarm_driver_long_handlings.json
+ # # Workflow will fail when an alert happens
+ # fail-on-alert: true
+ # # GitHub API token to make a commit comment
+ # github-token: ${{ secrets.GITHUB_TOKEN }}
+ # # Enable alert commit comment
+ # comment-on-alert: true
+ # # Comment on the PR
+ # comment-always: true
+ # # 200% regression will result in alert
+ # alert-threshold: "200%"
+ # # Enable Job Summary for PRs
+ # summary-always: true
benchmark-cash:
name: Compare sn_transfer benchmarks to main
diff --git a/.github/workflows/build-release-artifacts.yml b/.github/workflows/build-release-artifacts.yml
index b30d4e1803..4bbc2f8f7b 100644
--- a/.github/workflows/build-release-artifacts.yml
+++ b/.github/workflows/build-release-artifacts.yml
@@ -17,17 +17,6 @@ on:
description: Set to build a particular tag
type: string
-# The key variables also need to be passed to `cross`, which runs in a container and does not
-# inherit variables from the parent environment. The `cross` tool is used in the `build`
-# job. If any keys are added, the `build-release-artifacts` target in the Justfile must
-# also be updated.
-env:
- GENESIS_PK: ${{ secrets.STABLE_GENESIS_PK }}
- GENESIS_SK: ${{ secrets.STABLE_GENESIS_SK }}
- FOUNDATION_PK: ${{ secrets.STABLE_FOUNDATION_PK }}
- NETWORK_ROYALTIES_PK: ${{ secrets.STABLE_NETWORK_ROYALTIES_PK }}
- PAYMENT_FORWARD_PK: ${{ secrets.STABLE_REWARD_FORWARDING_PK }}
-
jobs:
build:
name: build
diff --git a/.github/workflows/cross-platform.yml b/.github/workflows/cross-platform.yml
index d4d9393008..6beeac321d 100644
--- a/.github/workflows/cross-platform.yml
+++ b/.github/workflows/cross-platform.yml
@@ -16,7 +16,7 @@ jobs:
wasm:
if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: Wasm builds
+ name: wasm32-unknown-unknown builds
runs-on: ubuntu-latest
steps:
@@ -29,10 +29,9 @@ jobs:
- name: Install wasm-pack
run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
- - name: Build client for wasm
- # wasm pack doesnt support workspaces
- # --dev to avoid a loong optimisation step
- run: cd sn_client && wasm-pack build --dev
+ - name: Build WASM package
+ # --dev to avoid optimisation
+ run: wasm-pack build --dev --target=web autonomi
timeout-minutes: 30
websocket:
diff --git a/.github/workflows/generate-benchmark-charts.yml b/.github/workflows/generate-benchmark-charts.yml
index cd61f0e165..27a737a7a7 100644
--- a/.github/workflows/generate-benchmark-charts.yml
+++ b/.github/workflows/generate-benchmark-charts.yml
@@ -15,7 +15,7 @@ permissions:
env:
CARGO_INCREMENTAL: "0"
RUST_BACKTRACE: 1
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ CLIENT_DATA_PATH: /home/runner/.local/share/safe/autonomi
NODE_DATA_PATH: /home/runner/.local/share/safe/node
jobs:
@@ -45,43 +45,29 @@ jobs:
shell: bash
run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip
- - name: Build node and client
- run: cargo build --release --features local-discovery --bin safenode --bin safe
- timeout-minutes: 30
-
- - name: Build faucet bin
- run: cargo build --release --bin faucet --features local-discovery --features gifting
+ - name: Build node and cli binaries
+ run: cargo build --release --features local --bin safenode --bin autonomi
timeout-minutes: 30
- name: Start a local network
uses: maidsafe/sn-local-testnet-action@main
- env:
- SN_LOG: "all"
with:
action: start
- interval: 2000
+ enable-evm-testnet: true
node-path: target/release/safenode
- faucet-path: target/release/faucet
platform: ubuntu-latest
build: true
-
- - name: Create and fund a wallet to pay for files storage
- run: |
- cargo run --bin faucet --release -- --log-output-dest=data-dir send 1000000 $(cargo run --bin safe --release -- wallet address | tail -n 1) | tail -n 1 > transfer_hex
- cargo run --bin safe --release -- wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- timeout-minutes: 10
+ sn-log: "all"
########################
### Benchmark ###
########################
- - name: Bench `safe` cli
+ - name: Bench `autonomi` cli
shell: bash
# Criterion outputs the actual bench results to stderr "2>&1 tee output.txt" takes stderr,
# passes to tee which displays it in the terminal and writes to output.txt
run: |
- cargo criterion --features=local-discovery --message-format=json 2>&1 -p sn_cli | tee -a output.txt
+ cargo criterion --features=local --message-format=json 2>&1 -p autonomi | tee -a output.txt
cat output.txt | rg benchmark-complete | jq -s 'map({
name: (.id | split("/"))[-1],
unit: "MiB/s",
@@ -107,9 +93,14 @@ jobs:
auto-push: true
max-items-in-chart: 300
+ # FIXME: do this in a generic way for local testnets
+ - name: export default secret key
+ run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" >> $GITHUB_ENV
+ shell: bash
+
- name: Start a client instance to compare memory usage
shell: bash
- run: cargo run --bin safe --release -- --log-output-dest=data-dir files upload the-test-data.zip --retry-strategy quick
+ run: cargo run --bin autonomi --release -- --log-output-dest=data-dir file upload the-test-data.zip
env:
SN_LOG: "all"
diff --git a/.github/workflows/memcheck.yml b/.github/workflows/memcheck.yml
index 99f5b93609..55d3790bb5 100644
--- a/.github/workflows/memcheck.yml
+++ b/.github/workflows/memcheck.yml
@@ -17,506 +17,505 @@ env:
RESTART_TEST_NODE_DATA_PATH: /home/runner/.local/share/safe/restart_node
FAUCET_LOG_PATH: /home/runner/.local/share/safe/test_faucet/logs
-jobs:
- memory-check:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Check we're on the right commit
- run: git log -1 --oneline
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
-
- - uses: Swatinem/rust-cache@v2
- continue-on-error: true
-
- - name: install ripgrep
- shell: bash
- run: sudo apt-get install -y ripgrep
-
- - name: Build binaries
- run: cargo build --release --bin safe --bin safenode
- timeout-minutes: 30
-
- - name: Build faucet binary with gifting
- run: cargo build --release --bin faucet --features gifting
- timeout-minutes: 30
-
- - name: Build tests
- run: cargo test --release -p sn_node --test data_with_churn --test verify_routing_table --no-run
- timeout-minutes: 30
-
- - name: Start a node instance that does not undergo churn
- run: |
- mkdir -p $BOOTSTRAP_NODE_DATA_PATH
- ./target/release/safenode --first \
- --root-dir $BOOTSTRAP_NODE_DATA_PATH --log-output-dest $BOOTSTRAP_NODE_DATA_PATH --local --owner=bootstrap &
- sleep 10
- env:
- SN_LOG: "all"
-
- - name: Set SAFE_PEERS
- run: |
- safe_peers=$(rg "Local node is listening .+ on .+" $BOOTSTRAP_NODE_DATA_PATH -u | \
- rg '/ip4.*$' -m1 -o)
- echo $safe_peers
- echo "SAFE_PEERS=$safe_peers" >> $GITHUB_ENV
-
- - name: Check SAFE_PEERS was set
- shell: bash
- run: echo "The SAFE_PEERS variable has been set to $SAFE_PEERS"
-
- - name: Start a node instance to be restarted
- run: |
- mkdir -p $RESTART_TEST_NODE_DATA_PATH
- ./target/release/safenode \
- --root-dir $RESTART_TEST_NODE_DATA_PATH --log-output-dest $RESTART_TEST_NODE_DATA_PATH --local --owner=restart &
- sleep 10
- env:
- SN_LOG: "all"
-
- - name: Start a local network
- env:
- SN_LOG: "all"
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- build: true
- faucet-path: target/release/faucet
- interval: 2000
- join: true
- node-path: target/release/safenode
- owner-prefix: node
- platform: ubuntu-latest
- set-safe-peers: false
-
- # In this case we did *not* want SAFE_PEERS to be set to another value by starting the testnet
- - name: Check SAFE_PEERS was not changed
- shell: bash
- run: echo "The SAFE_PEERS variable has been set to ${SAFE_PEERS}"
-
- - name: Create and fund a wallet to pay for files storage
- run: |
- echo "Obtaining address for use with the faucet..."
- ./target/release/safe --log-output-dest=data-dir wallet create --no-password
- address=$(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1)
- echo "Sending tokens to the faucet at $address"
- ./target/release/faucet --log-output-dest=data-dir send 5000000 $address > initial_balance_from_faucet.txt
- cat initial_balance_from_faucet.txt
- cat initial_balance_from_faucet.txt | tail -n 1 > transfer_hex
- cat transfer_hex
- ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- timeout-minutes: 15
-
- - name: Move faucet log to the working folder
- run: |
- echo "SAFE_DATA_PATH has: "
- ls -l $SAFE_DATA_PATH
- echo "test_faucet foder has: "
- ls -l $SAFE_DATA_PATH/test_faucet
- echo "logs folder has: "
- ls -l $SAFE_DATA_PATH/test_faucet/logs
- mv $FAUCET_LOG_PATH/faucet.log ./faucet_log.log
- continue-on-error: true
- if: always()
- timeout-minutes: 1
-
- - name: Download 95mb file to be uploaded with the safe client
- shell: bash
- run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip
-
- # The resources file we upload may change, and with it mem consumption.
- # Be aware!
- - name: Start a client to upload files
- # -p makes files public
- run: |
- ls -l
- ./target/release/safe --log-output-dest=data-dir files upload "./the-test-data.zip" --retry-strategy quick -p
- env:
- SN_LOG: "all"
- timeout-minutes: 25
-
- # this check needs to be after some transfer activity
- - name: Check we're warned about using default genesis
- run: |
- git log -1 --oneline
- ls -la $RESTART_TEST_NODE_DATA_PATH
- cat $RESTART_TEST_NODE_DATA_PATH/safenode.log
- - name: Check we're warned about using default genesis
- run: |
- git log -1 --oneline
- ls -la $BOOTSTRAP_NODE_DATA_PATH
- cat $BOOTSTRAP_NODE_DATA_PATH/safenode.log
-
- - name: Check we're warned about using default genesis
- run: |
- git log -1 --oneline
- ls -la $NODE_DATA_PATH
- rg "USING DEFAULT" "$NODE_DATA_PATH" -u
- shell: bash
-
- # Uploading same file using different client shall not incur any payment neither uploads
- # Note rg will throw an error directly in case of failed to find a matching pattern.
- - name: Start a different client to upload the same file
- run: |
- pwd
- mv $CLIENT_DATA_PATH $SAFE_DATA_PATH/client_first
- ls -l $SAFE_DATA_PATH
- ls -l $SAFE_DATA_PATH/client_first
- mkdir $SAFE_DATA_PATH/client
- ls -l $SAFE_DATA_PATH
- mv $SAFE_DATA_PATH/client_first/logs $CLIENT_DATA_PATH/logs
- ls -l $CLIENT_DATA_PATH
- cp ./the-test-data.zip ./the-test-data_1.zip
- ./target/release/safe --log-output-dest=data-dir wallet create --no-replace --no-password
- ./target/release/faucet --log-output-dest=data-dir send 5000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) > initial_balance_from_faucet_1.txt
- cat initial_balance_from_faucet_1.txt
- cat initial_balance_from_faucet_1.txt | tail -n 1 > transfer_hex
- cat transfer_hex
- ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- ./target/release/safe --log-output-dest=data-dir files upload "./the-test-data_1.zip" --retry-strategy quick -p > second_upload.txt
- cat second_upload.txt
- rg "New wallet balance: 5000000.000000000" second_upload.txt -c --stats
- env:
- SN_LOG: "all"
- timeout-minutes: 25
-
- - name: Stop the restart node
- run: kill $( cat $RESTART_TEST_NODE_DATA_PATH/safenode.pid )
-
- - name: Start the restart node again
- run: |
- ./target/release/safenode \
- --root-dir $RESTART_TEST_NODE_DATA_PATH --log-output-dest $RESTART_TEST_NODE_DATA_PATH --local --owner=restarted &
- sleep 10
- env:
- SN_LOG: "all"
-
- - name: Assert we've reloaded some chunks
- run: rg "Existing record loaded" $RESTART_TEST_NODE_DATA_PATH
-
- - name: Chunks data integrity during nodes churn
- run: cargo test --release -p sn_node --test data_with_churn -- --nocapture
- env:
- TEST_DURATION_MINS: 5
- TEST_TOTAL_CHURN_CYCLES: 15
- SN_LOG: "all"
- timeout-minutes: 30
-
- - name: Check current files
- run: ls -la
- - name: Check safenode file
- run: ls /home/runner/work/safe_network/safe_network/target/release
-
- - name: Check there was no restart issues
- run: |
- if rg 'Failed to execute hard-restart command' $NODE_DATA_PATH; then
- echo "Restart issues detected"
- exit 1
- else
- echo "No restart issues detected"
- fi
-
- - name: Verify the routing tables of the nodes
- run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
- env:
- SLEEP_BEFORE_VERIFICATION: 300
- timeout-minutes: 10
-
- - name: Verify restart of nodes using rg
- shell: bash
- timeout-minutes: 1
- # get the counts, then the specific line, and then the digit count only
- # then check we have an expected level of restarts
- # TODO: make this use an env var, or relate to testnet size
- run: |
- restart_count=$(rg "Node is restarting in" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "Restart $restart_count nodes"
- peer_removed=$(rg "PeerRemovedFromRoutingTable" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "PeerRemovedFromRoutingTable $peer_removed times"
- if [ $peer_removed -lt $restart_count ]; then
- echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
- exit 1
- fi
- node_count=$(ls $NODE_DATA_PATH | wc -l)
- echo "Node dir count is $node_count"
- # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
- # if [ $restart_count -lt $node_count ]; then
- # echo "Restart count of: $restart_count is less than the node count of: $node_count"
- # exit 1
- # fi
-
- - name: Verify data replication using rg
- shell: bash
- timeout-minutes: 1
- # get the counts, then the specific line, and then the digit count only
- # then check we have an expected level of replication
- # TODO: make this use an env var, or relate to testnet size
- # As the bootstrap_node using separate folder for logging,
- # hence the folder input to rg needs to cover that as well.
- run: |
- sending_list_count=$(rg "Sending a replication list" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "Sent $sending_list_count replication lists"
- received_list_count=$(rg "Received replication list from" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "Received $received_list_count replication lists"
- fetching_attempt_count=$(rg "FetchingKeysForReplication" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "Carried out $fetching_attempt_count fetching attempts"
- if: always()
-
- - name: Start a client to download files
- run: |
- ./target/release/safe --log-output-dest=data-dir files download --retry-strategy quick
- ls -l $CLIENT_DATA_PATH/safe_files
- downloaded_files=$(ls $CLIENT_DATA_PATH/safe_files | wc -l)
- if [ $downloaded_files -lt 1 ]; then
- echo "Only downloaded $downloaded_files files, less than the 1 file uploaded"
- exit 1
- fi
- env:
- SN_LOG: "all"
- timeout-minutes: 10
-
- # Download the same files again to ensure files won't get corrupted.
- - name: Start a client to download the same files again
- run: |
- ./target/release/safe --log-output-dest=data-dir files download --show-holders --retry-strategy quick
- ls -l $CLIENT_DATA_PATH/safe_files
- downloaded_files=$(ls $CLIENT_DATA_PATH/safe_files | wc -l)
- if [ $downloaded_files -lt 1 ]; then
- echo "Only downloaded $downloaded_files files, less than the 1 file uploaded"
- exit 1
- fi
- file_size1=$(stat -c "%s" ./the-test-data_1.zip)
- file_size2=$(stat -c "%s" $CLIENT_DATA_PATH/safe_files/the-test-data_1.zip)
- if [ $file_size1 != $file_size2 ]; then
- echo "The downloaded file has a different size $file_size2 to the original $file_size1."
- exit 1
- fi
- env:
- SN_LOG: "all"
- timeout-minutes: 10
-
- - name: Audit from genesis to collect entire spend DAG and dump to a dot file
- run: |
- ./target/release/safe --log-output-dest=data-dir wallet audit --dot --sk-str 49113d2083f57a976076adbe85decb75115820de1e6e74b47e0429338cef124a > spend_dag_and_statistics.txt
- echo "=============================================================================="
- cat spend_dag_and_statistics.txt
- env:
- SN_LOG: "all"
- timeout-minutes: 5
- if: always()
-
- - name: Ensure discord_ids decrypted
- run: |
- rg 'node_' ./spend_dag_and_statistics.txt -o
- timeout-minutes: 1
- if: always()
-
- - name: Check nodes running
- shell: bash
- timeout-minutes: 1
- continue-on-error: true
- run: pgrep safenode | wc -l
- if: always()
-
- - name: Wait before verifying reward forwarding
- run: sleep 300
-
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: safe_test_logs_memcheck
- platform: ubuntu-latest
- build: true
-
- - name: Check node memory usage
- shell: bash
- # The resources file and churning chunk_size we upload may change, and with it mem consumption.
- # This is set to a value high enough to allow for some variation depending on
- # resources and node location in the network, but hopefully low enough to catch
- # any wild memory issues
- # Any changes to this value should be carefully considered and tested!
- # As we have a bootstrap node acting as an access point for churning nodes and client,
- # The memory usage here will be significantly higher here than in the benchmark test,
- # where we don't have a bootstrap node.
- run: |
- node_peak_mem_limit_mb="300" # mb
-
- peak_mem_usage=$(
- rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/{print $2}' |
- sort -n |
- tail -n 1
- )
- echo "Node memory usage: $peak_mem_usage MB"
-
- if (( $(echo "$peak_mem_usage > $node_peak_mem_limit_mb" | bc -l) )); then
- echo "Node memory usage exceeded threshold: $peak_mem_usage MB"
- exit 1
- fi
- if: always()
-
- - name: Check client memory usage
- shell: bash
- # limits here are lower that benchmark tests as there is less going on.
- run: |
- client_peak_mem_limit_mb="1024" # mb
- client_avg_mem_limit_mb="512" # mb
-
- peak_mem_usage=$(
- rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/{print $2}' |
- sort -n |
- tail -n 1
- )
- echo "Peak memory usage: $peak_mem_usage MB"
- if (( $(echo "$peak_mem_usage > $client_peak_mem_limit_mb" | bc -l) )); then
- echo "Client peak memory usage exceeded threshold: $client_peak_mem_limit_mb MB"
- exit 1
- fi
-
- total_mem=$(
- rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
- awk -F':' '/"memory_used_mb":/ {sum += $2} END {printf "%.0f\n", sum}'
- )
- num_of_times=$(
- rg "\"memory_used_mb\"" $CLIENT_DATA_PATH/logs --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "num_of_times: $num_of_times"
- echo "Total memory is: $total_mem"
- average_mem=$(($total_mem/$(($num_of_times))))
- echo "Average memory is: $average_mem"
-
- if (( $(echo "$average_mem > $client_avg_mem_limit_mb" | bc -l) )); then
- echo "Client average memory usage exceeded threshold: $client_avg_mem_limit_mb MB"
- exit 1
- fi
-
- - name: Check node swarm_driver handling statistics
- shell: bash
- # With the latest improvements, swarm_driver will be in high chance
- # has no super long handling (longer than 1s).
- # As the `rg` cmd will fail the shell directly if no entry find,
- # hence not covering it.
- # Be aware that if do need to looking for handlings longer than second, it shall be:
- # rg "SwarmCmd handled in [^m,µ,n]*s:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats
- run: |
- num_of_times=$(
- rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "Number of long cmd handling times: $num_of_times"
- total_long_handling_ms=$(
- rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
- awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
- )
- echo "Total cmd long handling time is: $total_long_handling_ms ms"
- average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
- echo "Average cmd long handling time is: $average_handling_ms ms"
- total_long_handling=$(($total_long_handling_ms))
- total_num_of_times=$(($num_of_times))
- num_of_times=$(
- rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
- rg "(\d+) matches" |
- rg "\d+" -o
- )
- echo "Number of long event handling times: $num_of_times"
- total_long_handling_ms=$(
- rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
- awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
- )
- echo "Total event long handling time is: $total_long_handling_ms ms"
- average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
- echo "Average event long handling time is: $average_handling_ms ms"
- total_long_handling=$(($total_long_handling_ms+$total_long_handling))
- total_num_of_times=$(($num_of_times+$total_num_of_times))
- average_handling_ms=$(($total_long_handling/$(($total_num_of_times))))
- echo "Total swarm_driver long handling times is: $total_num_of_times"
- echo "Total swarm_driver long handling duration is: $total_long_handling ms"
- echo "Total average swarm_driver long handling duration is: $average_handling_ms ms"
-
- - name: Verify reward forwarding using rg
- shell: bash
- timeout-minutes: 1
- run: |
- min_reward_forwarding_times="100"
- reward_forwarding_count=$(rg "Reward forwarding completed sending spend" $NODE_DATA_PATH -c --stats | \
- rg "(\d+) matches" | rg "\d+" -o)
- echo "Carried out $reward_forwarding_count reward forwardings"
- if (( $(echo "$reward_forwarding_count < $min_reward_forwarding_times" | bc -l) )); then
- echo "Reward forwarding times below the threshold: $min_reward_forwarding_times"
- exit 1
- fi
- if: always()
-
- - name: Upload payment wallet initialization log
- uses: actions/upload-artifact@main
- with:
- name: payment_wallet_initialization_log
- path: initial_balance_from_faucet.txt
- continue-on-error: true
- if: always()
-
- - name: Move faucet log to the working folder
- run: |
- echo "current folder is:"
- pwd
- echo "SAFE_DATA_PATH has: "
- ls -l $SAFE_DATA_PATH
- echo "test_faucet foder has: "
- ls -l $SAFE_DATA_PATH/test_faucet
- echo "logs folder has: "
- ls -l $SAFE_DATA_PATH/test_faucet/logs
- mv $FAUCET_LOG_PATH/*.log ./faucet_log.log
- env:
- SN_LOG: "all"
- continue-on-error: true
- if: always()
- timeout-minutes: 1
-
- - name: Move bootstrap_node log to the working directory
- run: |
- ls -l $BOOTSTRAP_NODE_DATA_PATH
- mv $BOOTSTRAP_NODE_DATA_PATH/safenode.log ./bootstrap_node.log
- continue-on-error: true
- if: always()
- timeout-minutes: 1
-
- - name: Upload faucet log
- uses: actions/upload-artifact@main
- with:
- name: memory_check_faucet_log
- path: faucet_log.log
- continue-on-error: true
- if: always()
-
- - name: Upload bootstrap_node log
- uses: actions/upload-artifact@main
- with:
- name: memory_check_bootstrap_node_log
- path: bootstrap_node.log
- continue-on-error: true
- if: always()
-
- - name: Upload spend DAG and statistics
- uses: actions/upload-artifact@main
- with:
- name: memory_check_spend_dag_and_statistics
- path: spend_dag_and_statistics.txt
- continue-on-error: true
- if: always()
+# jobs:
+# memory-check:
+# runs-on: ubuntu-latest
+# steps:
+# - name: Checkout code
+# uses: actions/checkout@v4
+
+# - name: Check we're on the right commit
+# run: git log -1 --oneline
+
+# - name: Install Rust
+# uses: dtolnay/rust-toolchain@stable
+
+# - uses: Swatinem/rust-cache@v2
+# continue-on-error: true
+
+# - name: install ripgrep
+# shell: bash
+# run: sudo apt-get install -y ripgrep
+
+# - name: Build binaries
+# run: cargo build --release --bin safe --bin safenode
+# timeout-minutes: 30
+
+# - name: Build faucet binary with gifting
+# run: cargo build --release --bin faucet --features gifting
+# timeout-minutes: 30
+
+# - name: Build tests
+# run: cargo test --release -p sn_node --test data_with_churn --test verify_routing_table --no-run
+# timeout-minutes: 30
+
+# - name: Start a node instance that does not undergo churn
+# run: |
+# mkdir -p $BOOTSTRAP_NODE_DATA_PATH
+# ./target/release/safenode --first \
+# --root-dir $BOOTSTRAP_NODE_DATA_PATH --log-output-dest $BOOTSTRAP_NODE_DATA_PATH --local --owner=bootstrap &
+# sleep 10
+# env:
+# SN_LOG: "all"
+
+# - name: Set SAFE_PEERS
+# run: |
+# safe_peers=$(rg "Local node is listening .+ on \".+\"" $BOOTSTRAP_NODE_DATA_PATH -u | \
+# rg '/ip4.*$' -m1 -o | rg '"' -r '')
+# echo "SAFE_PEERS=$safe_peers" >> $GITHUB_ENV
+
+# - name: Check SAFE_PEERS was set
+# shell: bash
+# run: echo "The SAFE_PEERS variable has been set to $SAFE_PEERS"
+
+# - name: Start a node instance to be restarted
+# run: |
+# mkdir -p $RESTART_TEST_NODE_DATA_PATH
+# ./target/release/safenode \
+# --root-dir $RESTART_TEST_NODE_DATA_PATH --log-output-dest $RESTART_TEST_NODE_DATA_PATH --local --owner=restart &
+# sleep 10
+# env:
+# SN_LOG: "all"
+
+# - name: Start a local network
+# env:
+# SN_LOG: "all"
+# uses: maidsafe/sn-local-testnet-action@main
+# with:
+# action: start
+# build: true
+# faucet-path: target/release/faucet
+# interval: 2000
+# join: true
+# node-path: target/release/safenode
+# owner-prefix: node
+# platform: ubuntu-latest
+# set-safe-peers: false
+
+# # In this case we did *not* want SAFE_PEERS to be set to another value by starting the testnet
+# - name: Check SAFE_PEERS was not changed
+# shell: bash
+# run: echo "The SAFE_PEERS variable has been set to ${SAFE_PEERS}"
+
+# - name: Create and fund a wallet to pay for files storage
+# run: |
+# echo "Obtaining address for use with the faucet..."
+# ./target/release/safe --log-output-dest=data-dir wallet create --no-password
+# address=$(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1)
+# echo "Sending tokens to the faucet at $address"
+# ./target/release/faucet --log-output-dest=data-dir send 5000000 $address > initial_balance_from_faucet.txt
+# cat initial_balance_from_faucet.txt
+# cat initial_balance_from_faucet.txt | tail -n 1 > transfer_hex
+# cat transfer_hex
+# ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 15
+
+# - name: Move faucet log to the working folder
+# run: |
+# echo "SAFE_DATA_PATH has: "
+# ls -l $SAFE_DATA_PATH
+# echo "test_faucet folder has: "
+# ls -l $SAFE_DATA_PATH/test_faucet
+# echo "logs folder has: "
+# ls -l $SAFE_DATA_PATH/test_faucet/logs
+# mv $FAUCET_LOG_PATH/faucet.log ./faucet_log.log
+# continue-on-error: true
+# if: always()
+# timeout-minutes: 1
+
+# - name: Download 95mb file to be uploaded with the safe client
+# shell: bash
+# run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip
+
+# # The resources file we upload may change, and with it mem consumption.
+# # Be aware!
+# - name: Start a client to upload files
+# # -p makes files public
+# run: |
+# ls -l
+# ./target/release/safe --log-output-dest=data-dir files upload "./the-test-data.zip" --retry-strategy quick -p
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 25
+
+# # this check needs to be after some transfer activity
+# - name: Check we're warned about using default genesis
+# run: |
+# git log -1 --oneline
+# ls -la $RESTART_TEST_NODE_DATA_PATH
+# cat $RESTART_TEST_NODE_DATA_PATH/safenode.log
+# - name: Check we're warned about using default genesis
+# run: |
+# git log -1 --oneline
+# ls -la $BOOTSTRAP_NODE_DATA_PATH
+# cat $BOOTSTRAP_NODE_DATA_PATH/safenode.log
+
+# - name: Check we're warned about using default genesis
+# run: |
+# git log -1 --oneline
+# ls -la $NODE_DATA_PATH
+# rg "USING DEFAULT" "$NODE_DATA_PATH" -u
+# shell: bash
+
+# # Uploading same file using different client shall not incur any payment neither uploads
+# # Note rg will throw an error directly in case of failed to find a matching pattern.
+# - name: Start a different client to upload the same file
+# run: |
+# pwd
+# mv $CLIENT_DATA_PATH $SAFE_DATA_PATH/client_first
+# ls -l $SAFE_DATA_PATH
+# ls -l $SAFE_DATA_PATH/client_first
+# mkdir $SAFE_DATA_PATH/client
+# ls -l $SAFE_DATA_PATH
+# mv $SAFE_DATA_PATH/client_first/logs $CLIENT_DATA_PATH/logs
+# ls -l $CLIENT_DATA_PATH
+# cp ./the-test-data.zip ./the-test-data_1.zip
+# ./target/release/safe --log-output-dest=data-dir wallet create --no-replace --no-password
+# ./target/release/faucet --log-output-dest=data-dir send 5000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) > initial_balance_from_faucet_1.txt
+# cat initial_balance_from_faucet_1.txt
+# cat initial_balance_from_faucet_1.txt | tail -n 1 > transfer_hex
+# cat transfer_hex
+# ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
+# ./target/release/safe --log-output-dest=data-dir files upload "./the-test-data_1.zip" --retry-strategy quick -p > second_upload.txt
+# cat second_upload.txt
+# rg "New wallet balance: 5000000.000000000" second_upload.txt -c --stats
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 25
+
+# - name: Stop the restart node
+# run: kill $( cat $RESTART_TEST_NODE_DATA_PATH/safenode.pid )
+
+# - name: Start the restart node again
+# run: |
+# ./target/release/safenode \
+# --root-dir $RESTART_TEST_NODE_DATA_PATH --log-output-dest $RESTART_TEST_NODE_DATA_PATH --local --owner=restarted &
+# sleep 10
+# env:
+# SN_LOG: "all"
+
+# - name: Assert we've reloaded some chunks
+# run: rg "Existing record loaded" $RESTART_TEST_NODE_DATA_PATH
+
+# - name: Chunks data integrity during nodes churn
+# run: cargo test --release -p sn_node --test data_with_churn -- --nocapture
+# env:
+# TEST_DURATION_MINS: 5
+# TEST_TOTAL_CHURN_CYCLES: 15
+# SN_LOG: "all"
+# timeout-minutes: 30
+
+# - name: Check current files
+# run: ls -la
+# - name: Check safenode file
+# run: ls /home/runner/work/safe_network/safe_network/target/release
+
+# - name: Check there was no restart issues
+# run: |
+# if rg 'Failed to execute hard-restart command' $NODE_DATA_PATH; then
+# echo "Restart issues detected"
+# exit 1
+# else
+# echo "No restart issues detected"
+# fi
+
+# - name: Verify the routing tables of the nodes
+# run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
+# env:
+# SLEEP_BEFORE_VERIFICATION: 300
+# timeout-minutes: 10
+
+# - name: Verify restart of nodes using rg
+# shell: bash
+# timeout-minutes: 1
+# # get the counts, then the specific line, and then the digit count only
+# # then check we have an expected level of restarts
+# # TODO: make this use an env var, or relate to testnet size
+# run: |
+# restart_count=$(rg "Node is restarting in" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "Restart $restart_count nodes"
+# peer_removed=$(rg "PeerRemovedFromRoutingTable" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "PeerRemovedFromRoutingTable $peer_removed times"
+# if [ $peer_removed -lt $restart_count ]; then
+# echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
+# exit 1
+# fi
+# node_count=$(ls $NODE_DATA_PATH | wc -l)
+# echo "Node dir count is $node_count"
+# # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
+# # if [ $restart_count -lt $node_count ]; then
+# # echo "Restart count of: $restart_count is less than the node count of: $node_count"
+# # exit 1
+# # fi
+
+# - name: Verify data replication using rg
+# shell: bash
+# timeout-minutes: 1
+# # get the counts, then the specific line, and then the digit count only
+# # then check we have an expected level of replication
+# # TODO: make this use an env var, or relate to testnet size
+# # As the bootstrap_node using separate folder for logging,
+# # hence the folder input to rg needs to cover that as well.
+# run: |
+# sending_list_count=$(rg "Sending a replication list" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "Sent $sending_list_count replication lists"
+# received_list_count=$(rg "Received replication list from" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "Received $received_list_count replication lists"
+# fetching_attempt_count=$(rg "FetchingKeysForReplication" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "Carried out $fetching_attempt_count fetching attempts"
+# if: always()
+
+# - name: Start a client to download files
+# run: |
+# ./target/release/safe --log-output-dest=data-dir files download --retry-strategy quick
+# ls -l $CLIENT_DATA_PATH/safe_files
+# downloaded_files=$(ls $CLIENT_DATA_PATH/safe_files | wc -l)
+# if [ $downloaded_files -lt 1 ]; then
+# echo "Only downloaded $downloaded_files files, less than the 1 file uploaded"
+# exit 1
+# fi
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 10
+
+# # Download the same files again to ensure files won't get corrupted.
+# - name: Start a client to download the same files again
+# run: |
+# ./target/release/safe --log-output-dest=data-dir files download --show-holders --retry-strategy quick
+# ls -l $CLIENT_DATA_PATH/safe_files
+# downloaded_files=$(ls $CLIENT_DATA_PATH/safe_files | wc -l)
+# if [ $downloaded_files -lt 1 ]; then
+# echo "Only downloaded $downloaded_files files, less than the 1 file uploaded"
+# exit 1
+# fi
+# file_size1=$(stat -c "%s" ./the-test-data_1.zip)
+# file_size2=$(stat -c "%s" $CLIENT_DATA_PATH/safe_files/the-test-data_1.zip)
+# if [ $file_size1 != $file_size2 ]; then
+# echo "The downloaded file has a different size $file_size2 to the original $file_size1."
+# exit 1
+# fi
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 10
+
+# - name: Audit from genesis to collect entire spend DAG and dump to a dot file
+# run: |
+# ./target/release/safe --log-output-dest=data-dir wallet audit --dot --sk-str 49113d2083f57a976076adbe85decb75115820de1e6e74b47e0429338cef124a > spend_dag_and_statistics.txt
+# echo "=============================================================================="
+# cat spend_dag_and_statistics.txt
+# env:
+# SN_LOG: "all"
+# timeout-minutes: 5
+# if: always()
+
+# - name: Ensure discord_ids decrypted
+# run: |
+# rg 'node_' ./spend_dag_and_statistics.txt -o
+# timeout-minutes: 1
+# if: always()
+
+# - name: Check nodes running
+# shell: bash
+# timeout-minutes: 1
+# continue-on-error: true
+# run: pgrep safenode | wc -l
+# if: always()
+
+# - name: Wait before verifying reward forwarding
+# run: sleep 300
+
+# - name: Stop the local network and upload logs
+# if: always()
+# uses: maidsafe/sn-local-testnet-action@main
+# with:
+# action: stop
+# log_file_prefix: safe_test_logs_memcheck
+# platform: ubuntu-latest
+# build: true
+
+# - name: Check node memory usage
+# shell: bash
+# # The resources file and churning chunk_size we upload may change, and with it mem consumption.
+# # This is set to a value high enough to allow for some variation depending on
+# # resources and node location in the network, but hopefully low enough to catch
+# # any wild memory issues
+# # Any changes to this value should be carefully considered and tested!
+# # As we have a bootstrap node acting as an access point for churning nodes and client,
+# # The memory usage here will be significantly higher here than in the benchmark test,
+# # where we don't have a bootstrap node.
+# run: |
+# node_peak_mem_limit_mb="300" # mb
+
+# peak_mem_usage=$(
+# rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
+# awk -F':' '/"memory_used_mb":/{print $2}' |
+# sort -n |
+# tail -n 1
+# )
+# echo "Node memory usage: $peak_mem_usage MB"
+
+# if (( $(echo "$peak_mem_usage > $node_peak_mem_limit_mb" | bc -l) )); then
+# echo "Node memory usage exceeded threshold: $peak_mem_usage MB"
+# exit 1
+# fi
+# if: always()
+
+# - name: Check client memory usage
+# shell: bash
+# # limits here are lower than in the benchmark tests as there is less going on.
+# run: |
+# client_peak_mem_limit_mb="1024" # mb
+# client_avg_mem_limit_mb="512" # mb
+
+# peak_mem_usage=$(
+# rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
+# awk -F':' '/"memory_used_mb":/{print $2}' |
+# sort -n |
+# tail -n 1
+# )
+# echo "Peak memory usage: $peak_mem_usage MB"
+# if (( $(echo "$peak_mem_usage > $client_peak_mem_limit_mb" | bc -l) )); then
+# echo "Client peak memory usage exceeded threshold: $client_peak_mem_limit_mb MB"
+# exit 1
+# fi
+
+# total_mem=$(
+# rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
+# awk -F':' '/"memory_used_mb":/ {sum += $2} END {printf "%.0f\n", sum}'
+# )
+# num_of_times=$(
+# rg "\"memory_used_mb\"" $CLIENT_DATA_PATH/logs --glob safe.* -c --stats |
+# rg "(\d+) matches" |
+# rg "\d+" -o
+# )
+# echo "num_of_times: $num_of_times"
+# echo "Total memory is: $total_mem"
+# average_mem=$(($total_mem/$(($num_of_times))))
+# echo "Average memory is: $average_mem"
+
+# if (( $(echo "$average_mem > $client_avg_mem_limit_mb" | bc -l) )); then
+# echo "Client average memory usage exceeded threshold: $client_avg_mem_limit_mb MB"
+# exit 1
+# fi
+
+# - name: Check node swarm_driver handling statistics
+# shell: bash
+#       # With the latest improvements, swarm_driver will most likely
+#       # have no super long handling (longer than 1s).
+#       # As the `rg` cmd will fail the shell directly if no entry is found,
+#       # we do not cover that case here.
+#       # Be aware that if you do need to look for handlings longer than a second, it shall be:
+# # rg "SwarmCmd handled in [^m,µ,n]*s:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats
+# run: |
+# num_of_times=$(
+# rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
+# rg "(\d+) matches" |
+# rg "\d+" -o
+# )
+# echo "Number of long cmd handling times: $num_of_times"
+# total_long_handling_ms=$(
+# rg "SwarmCmd handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
+# awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
+# )
+# echo "Total cmd long handling time is: $total_long_handling_ms ms"
+# average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
+# echo "Average cmd long handling time is: $average_handling_ms ms"
+# total_long_handling=$(($total_long_handling_ms))
+# total_num_of_times=$(($num_of_times))
+# num_of_times=$(
+# rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -c --stats |
+# rg "(\d+) matches" |
+# rg "\d+" -o
+# )
+# echo "Number of long event handling times: $num_of_times"
+# total_long_handling_ms=$(
+# rg "SwarmEvent handled in [0-9.]+ms:" $NODE_DATA_PATH/*/logs/* --glob safe.* -o --no-line-number --no-filename |
+# awk -F' |ms:' '{sum += $4} END {printf "%.0f\n", sum}'
+# )
+# echo "Total event long handling time is: $total_long_handling_ms ms"
+# average_handling_ms=$(($total_long_handling_ms/$(($num_of_times))))
+# echo "Average event long handling time is: $average_handling_ms ms"
+# total_long_handling=$(($total_long_handling_ms+$total_long_handling))
+# total_num_of_times=$(($num_of_times+$total_num_of_times))
+# average_handling_ms=$(($total_long_handling/$(($total_num_of_times))))
+# echo "Total swarm_driver long handling times is: $total_num_of_times"
+# echo "Total swarm_driver long handling duration is: $total_long_handling ms"
+# echo "Total average swarm_driver long handling duration is: $average_handling_ms ms"
+
+# - name: Verify reward forwarding using rg
+# shell: bash
+# timeout-minutes: 1
+# run: |
+# min_reward_forwarding_times="100"
+# reward_forwarding_count=$(rg "Reward forwarding completed sending spend" $NODE_DATA_PATH -c --stats | \
+# rg "(\d+) matches" | rg "\d+" -o)
+# echo "Carried out $reward_forwarding_count reward forwardings"
+# if (( $(echo "$reward_forwarding_count < $min_reward_forwarding_times" | bc -l) )); then
+# echo "Reward forwarding times below the threshold: $min_reward_forwarding_times"
+# exit 1
+# fi
+# if: always()
+
+# - name: Upload payment wallet initialization log
+# uses: actions/upload-artifact@main
+# with:
+# name: payment_wallet_initialization_log
+# path: initial_balance_from_faucet.txt
+# continue-on-error: true
+# if: always()
+
+# - name: Move faucet log to the working folder
+# run: |
+# echo "current folder is:"
+# pwd
+# echo "SAFE_DATA_PATH has: "
+# ls -l $SAFE_DATA_PATH
+#       echo "test_faucet folder has: "
+# ls -l $SAFE_DATA_PATH/test_faucet
+# echo "logs folder has: "
+# ls -l $SAFE_DATA_PATH/test_faucet/logs
+# mv $FAUCET_LOG_PATH/*.log ./faucet_log.log
+# env:
+# SN_LOG: "all"
+# continue-on-error: true
+# if: always()
+# timeout-minutes: 1
+
+# - name: Move bootstrap_node log to the working directory
+# run: |
+# ls -l $BOOTSTRAP_NODE_DATA_PATH
+# mv $BOOTSTRAP_NODE_DATA_PATH/safenode.log ./bootstrap_node.log
+# continue-on-error: true
+# if: always()
+# timeout-minutes: 1
+
+# - name: Upload faucet log
+# uses: actions/upload-artifact@main
+# with:
+# name: memory_check_faucet_log
+# path: faucet_log.log
+# continue-on-error: true
+# if: always()
+
+# - name: Upload bootstrap_node log
+# uses: actions/upload-artifact@main
+# with:
+# name: memory_check_bootstrap_node_log
+# path: bootstrap_node.log
+# continue-on-error: true
+# if: always()
+
+# - name: Upload spend DAG and statistics
+# uses: actions/upload-artifact@main
+# with:
+# name: memory_check_spend_dag_and_statistics
+# path: spend_dag_and_statistics.txt
+# continue-on-error: true
+# if: always()
diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml
index b95a0a3488..98ee999b06 100644
--- a/.github/workflows/merge.yml
+++ b/.github/workflows/merge.yml
@@ -5,7 +5,7 @@ on:
# on main, we want to know that all commits are passing at a glance, any deviation should help bisecting errors
# the merge run checks should show on master and enable this clear test/passing history
merge_group:
- branches: [main, alpha*, beta*, rc*]
+ branches: [main, evm-dev]
pull_request:
branches: ["*"]
@@ -73,9 +73,9 @@ jobs:
# See https://doc.rust-lang.org/rustdoc/lints.html for lints that are 'warning' by default.
run: RUSTDOCFLAGS="--deny=warnings" cargo doc --no-deps
- - name: Check local-discovery is not a default feature
+ - name: Check local is not a default feature
shell: bash
- run: if [[ ! $(cargo metadata --no-deps --format-version 1 | jq -r '.packages[].features.default[]? | select(. == "local-discovery")') ]]; then echo "local-discovery is not a default feature in any package."; else echo "local-discovery is a default feature in at least one package." && exit 1; fi
+ run: if [[ ! $(cargo metadata --no-deps --format-version 1 | jq -r '.packages[].features.default[]? | select(. == "local")') ]]; then echo "local is not a default feature in any package."; else echo "local is a default feature in at least one package." && exit 1; fi
- name: Clean out the target directory
run: cargo clean
@@ -110,24 +110,13 @@ jobs:
- uses: Swatinem/rust-cache@v2
- - name: Run CLI tests
- timeout-minutes: 25
- run: cargo test --release --package sn_cli -- --skip test_acc_packet_
-
- # We do not run client `--tests` here as they can require a network
- - name: Run client tests
- timeout-minutes: 25
- run: |
- cargo test --release --package sn_client --lib
- cargo test --release --package sn_client --doc
-
- name: Run node tests
timeout-minutes: 25
run: cargo test --release --package sn_node --lib
- name: Run network tests
timeout-minutes: 25
- run: cargo test --release --package sn_networking
+ run: cargo test --release --package sn_networking --features="open-metrics"
- name: Run protocol tests
timeout-minutes: 25
@@ -162,7 +151,7 @@ jobs:
- os: windows-latest
safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
- os: macos-latest
- safe_path: /Users/runner/Library/Application Support/safe
+ safe_path: /Users/runner/Library/Application\ Support/safe
steps:
- uses: actions/checkout@v4
@@ -171,166 +160,182 @@ jobs:
- uses: Swatinem/rust-cache@v2
- name: Build binaries
- run: cargo build --release --bin safenode --bin safe
- timeout-minutes: 30
-
- - name: Build faucet binary
- run: cargo build --release --bin faucet --features gifting
+ run: cargo build --release --features local --bin safenode --bin autonomi
timeout-minutes: 30
- name: Start a local network
uses: maidsafe/sn-local-testnet-action@main
with:
action: start
- interval: 2000
+ enable-evm-testnet: true
node-path: target/release/safenode
- faucet-path: target/release/faucet
platform: ${{ matrix.os }}
build: true
- - name: Check SAFE_PEERS was set
+ - name: Check if SAFE_PEERS and EVM_NETWORK are set
shell: bash
run: |
if [[ -z "$SAFE_PEERS" ]]; then
echo "The SAFE_PEERS variable has not been set"
exit 1
+ elif [[ -z "$EVM_NETWORK" ]]; then
+ echo "The EVM_NETWORK variable has not been set"
+ exit 1
else
echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ echo "EVM_NETWORK has been set to $EVM_NETWORK"
fi
# only these unit tests require a network, the rest are run above
- - name: Run sn_client --tests
- run: cargo test --package sn_client --release --tests
+ - name: Run autonomi --tests
+ run: cargo test --package autonomi --tests -- --nocapture
env:
- SN_LOG: "all"
+ SN_LOG: "v"
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 15
- - name: Create and fund a wallet to pay for files storage
- run: |
- ./target/release/safe --log-output-dest=data-dir wallet create --no-password
- ./target/release/faucet --log-output-dest=data-dir send 1000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
- ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # FIXME: do this in a generic way for localtestnets
+ - name: export default secret key
+ if: matrix.os != 'windows-latest'
+ run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" >> $GITHUB_ENV
+ shell: bash
+ - name: Set secret key for Windows
+ if: matrix.os == 'windows-latest'
+ run: echo "SECRET_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ shell: pwsh
- - name: Start a client to upload cost estimate
- run: ./target/release/safe --log-output-dest=data-dir files estimate "./resources"
+ - name: Get file cost
+ run: ./target/release/autonomi --log-output-dest=data-dir file cost "./resources"
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 15
- - name: Start a client to upload files
- run: ./target/release/safe --log-output-dest=data-dir files upload "./resources" --retry-strategy quick
+ - name: File upload
+ run: ./target/release/autonomi --log-output-dest=data-dir file upload "./resources" > ./upload_output 2>&1
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 15
- - name: Start a client to download files
- run: ./target/release/safe --log-output-dest=data-dir files download --retry-strategy quick
+ - name: parse address (unix)
+ if: matrix.os != 'windows-latest'
+ run: |
+ UPLOAD_ADDRESS=$(rg "At address: ([0-9a-f]*)" -o -r '$1' ./upload_output)
+ echo "UPLOAD_ADDRESS=$UPLOAD_ADDRESS" >> $GITHUB_ENV
+ shell: bash
+
+ - name: parse address (win)
+ if: matrix.os == 'windows-latest'
+ run: |
+ $UPLOAD_ADDRESS = rg "At address: ([0-9a-f]*)" -o -r '$1' ./upload_output
+ echo "UPLOAD_ADDRESS=$UPLOAD_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ shell: pwsh
+
+ - name: File Download
+ run: ./target/release/autonomi --log-output-dest=data-dir file download ${{ env.UPLOAD_ADDRESS }} ./downloaded_resources
env:
- SN_LOG: "all"
- timeout-minutes: 2
+ SN_LOG: "v"
+ timeout-minutes: 5
+
+ - name: Generate register signing key
+ run: ./target/release/autonomi --log-output-dest=data-dir register generate-key
- # Client FoldersApi tests against local network
- - name: Client FoldersApi tests against local network
- run: cargo test --release --package sn_client --test folders_api
+ - name: Create register (writeable by owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register create baobao 123 > ./register_create_output 2>&1
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 10
- # CLI Acc-Packet files and folders tests against local network
- - name: CLI Acc-Packet files and folders tests
- run: cargo test --release -p sn_cli test_acc_packet -- --nocapture
+ - name: parse register address (unix)
+ if: matrix.os != 'windows-latest'
+ run: |
+ REGISTER_ADDRESS=$(rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_create_output)
+ echo "REGISTER_ADDRESS=$REGISTER_ADDRESS" >> $GITHUB_ENV
+ shell: bash
+
+ - name: parse register address (win)
+ if: matrix.os == 'windows-latest'
+ run: |
+ $REGISTER_ADDRESS = rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_create_output
+ echo "REGISTER_ADDRESS=$REGISTER_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ shell: pwsh
+
+ - name: Get register
+ run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.REGISTER_ADDRESS }}
env:
- SN_LOG: "all"
- timeout-minutes: 10
+ SN_LOG: "v"
+ timeout-minutes: 5
- - name: Start a client to create a register writable by the owner only
- run: ./target/release/safe --log-output-dest=data-dir register create -n baobao
+ - name: Edit register
+ run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.REGISTER_ADDRESS }} 456
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 10
- - name: Start a client to get a register writable by the owner only
- run: ./target/release/safe --log-output-dest=data-dir register get -n baobao
+ - name: Get register (after edit)
+ run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.REGISTER_ADDRESS }}
env:
- SN_LOG: "all"
- timeout-minutes: 2
+ SN_LOG: "v"
+ timeout-minutes: 5
- - name: Start a client to edit a register writable by the owner only
- run: ./target/release/safe --log-output-dest=data-dir register edit -n baobao wood
+ - name: Create Public Register (writeable by anyone)
+ run: ./target/release/autonomi --log-output-dest=data-dir register create bao 111 --public > ./register_public_create_output 2>&1
env:
- SN_LOG: "all"
- timeout-minutes: 10
- #
- # Next two steps are same with a slight difference in the way they write to the output file (GITHUB_OUTPUT vs ENV:GITHUB_OUTPUT)
- #
- - name: Start a client to create a register writable by anyone
- id: register-address
+ SN_LOG: "v"
+ timeout-minutes: 5
+
+ - name: parse public register address (unix)
if: matrix.os != 'windows-latest'
- run: echo "$(./target/release/safe --log-output-dest=data-dir register create -n trycatch -p | rg REGISTER_ADDRESS )" >> $GITHUB_OUTPUT
- env:
- SN_LOG: "all"
- timeout-minutes: 10
+ run: |
+ PUBLIC_REGISTER_ADDRESS=$(rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_public_create_output)
+ echo "PUBLIC_REGISTER_ADDRESS=$PUBLIC_REGISTER_ADDRESS" >> $GITHUB_ENV
+ shell: bash
- - name: Start a client to create a register writable by anyone
- id: register-address-windows
+ - name: parse public register address (win)
if: matrix.os == 'windows-latest'
- run: echo "$(./target/release/safe --log-output-dest=data-dir register create -n trycatch -p | rg REGISTER_ADDRESS )" >> $ENV:GITHUB_OUTPUT
- env:
- SN_LOG: "all"
- timeout-minutes: 10
+ run: |
+ $PUBLIC_REGISTER_ADDRESS = rg "Register created at address: ([0-9a-f]*)" -o -r '$1' ./register_public_create_output
+ echo "PUBLIC_REGISTER_ADDRESS=$PUBLIC_REGISTER_ADDRESS" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ shell: pwsh
- - name: Start a client to get a register writable by anyone (current client is the owner)
- run: ./target/release/safe --log-output-dest=data-dir register get -n trycatch
+ - name: Get Public Register (current key is the owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }}
env:
- SN_LOG: "all"
- timeout-minutes: 2
+ SN_LOG: "v"
+ timeout-minutes: 5
- - name: Start a client to edit a register writable by anyone (current client is the owner)
- run: ./target/release/safe --log-output-dest=data-dir register edit -n trycatch wood
+ - name: Edit Public Register (current key is the owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.PUBLIC_REGISTER_ADDRESS }} 222
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 10
- - name: Delete client subdir to generate new client
+ - name: Delete current register signing key
shell: bash
run: rm -rf ${{ matrix.safe_path }}/client
- #
- # Next four steps are same with a slight difference in the which output step they read from
- #
- - name: Start a client to get a register writable by anyone (new client is not the owner)
- if: matrix.os != 'windows-latest'
- run: ./target/release/safe --log-output-dest=data-dir register get ${{ steps.register-address.outputs.REGISTER_ADDRESS }}
+
+ - name: Generate new register signing key
+ run: ./target/release/autonomi --log-output-dest=data-dir register generate-key
+
+ - name: Get Public Register (new signing key is not the owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }}
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 2
- - name: Start a client to edit a register writable by anyone (new client is not the owner)
- if: matrix.os != 'windows-latest'
- run: ./target/release/safe --log-output-dest=data-dir register edit ${{ steps.register-address.outputs.REGISTER_ADDRESS }} water
+ - name: Edit Public Register (new signing key is not the owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register edit ${{ env.PUBLIC_REGISTER_ADDRESS }} 333
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 10
- - name: Start a client to get a register writable by anyone (new client is not the owner)
- if: matrix.os == 'windows-latest'
- run: ./target/release/safe --log-output-dest=data-dir register get ${{ steps.register-address-windows.outputs.REGISTER_ADDRESS }}
+ - name: Get Public Register (new signing key is not the owner)
+ run: ./target/release/autonomi --log-output-dest=data-dir register get ${{ env.PUBLIC_REGISTER_ADDRESS }}
env:
- SN_LOG: "all"
+ SN_LOG: "v"
timeout-minutes: 2
- - name: Start a client to edit a register writable by anyone (new client is not the owner)
- if: matrix.os == 'windows-latest'
- run: ./target/release/safe --log-output-dest=data-dir register edit ${{ steps.register-address-windows.outputs.REGISTER_ADDRESS }} water
- env:
- SN_LOG: "all"
- timeout-minutes: 10
-
- name: Stop the local network and upload logs
if: always()
uses: maidsafe/sn-local-testnet-action@main
@@ -339,84 +344,84 @@ jobs:
log_file_prefix: safe_test_logs_e2e
platform: ${{ matrix.os }}
- spend_test:
- if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: spend tests against network
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [ubuntu-latest, windows-latest, macos-latest]
- steps:
- - uses: actions/checkout@v4
+ # spend_test:
+ # if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
+ # name: spend tests against network
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # matrix:
+ # os: [ubuntu-latest, windows-latest, macos-latest]
+ # steps:
+ # - uses: actions/checkout@v4
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
+ # - uses: Swatinem/rust-cache@v2
- - name: Build binaries
- run: cargo build --release --features=local-discovery --bin safenode
- timeout-minutes: 30
+ # - name: Build binaries
+ # run: cargo build --release --features=local --bin safenode
+ # timeout-minutes: 30
- - name: Build faucet binary
- run: cargo build --release --bin faucet --features="local-discovery,gifting"
- timeout-minutes: 30
+ # - name: Build faucet binary
+ # run: cargo build --release --bin faucet --features="local,gifting"
+ # timeout-minutes: 30
- - name: Build testing executable
- run: cargo test --release -p sn_node --features=local-discovery --test sequential_transfers --test storage_payments --test double_spend --no-run
- env:
- # only set the target dir for windows to bypass the linker issue.
- # happens if we build the node manager via testnet action
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 30
+ # - name: Build testing executable
+ # run: cargo test --release -p sn_node --features=local --test sequential_transfers --test storage_payments --test double_spend --no-run
+ # env:
+ # # only set the target dir for windows to bypass the linker issue.
+ # # happens if we build the node manager via testnet action
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 30
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ${{ matrix.os }}
- build: true
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ${{ matrix.os }}
+ # build: true
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
- - name: execute the sequential transfers tests
- run: cargo test --release -p sn_node --features="local-discovery" --test sequential_transfers -- --nocapture --test-threads=1
- env:
- SN_LOG: "all"
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 25
+ # - name: execute the sequential transfers tests
+ # run: cargo test --release -p sn_node --features="local" --test sequential_transfers -- --nocapture --test-threads=1
+ # env:
+ # SN_LOG: "all"
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 25
- - name: execute the storage payment tests
- run: cargo test --release -p sn_node --features="local-discovery" --test storage_payments -- --nocapture --test-threads=1
- env:
- SN_LOG: "all"
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 25
+ # - name: execute the storage payment tests
+ # run: cargo test --release -p sn_node --features="local" --test storage_payments -- --nocapture --test-threads=1
+ # env:
+ # SN_LOG: "all"
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 25
- - name: execute the double spend tests
- run: cargo test --release -p sn_node --features="local-discovery" --test double_spend -- --nocapture --test-threads=1
- env:
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 25
+ # - name: execute the double spend tests
+ # run: cargo test --release -p sn_node --features="local" --test double_spend -- --nocapture --test-threads=1
+ # env:
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 25
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: safe_test_logs_spend
- platform: ${{ matrix.os }}
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: safe_test_logs_spend
+ # platform: ${{ matrix.os }}
# # runs with increased node count
# spend_simulation:
@@ -435,15 +440,15 @@ jobs:
# - uses: Swatinem/rust-cache@v2
# - name: Build binaries
- # run: cargo build --release --features=local-discovery --bin safenode
+ # run: cargo build --release --features=local --bin safenode
# timeout-minutes: 30
# - name: Build faucet binary
- # run: cargo build --release --bin faucet --features="local-discovery,gifting"
+ # run: cargo build --release --bin faucet --features="local,gifting"
# timeout-minutes: 30
# - name: Build testing executable
- # run: cargo test --release -p sn_node --features=local-discovery --test spend_simulation --no-run
+ # run: cargo test --release -p sn_node --features=local --test spend_simulation --no-run
# env:
# # only set the target dir for windows to bypass the linker issue.
# # happens if we build the node manager via testnet action
@@ -472,7 +477,7 @@ jobs:
# fi
# - name: execute the spend simulation
- # run: cargo test --release -p sn_node --features="local-discovery" --test spend_simulation -- --nocapture
+ # run: cargo test --release -p sn_node --features="local" --test spend_simulation -- --nocapture
# env:
# CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
# timeout-minutes: 25
@@ -485,139 +490,138 @@ jobs:
# log_file_prefix: safe_test_logs_spend_simulation
# platform: ${{ matrix.os }}
- token_distribution_test:
+ # token_distribution_test:
+ # if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
+ # name: token distribution test
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # matrix:
+ # os: [ubuntu-latest, windows-latest, macos-latest]
+ # steps:
+ # - uses: actions/checkout@v4
+
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+
+ # - uses: Swatinem/rust-cache@v2
+
+ # - name: Build binaries
+ # run: cargo build --release --features=local,distribution --bin safenode
+ # timeout-minutes: 35
+
+ # - name: Build faucet binary
+ # run: cargo build --release --features=local,distribution,gifting --bin faucet
+ # timeout-minutes: 35
+
+ # - name: Build testing executable
+ # run: cargo test --release --features=local,distribution --no-run
+ # env:
+ # # only set the target dir for windows to bypass the linker issue.
+ # # happens if we build the node manager via testnet action
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 35
+
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ${{ matrix.os }}
+ # build: true
+
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
+
+ # - name: execute token_distribution tests
+ # run: cargo test --release --features=local,distribution token_distribution -- --nocapture --test-threads=1
+ # env:
+ # SN_LOG: "all"
+ # CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
+ # timeout-minutes: 25
+
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: safe_test_logs_token_distribution
+ # platform: ${{ matrix.os }}
+
+ churn:
if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: token distribution test
+ name: Network churning tests
runs-on: ${{ matrix.os }}
strategy:
matrix:
- os: [ubuntu-latest, windows-latest, macos-latest]
+ include:
+ - os: ubuntu-latest
+ node_data_path: /home/runner/.local/share/safe/node
+ safe_path: /home/runner/.local/share/safe
+ - os: windows-latest
+ node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
+ safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
+ - os: macos-latest
+ node_data_path: /Users/runner/Library/Application Support/safe/node
+ safe_path: /Users/runner/Library/Application Support/safe
steps:
- uses: actions/checkout@v4
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
+ - uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: Build binaries
- run: cargo build --release --features=local-discovery,distribution --bin safenode
- timeout-minutes: 35
-
- - name: Build faucet binary
- run: cargo build --release --features=local-discovery,distribution,gifting --bin faucet
- timeout-minutes: 35
+ run: cargo build --release --features local --bin safenode
+ timeout-minutes: 30
- - name: Build testing executable
- run: cargo test --release --features=local-discovery,distribution --no-run
+ - name: Build churn tests
+ run: cargo test --release -p sn_node --features=local --test data_with_churn --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 35
+ timeout-minutes: 30
- name: Start a local network
uses: maidsafe/sn-local-testnet-action@main
with:
action: start
- interval: 2000
+ enable-evm-testnet: true
node-path: target/release/safenode
- faucet-path: target/release/faucet
platform: ${{ matrix.os }}
build: true
- - name: Check SAFE_PEERS was set
+ - name: Check if SAFE_PEERS and EVM_NETWORK are set
shell: bash
run: |
if [[ -z "$SAFE_PEERS" ]]; then
echo "The SAFE_PEERS variable has not been set"
exit 1
+ elif [[ -z "$EVM_NETWORK" ]]; then
+ echo "The EVM_NETWORK variable has not been set"
+ exit 1
else
echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ echo "EVM_NETWORK has been set to $EVM_NETWORK"
fi
- - name: execute token_distribution tests
- run: cargo test --release --features=local-discovery,distribution token_distribution -- --nocapture --test-threads=1
+ - name: Chunks data integrity during nodes churn
+ run: cargo test --release -p sn_node --features=local --test data_with_churn -- --nocapture
env:
+ TEST_DURATION_MINS: 5
+ TEST_TOTAL_CHURN_CYCLES: 15
SN_LOG: "all"
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 25
-
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: safe_test_logs_token_distribution
- platform: ${{ matrix.os }}
-
- churn:
- if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: Network churning tests
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- include:
- - os: ubuntu-latest
- node_data_path: /home/runner/.local/share/safe/node
- safe_path: /home/runner/.local/share/safe
- - os: windows-latest
- node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
- safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
- - os: macos-latest
- node_data_path: /Users/runner/Library/Application Support/safe/node
- safe_path: /Users/runner/Library/Application Support/safe
- steps:
- - uses: actions/checkout@v4
-
- - uses: dtolnay/rust-toolchain@stable
-
- - uses: Swatinem/rust-cache@v2
-
- - name: Build binaries
- run: cargo build --release --features local-discovery --bin safenode
- timeout-minutes: 30
-
- - name: Build faucet binaries
- run: cargo build --release --features="local-discovery,gifting" --bin faucet
- timeout-minutes: 30
-
- - name: Build churn tests
- run: cargo test --release -p sn_node --features=local-discovery --test data_with_churn --no-run
- env:
- # only set the target dir for windows to bypass the linker issue.
- # happens if we build the node manager via testnet action
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 30
-
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ${{ matrix.os }}
- build: true
-
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
-
- - name: Chunks data integrity during nodes churn
- run: cargo test --release -p sn_node --features="local-discovery" --test data_with_churn -- --nocapture
- env:
- TEST_DURATION_MINS: 5
- TEST_TOTAL_CHURN_CYCLES: 15
- SN_LOG: "all"
- CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
- timeout-minutes: 30
+ timeout-minutes: 30
- name: Stop the local network and upload logs
if: always()
@@ -705,15 +709,11 @@ jobs:
- uses: Swatinem/rust-cache@v2
- name: Build binaries
- run: cargo build --release --features local-discovery --bin safenode
- timeout-minutes: 30
-
- - name: Build fuacet binary
- run: cargo build --release --features="local-discovery,gifting" --bin faucet
+ run: cargo build --release --features local --bin safenode
timeout-minutes: 30
- name: Build data location and routing table tests
- run: cargo test --release -p sn_node --features=local-discovery --test verify_data_location --test verify_routing_table --no-run
+ run: cargo test --release -p sn_node --features=local --test verify_data_location --test verify_routing_table --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -724,30 +724,33 @@ jobs:
uses: maidsafe/sn-local-testnet-action@main
with:
action: start
- interval: 2000
+ enable-evm-testnet: true
node-path: target/release/safenode
- faucet-path: target/release/faucet
platform: ${{ matrix.os }}
build: true
- - name: Check SAFE_PEERS was set
+ - name: Check if SAFE_PEERS and EVM_NETWORK are set
shell: bash
run: |
if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
+ echo "The SAFE_PEERS variable has not been set"
+ exit 1
+ elif [[ -z "$EVM_NETWORK" ]]; then
+ echo "The EVM_NETWORK variable has not been set"
+ exit 1
else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ echo "EVM_NETWORK has been set to $EVM_NETWORK"
fi
- name: Verify the routing tables of the nodes
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_routing_table -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 5
- name: Verify the location of the data on the network
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_data_location -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_data_location -- --nocapture
env:
CHURN_COUNT: 6
SN_LOG: "all"
@@ -755,7 +758,7 @@ jobs:
timeout-minutes: 25
- name: Verify the routing tables of the nodes
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_routing_table -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 5
@@ -800,525 +803,525 @@ jobs:
exit 1
fi
- faucet_test:
- if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: Faucet test
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
+ # faucet_test:
+ # if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
+ # name: Faucet test
+ # runs-on: ubuntu-latest
+ # steps:
+ # - uses: actions/checkout@v4
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ # - uses: Swatinem/rust-cache@v2
- - name: install ripgrep
- shell: bash
- run: sudo apt-get install -y ripgrep
+ # - name: install ripgrep
+ # shell: bash
+ # run: sudo apt-get install -y ripgrep
- - name: Build binaries
- run: cargo build --release --bin safenode --bin safe
- timeout-minutes: 30
+ # - name: Build binaries
+ # run: cargo build --release --bin safenode --bin safe
+ # timeout-minutes: 30
- - name: Build faucet binary
- run: cargo build --release --bin faucet --features gifting
- timeout-minutes: 30
+ # - name: Build faucet binary
+ # run: cargo build --release --bin faucet --features gifting
+ # timeout-minutes: 30
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ubuntu-latest
- build: true
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ubuntu-latest
+ # build: true
- - name: Check we're _not_ warned about using default genesis
- run: |
- if rg "USING DEFAULT" "${{ matrix.safe_path }}"/*/*/logs; then
- exit 1
- fi
- shell: bash
+ # - name: Check we're _not_ warned about using default genesis
+ # run: |
+ # if rg "USING DEFAULT" "${{ matrix.safe_path }}"/*/*/logs; then
+ # exit 1
+ # fi
+ # shell: bash
- - name: Move built binaries and clear out target dir
- shell: bash
- run: |
- mv target/release/faucet ~/faucet
- mv target/release/safe ~/safe
- rm -rf target
+ # - name: Move built binaries and clear out target dir
+ # shell: bash
+ # run: |
+ # mv target/release/faucet ~/faucet
+ # mv target/release/safe ~/safe
+ # rm -rf target
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
- - name: Create and fund a wallet first time
- run: |
- ~/safe --log-output-dest=data-dir wallet create --no-password
- ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 1>first.txt
- echo "----------"
- cat first.txt
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Create and fund a wallet first time
+ # run: |
+ # ~/safe --log-output-dest=data-dir wallet create --no-password
+ # ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 1>first.txt
+ # echo "----------"
+ # cat first.txt
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Move faucet log to the working folder
- run: |
- echo "SAFE_DATA_PATH has: "
- ls -l $SAFE_DATA_PATH
- echo "test_faucet foder has: "
- ls -l $SAFE_DATA_PATH/test_faucet
- echo "logs folder has: "
- ls -l $SAFE_DATA_PATH/test_faucet/logs
- mv $SAFE_DATA_PATH/test_faucet/logs/faucet.log ./faucet_log.log
- env:
- SN_LOG: "all"
- SAFE_DATA_PATH: /home/runner/.local/share/safe
- continue-on-error: true
- if: always()
- timeout-minutes: 1
+ # - name: Move faucet log to the working folder
+ # run: |
+ # echo "SAFE_DATA_PATH has: "
+ # ls -l $SAFE_DATA_PATH
+ # echo "test_faucet folder has: "
+ # ls -l $SAFE_DATA_PATH/test_faucet
+ # echo "logs folder has: "
+ # ls -l $SAFE_DATA_PATH/test_faucet/logs
+ # mv $SAFE_DATA_PATH/test_faucet/logs/faucet.log ./faucet_log.log
+ # env:
+ # SN_LOG: "all"
+ # SAFE_DATA_PATH: /home/runner/.local/share/safe
+ # continue-on-error: true
+ # if: always()
+ # timeout-minutes: 1
- - name: Upload faucet log
- uses: actions/upload-artifact@main
- with:
- name: faucet_test_first_faucet_log
- path: faucet_log.log
- continue-on-error: true
- if: always()
+ # - name: Upload faucet log
+ # uses: actions/upload-artifact@main
+ # with:
+ # name: faucet_test_first_faucet_log
+ # path: faucet_log.log
+ # continue-on-error: true
+ # if: always()
- - name: Create and fund a wallet second time
- run: |
- ls -l /home/runner/.local/share
- ls -l /home/runner/.local/share/safe
- rm -rf /home/runner/.local/share/safe/test_faucet
- rm -rf /home/runner/.local/share/safe/test_genesis
- rm -rf /home/runner/.local/share/safe/client
- ~/safe --log-output-dest=data-dir wallet create --no-password
- ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 1>second.txt
- echo "----------"
- cat second.txt
- if grep "genesis is already spent" second.txt; then
- echo "Duplicated faucet rejected"
- else
- echo "Duplicated faucet not rejected!"
- exit 1
- fi
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Create and fund a wallet second time
+ # run: |
+ # ls -l /home/runner/.local/share
+ # ls -l /home/runner/.local/share/safe
+ # rm -rf /home/runner/.local/share/safe/test_faucet
+ # rm -rf /home/runner/.local/share/safe/test_genesis
+ # rm -rf /home/runner/.local/share/safe/client
+ # ~/safe --log-output-dest=data-dir wallet create --no-password
+ # ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 1>second.txt
+ # echo "----------"
+ # cat second.txt
+ # if grep "genesis is already spent" second.txt; then
+ # echo "Duplicated faucet rejected"
+ # else
+ # echo "Duplicated faucet not rejected!"
+ # exit 1
+ # fi
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Create and fund a wallet with different keypair
- run: |
- ls -l /home/runner/.local/share
- ls -l /home/runner/.local/share/safe
- rm -rf /home/runner/.local/share/safe/test_faucet
- rm -rf /home/runner/.local/share/safe/test_genesis
- rm -rf /home/runner/.local/share/safe/client
- ~/safe --log-output-dest=data-dir wallet create --no-password
- if GENESIS_PK=a9925296499299fdbf4412509d342a92e015f5b996e9acd1d2ab7f2326e3ad05934326efdc345345a95e973ac1bb6637 GENESIS_SK=40f6bbc870355c68138ac70b450b6425af02b49874df3f141b7018378ceaac66 nohup ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1); then
- echo "Faucet with different genesis key not rejected!"
- exit 1
- else
- echo "Faucet with different genesis key rejected"
- fi
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Create and fund a wallet with different keypair
+ # run: |
+ # ls -l /home/runner/.local/share
+ # ls -l /home/runner/.local/share/safe
+ # rm -rf /home/runner/.local/share/safe/test_faucet
+ # rm -rf /home/runner/.local/share/safe/test_genesis
+ # rm -rf /home/runner/.local/share/safe/client
+ # ~/safe --log-output-dest=data-dir wallet create --no-password
+ # if GENESIS_PK=a9925296499299fdbf4412509d342a92e015f5b996e9acd1d2ab7f2326e3ad05934326efdc345345a95e973ac1bb6637 GENESIS_SK=40f6bbc870355c68138ac70b450b6425af02b49874df3f141b7018378ceaac66 nohup ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1); then
+ # echo "Faucet with different genesis key not rejected!"
+ # exit 1
+ # else
+ # echo "Faucet with different genesis key rejected"
+ # fi
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Build faucet binary again without the gifting feature
- run: cargo build --release --bin faucet
- timeout-minutes: 30
+ # - name: Build faucet binary again without the gifting feature
+ # run: cargo build --release --bin faucet
+ # timeout-minutes: 30
- - name: Start up a faucet in server mode
- run: |
- ls -l /home/runner/.local/share
- ls -l /home/runner/.local/share/safe
- rm -rf /home/runner/.local/share/safe/test_faucet
- rm -rf /home/runner/.local/share/safe/test_genesis
- rm -rf /home/runner/.local/share/safe/client
- target/release/faucet server &
- sleep 60
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Start up a faucet in server mode
+ # run: |
+ # ls -l /home/runner/.local/share
+ # ls -l /home/runner/.local/share/safe
+ # rm -rf /home/runner/.local/share/safe/test_faucet
+ # rm -rf /home/runner/.local/share/safe/test_genesis
+ # rm -rf /home/runner/.local/share/safe/client
+ # target/release/faucet server &
+ # sleep 60
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: check there is no upload happens
- shell: bash
- run: |
- if grep -r "NanoTokens(10) }, Output" $NODE_DATA_PATH
- then
- echo "We find ongoing upload !"
- exit 1
- fi
- env:
- NODE_DATA_PATH: /home/runner/.local/share/safe/node
- timeout-minutes: 1
+ # - name: check there is no upload happens
+ # shell: bash
+ # run: |
+ # if grep -r "NanoTokens(10) }, Output" $NODE_DATA_PATH
+ # then
+ # echo "We found an ongoing upload!"
+ # exit 1
+ # fi
+ # env:
+ # NODE_DATA_PATH: /home/runner/.local/share/safe/node
+ # timeout-minutes: 1
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- platform: ubuntu-latest
- log_file_prefix: safe_test_logs_faucet
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # platform: ubuntu-latest
+ # log_file_prefix: safe_test_logs_faucet
- large_file_upload_test:
- if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: Large file upload
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
+ # large_file_upload_test:
+ # if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
+ # name: Large file upload
+ # runs-on: ubuntu-latest
+ # steps:
+ # - uses: actions/checkout@v4
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ # - uses: Swatinem/rust-cache@v2
- - name: install ripgrep
- shell: bash
- run: sudo apt-get install -y ripgrep
+ # - name: install ripgrep
+ # shell: bash
+ # run: sudo apt-get install -y ripgrep
- - name: Check the available space
- run: |
- df
- echo "Home dir:"
- du -sh /home/runner/
- echo "Home subdirs:"
- du -sh /home/runner/*/
- echo "PWD:"
- du -sh .
- echo "PWD subdirs:"
- du -sh */
-
- - name: Download material, 1.1G
- shell: bash
- run: |
- wget https://releases.ubuntu.com/14.04.6/ubuntu-14.04.6-desktop-i386.iso
- ls -l
+ # - name: Check the available space
+ # run: |
+ # df
+ # echo "Home dir:"
+ # du -sh /home/runner/
+ # echo "Home subdirs:"
+ # du -sh /home/runner/*/
+ # echo "PWD:"
+ # du -sh .
+ # echo "PWD subdirs:"
+ # du -sh */
+
+ # - name: Download material, 1.1G
+ # shell: bash
+ # run: |
+ # wget https://releases.ubuntu.com/14.04.6/ubuntu-14.04.6-desktop-i386.iso
+ # ls -l
- - name: Build binaries
- run: cargo build --release --bin safenode --bin safe
- timeout-minutes: 30
+ # - name: Build binaries
+ # run: cargo build --release --bin safenode --bin safe
+ # timeout-minutes: 30
- - name: Build faucet binary
- run: cargo build --release --bin faucet --features gifting
- timeout-minutes: 30
+ # - name: Build faucet binary
+ # run: cargo build --release --bin faucet --features gifting
+ # timeout-minutes: 30
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ubuntu-latest
- build: true
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ubuntu-latest
+ # build: true
- - name: Check we're _not_ warned about using default genesis
- run: |
- if rg "USING DEFAULT" "${{ matrix.safe_path }}"/*/*/logs; then
- exit 1
- fi
- shell: bash
+ # - name: Check we're _not_ warned about using default genesis
+ # run: |
+ # if rg "USING DEFAULT" "${{ matrix.safe_path }}"/*/*/logs; then
+ # exit 1
+ # fi
+ # shell: bash
- # The test currently fails because the GH runner runs out of disk space. So we clear out the target dir here.
- # Might be related to additional deps used in the codebase.
- - name: Move built binaries and clear out target dir
- shell: bash
- run: |
- mv target/release/faucet ~/faucet
- mv target/release/safe ~/safe
- rm -rf target
+ # # The test currently fails because the GH runner runs out of disk space. So we clear out the target dir here.
+ # # Might be related to additional deps used in the codebase.
+ # - name: Move built binaries and clear out target dir
+ # shell: bash
+ # run: |
+ # mv target/release/faucet ~/faucet
+ # mv target/release/safe ~/safe
+ # rm -rf target
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
- - name: Check the available space post download
- run: |
- df
- echo "Home dir:"
- du -sh /home/runner/
- echo "Home subdirs:"
- du -sh /home/runner/*/
- echo "PWD:"
- du -sh .
- echo "PWD subdirs:"
- du -sh */
-
- - name: Create and fund a wallet to pay for files storage
- run: |
- ~/safe --log-output-dest=data-dir wallet create --no-password
- ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
- ~/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Check the available space post download
+ # run: |
+ # df
+ # echo "Home dir:"
+ # du -sh /home/runner/
+ # echo "Home subdirs:"
+ # du -sh /home/runner/*/
+ # echo "PWD:"
+ # du -sh .
+ # echo "PWD subdirs:"
+ # du -sh */
+
+ # - name: Create and fund a wallet to pay for files storage
+ # run: |
+ # ~/safe --log-output-dest=data-dir wallet create --no-password
+ # ~/faucet --log-output-dest=data-dir send 100000000 $(~/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
+ # ~/safe --log-output-dest=data-dir wallet receive --file transfer_hex
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Start a client to upload
- run: ~/safe --log-output-dest=data-dir files upload "ubuntu-14.04.6-desktop-i386.iso" --retry-strategy quick
- env:
- SN_LOG: "all"
- timeout-minutes: 30
+ # - name: Start a client to upload
+ # run: ~/safe --log-output-dest=data-dir files upload "ubuntu-14.04.6-desktop-i386.iso" --retry-strategy quick
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 30
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- platform: ubuntu-latest
- log_file_prefix: safe_test_logs_large_file_upload
- build: true
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # platform: ubuntu-latest
+ # log_file_prefix: safe_test_logs_large_file_upload
+ # build: true
- - name: check there is no failed replication fetch
- shell: bash
- run: |
- if grep -r "failed to fetch" $NODE_DATA_PATH
- then
- echo "We find failed replication fetch"
- exit 1
- fi
- env:
- NODE_DATA_PATH: /home/runner/.local/share/safe/node
- timeout-minutes: 1
+ # - name: check there is no failed replication fetch
+ # shell: bash
+ # run: |
+ # if grep -r "failed to fetch" $NODE_DATA_PATH
+ # then
+ # echo "We found a failed replication fetch"
+ # exit 1
+ # fi
+ # env:
+ # NODE_DATA_PATH: /home/runner/.local/share/safe/node
+ # timeout-minutes: 1
- - name: Check the home dir leftover space
- run: |
- df
- du -sh /home/runner/
+ # - name: Check the home dir leftover space
+ # run: |
+ # df
+ # du -sh /home/runner/
- - name: Confirm the wallet files (cash_notes, confirmed_spends)
- run: |
- pwd
- ls $CLIENT_DATA_PATH/ -l
- ls $CLIENT_DATA_PATH/wallet -l
- ls $CLIENT_DATA_PATH/wallet/cash_notes -l
- ls $CLIENT_DATA_PATH/wallet/confirmed_spends -l
- ls $CLIENT_DATA_PATH/logs -l
- env:
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
- timeout-minutes: 1
+ # - name: Confirm the wallet files (cash_notes, confirmed_spends)
+ # run: |
+ # pwd
+ # ls $CLIENT_DATA_PATH/ -l
+ # ls $CLIENT_DATA_PATH/wallet -l
+ # ls $CLIENT_DATA_PATH/wallet/cash_notes -l
+ # ls $CLIENT_DATA_PATH/wallet/confirmed_spends -l
+ # ls $CLIENT_DATA_PATH/logs -l
+ # env:
+ # CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ # timeout-minutes: 1
- replication_bench_with_heavy_upload:
- if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
- name: Replication bench with heavy upload
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
+ # replication_bench_with_heavy_upload:
+ # if: "!startsWith(github.event.head_commit.message, 'chore(release):')"
+ # name: Replication bench with heavy upload
+ # runs-on: ubuntu-latest
+ # steps:
+ # - uses: actions/checkout@v4
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ # - uses: Swatinem/rust-cache@v2
- - name: install ripgrep
- shell: bash
- run: sudo apt-get install -y ripgrep
+ # - name: install ripgrep
+ # shell: bash
+ # run: sudo apt-get install -y ripgrep
- - name: Download materials to create two 300MB test_files to be uploaded by client
- shell: bash
- run: |
- mkdir test_data_1
- cd test_data_1
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safe-qiWithListeners-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode-qiWithListeners-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode_rpc_client-qiWithListeners-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/faucet-qilesssubs-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safe-qilesssubs-x86_64.tar.gz
- ls -l
- cd ..
- tar -cvzf test_data_1.tar.gz test_data_1
- mkdir test_data_2
- cd test_data_2
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode-qilesssubs-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode_rpc_client-qilesssubs-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/faucet-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safe-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode-DebugMem-x86_64.tar.gz
- ls -l
- cd ..
- tar -cvzf test_data_2.tar.gz test_data_2
- ls -l
- mkdir test_data_3
- cd test_data_3
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode_rpc_client-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/faucet-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safe-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode-DebugMem-x86_64.tar.gz
- wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode_rpc_client-DebugMem-x86_64.tar.gz
- ls -l
- cd ..
- tar -cvzf test_data_3.tar.gz test_data_3
- ls -l
- df
+ # - name: Download materials to create two 300MB test_files to be uploaded by client
+ # shell: bash
+ # run: |
+ # mkdir test_data_1
+ # cd test_data_1
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safe-qiWithListeners-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode-qiWithListeners-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/Qi930/safenode_rpc_client-qiWithListeners-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/faucet-qilesssubs-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safe-qilesssubs-x86_64.tar.gz
+ # ls -l
+ # cd ..
+ # tar -cvzf test_data_1.tar.gz test_data_1
+ # mkdir test_data_2
+ # cd test_data_2
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode-qilesssubs-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/QiSubdivisionBranch/safenode_rpc_client-qilesssubs-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/faucet-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safe-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode-DebugMem-x86_64.tar.gz
+ # ls -l
+ # cd ..
+ # tar -cvzf test_data_2.tar.gz test_data_2
+ # ls -l
+ # mkdir test_data_3
+ # cd test_data_3
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush/safenode_rpc_client-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/faucet-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safe-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode-DebugMem-x86_64.tar.gz
+ # wget https://sn-node.s3.eu-west-2.amazonaws.com/joshuef/RemoveArtificalReplPush2/safenode_rpc_client-DebugMem-x86_64.tar.gz
+ # ls -l
+ # cd ..
+ # tar -cvzf test_data_3.tar.gz test_data_3
+ # ls -l
+ # df
- - name: Build binaries
- run: cargo build --release --bin safenode --bin safe
- timeout-minutes: 30
+ # - name: Build binaries
+ # run: cargo build --release --bin safenode --bin safe
+ # timeout-minutes: 30
- - name: Build faucet binary
- run: cargo build --release --bin faucet --features gifting
- timeout-minutes: 30
+ # - name: Build faucet binary
+ # run: cargo build --release --bin faucet --features gifting
+ # timeout-minutes: 30
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ubuntu-latest
- build: true
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ubuntu-latest
+ # build: true
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
- - name: Create and fund a wallet to pay for files storage
- run: |
- ./target/release/safe --log-output-dest=data-dir wallet create --no-password
- ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
- ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Create and fund a wallet to pay for files storage
+ # run: |
+ # ./target/release/safe --log-output-dest=data-dir wallet create --no-password
+ # ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
+ # ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Start a client to upload first file
- run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_1.tar.gz" --retry-strategy quick
- env:
- SN_LOG: "all"
- timeout-minutes: 5
+ # - name: Start a client to upload first file
+ # run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_1.tar.gz" --retry-strategy quick
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 5
- - name: Ensure no leftover cash_notes and payment files
- run: |
- expected_cash_notes_files="1"
- expected_payment_files="0"
- pwd
- ls $CLIENT_DATA_PATH/ -l
- ls $CLIENT_DATA_PATH/wallet -l
- ls $CLIENT_DATA_PATH/wallet/cash_notes -l
- cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l)
- echo "Find $cash_note_files cash_note files"
- if [ $expected_cash_notes_files -lt $cash_note_files ]; then
- echo "Got too many cash_note files leftover: $cash_note_files"
- exit 1
- fi
- ls $CLIENT_DATA_PATH/wallet/payments -l
- payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l)
- if [ $expected_payment_files -lt $payment_files ]; then
- echo "Got too many payment files leftover: $payment_files"
- exit 1
- fi
- env:
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
- timeout-minutes: 10
+ # - name: Ensure no leftover cash_notes and payment files
+ # run: |
+ # expected_cash_notes_files="1"
+ # expected_payment_files="0"
+ # pwd
+ # ls $CLIENT_DATA_PATH/ -l
+ # ls $CLIENT_DATA_PATH/wallet -l
+ # ls $CLIENT_DATA_PATH/wallet/cash_notes -l
+ # cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l)
+ # echo "Found $cash_note_files cash_note files"
+ # if [ $expected_cash_notes_files -lt $cash_note_files ]; then
+ # echo "Got too many cash_note files leftover: $cash_note_files"
+ # exit 1
+ # fi
+ # ls $CLIENT_DATA_PATH/wallet/payments -l
+ # payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l)
+ # if [ $expected_payment_files -lt $payment_files ]; then
+ # echo "Got too many payment files leftover: $payment_files"
+ # exit 1
+ # fi
+ # env:
+ # CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ # timeout-minutes: 10
- - name: Wait for certain period
- run: sleep 300
- timeout-minutes: 6
+ # - name: Wait for certain period
+ # run: sleep 300
+ # timeout-minutes: 6
- - name: Use same client to upload second file
- run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_2.tar.gz" --retry-strategy quick
- env:
- SN_LOG: "all"
- timeout-minutes: 10
+ # - name: Use same client to upload second file
+ # run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_2.tar.gz" --retry-strategy quick
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 10
- - name: Ensure no leftover cash_notes and payment files
- run: |
- expected_cash_notes_files="1"
- expected_payment_files="0"
- pwd
- ls $CLIENT_DATA_PATH/ -l
- ls $CLIENT_DATA_PATH/wallet -l
- ls $CLIENT_DATA_PATH/wallet/cash_notes -l
- cash_note_files=$(find $CLIENT_DATA_PATH/wallet/cash_notes -type f | wc -l)
- if (( $(echo "$cash_note_files > $expected_cash_notes_files" | bc -l) )); then
- echo "Got too many cash_note files leftover: $cash_note_files when we expected $expected_cash_notes_files"
- exit 1
- fi
- ls $CLIENT_DATA_PATH/wallet/payments -l
- payment_files=$(find $CLIENT_DATA_PATH/wallet/payments -type f | wc -l)
- if (( $(echo "$payment_files > $expected_payment_files" | bc -l) )); then
- echo "Got too many payment files leftover: $payment_files"
- exit 1
- fi
- env:
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
- timeout-minutes: 10
+ # - name: Ensure no leftover cash_notes and payment files
+ # run: |
+ # expected_cash_notes_files="1"
+ # expected_payment_files="0"
+ # pwd
+ # ls $CLIENT_DATA_PATH/ -l
+ # ls $CLIENT_DATA_PATH/wallet -l
+ # ls $CLIENT_DATA_PATH/wallet/cash_notes -l
+ # cash_note_files=$(find $CLIENT_DATA_PATH/wallet/cash_notes -type f | wc -l)
+ # if (( $(echo "$cash_note_files > $expected_cash_notes_files" | bc -l) )); then
+ # echo "Got too many cash_note files leftover: $cash_note_files when we expected $expected_cash_notes_files"
+ # exit 1
+ # fi
+ # ls $CLIENT_DATA_PATH/wallet/payments -l
+ # payment_files=$(find $CLIENT_DATA_PATH/wallet/payments -type f | wc -l)
+ # if (( $(echo "$payment_files > $expected_payment_files" | bc -l) )); then
+ # echo "Got too many payment files leftover: $payment_files"
+ # exit 1
+ # fi
+ # env:
+ # CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ # timeout-minutes: 10
- - name: Wait for certain period
- run: sleep 300
- timeout-minutes: 6
+ # - name: Wait for certain period
+ # run: sleep 300
+ # timeout-minutes: 6
- # Start a different client to avoid local wallet slow down with more payments handled.
- - name: Start a different client
- run: |
- pwd
- mv $CLIENT_DATA_PATH $SAFE_DATA_PATH/client_first
- ls -l $SAFE_DATA_PATH
- ls -l $SAFE_DATA_PATH/client_first
- mkdir $SAFE_DATA_PATH/client
- ls -l $SAFE_DATA_PATH
- mv $SAFE_DATA_PATH/client_first/logs $CLIENT_DATA_PATH/logs
- ls -l $CLIENT_DATA_PATH
- ./target/release/safe --log-output-dest=data-dir wallet create --no-password
- ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
- ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
- env:
- SN_LOG: "all"
- SAFE_DATA_PATH: /home/runner/.local/share/safe
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
- timeout-minutes: 25
+ # # Start a different client to avoid local wallet slow down with more payments handled.
+ # - name: Start a different client
+ # run: |
+ # pwd
+ # mv $CLIENT_DATA_PATH $SAFE_DATA_PATH/client_first
+ # ls -l $SAFE_DATA_PATH
+ # ls -l $SAFE_DATA_PATH/client_first
+ # mkdir $SAFE_DATA_PATH/client
+ # ls -l $SAFE_DATA_PATH
+ # mv $SAFE_DATA_PATH/client_first/logs $CLIENT_DATA_PATH/logs
+ # ls -l $CLIENT_DATA_PATH
+ # ./target/release/safe --log-output-dest=data-dir wallet create --no-password
+ # ./target/release/faucet --log-output-dest=data-dir send 100000000 $(./target/release/safe --log-output-dest=data-dir wallet address | tail -n 1) | tail -n 1 > transfer_hex
+ # ./target/release/safe --log-output-dest=data-dir wallet receive --file transfer_hex
+ # env:
+ # SN_LOG: "all"
+ # SAFE_DATA_PATH: /home/runner/.local/share/safe
+ # CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ # timeout-minutes: 25
- - name: Use second client to upload third file
- run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_3.tar.gz" --retry-strategy quick
- env:
- SN_LOG: "all"
- timeout-minutes: 10
+ # - name: Use second client to upload third file
+ # run: ./target/release/safe --log-output-dest=data-dir files upload "./test_data_3.tar.gz" --retry-strategy quick
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 10
- - name: Ensure no leftover cash_notes and payment files
- run: |
- expected_cash_notes_files="1"
- expected_payment_files="0"
- pwd
- ls $CLIENT_DATA_PATH/ -l
- ls $CLIENT_DATA_PATH/wallet -l
- ls $CLIENT_DATA_PATH/wallet/cash_notes -l
- cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l)
- echo "Find $cash_note_files cash_note files"
- if [ $expected_cash_notes_files -lt $cash_note_files ]; then
- echo "Got too many cash_note files leftover: $cash_note_files"
- exit 1
- fi
- ls $CLIENT_DATA_PATH/wallet/payments -l
- payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l)
- if [ $expected_payment_files -lt $payment_files ]; then
- echo "Got too many payment files leftover: $payment_files"
- exit 1
- fi
- env:
- CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
- timeout-minutes: 10
+ # - name: Ensure no leftover cash_notes and payment files
+ # run: |
+ # expected_cash_notes_files="1"
+ # expected_payment_files="0"
+ # pwd
+ # ls $CLIENT_DATA_PATH/ -l
+ # ls $CLIENT_DATA_PATH/wallet -l
+ # ls $CLIENT_DATA_PATH/wallet/cash_notes -l
+ # cash_note_files=$(ls $CLIENT_DATA_PATH/wallet/cash_notes | wc -l)
+ # echo "Found $cash_note_files cash_note files"
+ # if [ $expected_cash_notes_files -lt $cash_note_files ]; then
+ # echo "Got too many cash_note files leftover: $cash_note_files"
+ # exit 1
+ # fi
+ # ls $CLIENT_DATA_PATH/wallet/payments -l
+ # payment_files=$(ls $CLIENT_DATA_PATH/wallet/payments | wc -l)
+ # if [ $expected_payment_files -lt $payment_files ]; then
+ # echo "Got too many payment files leftover: $payment_files"
+ # exit 1
+ # fi
+ # env:
+ # CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
+ # timeout-minutes: 10
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: safe_test_logs_heavy_replicate_bench
- platform: ubuntu-latest
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: safe_test_logs_heavy_replicate_bench
+ # platform: ubuntu-latest
diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml
new file mode 100644
index 0000000000..70db60d68e
--- /dev/null
+++ b/.github/workflows/nightly-release.yml
@@ -0,0 +1,251 @@
+name: nightly release
+
+on:
+ schedule:
+ - cron: '0 0 * * *' # Run every night at midnight UTC
+ workflow_dispatch: # This also allows the workflow to be triggered manually
+
+env:
+ WORKFLOW_URL: https://github.com/maidsafe/safe_network/actions/runs
+
+jobs:
+ build:
+ if: ${{ github.repository_owner == 'maidsafe' }}
+ name: build
+ environment: stable
+ env:
+ FOUNDATION_PK: ${{ vars.FOUNDATION_PK }}
+ GENESIS_PK: ${{ vars.GENESIS_PK }}
+ GENESIS_SK: ${{ secrets.GENESIS_SK }}
+ NETWORK_ROYALTIES_PK: ${{ vars.NETWORK_ROYALTIES_PK }}
+ PAYMENT_FORWARD_PK: ${{ vars.PAYMENT_FORWARD_PK }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ include:
+ - os: windows-latest
+ target: x86_64-pc-windows-msvc
+ - os: macos-latest
+ target: x86_64-apple-darwin
+ - os: macos-latest
+ target: aarch64-apple-darwin
+ - os: ubuntu-latest
+ target: x86_64-unknown-linux-musl
+ - os: ubuntu-latest
+ target: arm-unknown-linux-musleabi
+ - os: ubuntu-latest
+ target: armv7-unknown-linux-musleabihf
+ - os: ubuntu-latest
+ target: aarch64-unknown-linux-musl
+ steps:
+ - uses: actions/checkout@v4
+ - uses: dtolnay/rust-toolchain@stable
+ - uses: cargo-bins/cargo-binstall@main
+ - shell: bash
+ run: cargo binstall --no-confirm just
+
+ - name: build nightly release artifacts
+ shell: bash
+ run: |
+ just build-release-artifacts "${{ matrix.target }}" "true"
+
+ - uses: actions/upload-artifact@main
+ with:
+ name: safe_network-${{ matrix.target }}
+ path: |
+ artifacts
+ !artifacts/.cargo-lock
+
+ - name: post notification to slack on failure
+ if: ${{ failure() }}
+ uses: bryannice/gitactions-slack-notification@2.0.0
+ env:
+ SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ SLACK_TITLE: "Release Failed"
+
+ s3-release:
+ if: ${{ github.repository_owner == 'maidsafe' }}
+ name: s3 release
+ runs-on: ubuntu-latest
+ needs: [build]
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.S3_DEPLOY_AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DEPLOY_AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: eu-west-2
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-pc-windows-msvc
+ path: artifacts/x86_64-pc-windows-msvc/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-unknown-linux-musl
+ path: artifacts/x86_64-unknown-linux-musl/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-apple-darwin
+ path: artifacts/x86_64-apple-darwin/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-aarch64-apple-darwin
+ path: artifacts/aarch64-apple-darwin/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-arm-unknown-linux-musleabi
+ path: artifacts/arm-unknown-linux-musleabi/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-armv7-unknown-linux-musleabihf
+ path: artifacts/armv7-unknown-linux-musleabihf/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-aarch64-unknown-linux-musl
+ path: artifacts/aarch64-unknown-linux-musl/release
+
+ - uses: cargo-bins/cargo-binstall@main
+ - name: install just
+ shell: bash
+ run: cargo binstall --no-confirm just
+
+ - name: remove latest nightly release
+ shell: bash
+ run: |
+ just delete-s3-bin "faucet" "nightly"
+ just delete-s3-bin "nat-detection" "nightly"
+ just delete-s3-bin "node-launchpad" "nightly"
+ just delete-s3-bin "safe" "nightly"
+ just delete-s3-bin "safenode" "nightly"
+ just delete-s3-bin "safenode_rpc_client" "nightly"
+ just delete-s3-bin "safenode-manager" "nightly"
+ just delete-s3-bin "safenodemand" "nightly"
+ just delete-s3-bin "sn_auditor" "nightly"
+
+ - name: upload binaries to S3
+ shell: bash
+ run: |
+ version=$(date +"%Y.%m.%d")
+ just package-bin "faucet" "$version"
+ just package-bin "nat-detection" "$version"
+ just package-bin "node-launchpad" "$version"
+ just package-bin "safe" "$version"
+ just package-bin "safenode" "$version"
+ just package-bin "safenode_rpc_client" "$version"
+ just package-bin "safenode-manager" "$version"
+ just package-bin "safenodemand" "$version"
+ just package-bin "sn_auditor" "$version"
+ just upload-all-packaged-bins-to-s3
+
+ rm -rf packaged_bins
+ just package-bin "faucet" "nightly"
+ just package-bin "nat-detection" "nightly"
+ just package-bin "node-launchpad" "nightly"
+ just package-bin "safe" "nightly"
+ just package-bin "safenode" "nightly"
+ just package-bin "safenode_rpc_client" "nightly"
+ just package-bin "safenode-manager" "nightly"
+ just package-bin "safenodemand" "nightly"
+ just package-bin "sn_auditor" "nightly"
+ just upload-all-packaged-bins-to-s3
+
+ github-release:
+ if: ${{ github.repository_owner == 'maidsafe' }}
+ name: github release
+ runs-on: ubuntu-latest
+ needs: [s3-release]
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-pc-windows-msvc
+ path: artifacts/x86_64-pc-windows-msvc/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-unknown-linux-musl
+ path: artifacts/x86_64-unknown-linux-musl/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-x86_64-apple-darwin
+ path: artifacts/x86_64-apple-darwin/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-aarch64-apple-darwin
+ path: artifacts/aarch64-apple-darwin/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-arm-unknown-linux-musleabi
+ path: artifacts/arm-unknown-linux-musleabi/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-armv7-unknown-linux-musleabihf
+ path: artifacts/armv7-unknown-linux-musleabihf/release
+ - uses: actions/download-artifact@master
+ with:
+ name: safe_network-aarch64-unknown-linux-musl
+ path: artifacts/aarch64-unknown-linux-musl/release
+
+ - uses: cargo-bins/cargo-binstall@main
+ - name: install just
+ shell: bash
+ run: cargo binstall --no-confirm just
+
+ - name: set package version
+ shell: bash
+ run: |
+ version=$(date +"%Y.%m.%d")
+ echo "PACKAGE_VERSION=$version" >> $GITHUB_ENV
+
+ - name: package release artifacts
+ shell: bash
+ run: just package-all-architectures
+
+ - name: delete existing nightly release
+ env:
+ GITHUB_TOKEN: ${{ secrets.VERSION_BUMP_COMMIT_PAT }}
+ run: |
+ releases=$(gh api repos/${{ github.repository }}/releases --paginate)
+ echo "$releases" | jq -c '.[]' | while read release; do
+ tag_name=$(echo $release | jq -r '.tag_name')
+ release_id=$(echo $release | jq -r '.id')
+
+ if [[ $tag_name == nightly* ]]; then
+ echo "deleting nightly release $tag_name"
+ gh api -X DELETE repos/${{ github.repository }}/releases/$release_id
+ exit 0
+ fi
+ done
+
+ - name: create new nightly release
+ uses: actions/create-release@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.VERSION_BUMP_COMMIT_PAT }}
+ with:
+ tag_name: nightly-${{ env.PACKAGE_VERSION }}
+ release_name: "${{ env.PACKAGE_VERSION }} Nightly Release"
+ body: |
+ Nightly release of the Autonomi binary set, built from the `main` branch.
+
+ These binaries should be compatible with the stable network, but they should be considered experimental.
+
+ For the most reliable experience, prefer the latest stable release.
+ draft: false
+ prerelease: true
+
+ - name: upload artifacts as assets
+ env:
+ GITHUB_TOKEN: ${{ secrets.VERSION_BUMP_COMMIT_PAT }}
+ shell: bash
+ run: |
+ (
+ cd packaged_architectures
+ ls | xargs gh release upload nightly-${{ env.PACKAGE_VERSION }}
+ )
+
+ - name: post notification to slack on failure
+ if: ${{ failure() }}
+ uses: bryannice/gitactions-slack-notification@2.0.0
+ env:
+ SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ SLACK_TITLE: "Nightly Release Failed"
\ No newline at end of file
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 7165866f79..aac0ac9ad4 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -154,7 +154,7 @@ jobs:
- name: Run network tests
timeout-minutes: 25
- run: cargo test --release -p sn_networking
+ run: cargo test --release -p sn_networking --features="open-metrics"
- name: Run protocol tests
timeout-minutes: 25
@@ -199,11 +199,11 @@ jobs:
continue-on-error: true
- name: Build binaries
- run: cargo build --release --features=local-discovery --bin safenode --bin faucet
+ run: cargo build --release --features=local --bin safenode --bin faucet
timeout-minutes: 30
- name: Build testing executable
- run: cargo test --release -p sn_node --features=local-discovery --test sequential_transfers --test storage_payments --test double_spend --no-run
+ run: cargo test --release -p sn_node --features=local --test sequential_transfers --test storage_payments --test double_spend --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -221,21 +221,21 @@ jobs:
build: true
- name: execute the sequential transfers test
- run: cargo test --release -p sn_node --features="local-discovery" --test sequential_transfers -- --nocapture --test-threads=1
+ run: cargo test --release -p sn_node --features="local" --test sequential_transfers -- --nocapture --test-threads=1
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
SN_LOG: "all"
timeout-minutes: 10
- name: execute the storage payment tests
- run: cargo test --release -p sn_node --features="local-discovery" --test storage_payments -- --nocapture --test-threads=1
+ run: cargo test --release -p sn_node --features="local" --test storage_payments -- --nocapture --test-threads=1
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
SN_LOG: "all"
timeout-minutes: 10
- name: execute the double spend tests
- run: cargo test --release -p sn_node --features="local-discovery" --test double_spend -- --nocapture --test-threads=1
+ run: cargo test --release -p sn_node --features="local" --test double_spend -- --nocapture --test-threads=1
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 25
@@ -277,11 +277,11 @@ jobs:
continue-on-error: true
- name: Build binaries
- run: cargo build --release --features=local-discovery --bin safenode --bin faucet
+ run: cargo build --release --features=local --bin safenode --bin faucet
timeout-minutes: 30
- name: Build testing executable
- run: cargo test --release -p sn_node --features=local-discovery --test spend_simulation --no-run
+ run: cargo test --release -p sn_node --features=local --test spend_simulation --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -300,7 +300,7 @@ jobs:
build: true
- name: execute the spend simulation test
- run: cargo test --release -p sn_node --features="local-discovery" --test spend_simulation -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test spend_simulation -- --nocapture
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 25
@@ -341,11 +341,11 @@ jobs:
- uses: Swatinem/rust-cache@v2
- name: Build binaries
- run: cargo build --release --features=local-discovery,distribution --bin safenode --bin faucet
+ run: cargo build --release --features=local,distribution --bin safenode --bin faucet
timeout-minutes: 30
- name: Build testing executable
- run: cargo test --release --features=local-discovery,distribution --no-run
+ run: cargo test --release --features=local,distribution --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -373,7 +373,7 @@ jobs:
fi
- name: execute token_distribution tests
- run: cargo test --release --features=local-discovery,distribution token_distribution -- --nocapture --test-threads=1
+ run: cargo test --release --features=local,distribution token_distribution -- --nocapture --test-threads=1
env:
SN_LOG: "all"
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
@@ -412,11 +412,11 @@ jobs:
continue-on-error: true
- name: Build binaries
- run: cargo build --release --features local-discovery --bin safenode --bin faucet
+ run: cargo build --release --features local --bin safenode --bin faucet
timeout-minutes: 30
- name: Build churn tests
- run: cargo test --release -p sn_node --features=local-discovery --test data_with_churn --no-run
+ run: cargo test --release -p sn_node --features=local --test data_with_churn --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -434,7 +434,7 @@ jobs:
build: true
- name: Chunks data integrity during nodes churn (during 10min) (in theory)
- run: cargo test --release -p sn_node --features="local-discovery" --test data_with_churn -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test data_with_churn -- --nocapture
env:
TEST_DURATION_MINS: 60
TEST_CHURN_CYCLES: 6
@@ -537,11 +537,11 @@ jobs:
continue-on-error: true
- name: Build binaries
- run: cargo build --release --features local-discovery --bin safenode --bin faucet
+ run: cargo build --release --features local --bin safenode --bin faucet
timeout-minutes: 30
- name: Build data location and routing table tests
- run: cargo test --release -p sn_node --features=local-discovery --test verify_data_location --test verify_routing_table --no-run
+ run: cargo test --release -p sn_node --features=local --test verify_data_location --test verify_routing_table --no-run
env:
# only set the target dir for windows to bypass the linker issue.
# happens if we build the node manager via testnet action
@@ -559,20 +559,20 @@ jobs:
build: true
- name: Verify the Routing table of the nodes
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_routing_table -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 5
- name: Verify the location of the data on the network
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_data_location -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_data_location -- --nocapture
env:
SN_LOG: "all"
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 90
- name: Verify the routing tables of the nodes
- run: cargo test --release -p sn_node --features="local-discovery" --test verify_routing_table -- --nocapture
+ run: cargo test --release -p sn_node --features="local" --test verify_routing_table -- --nocapture
env:
CARGO_TARGET_DIR: ${{ matrix.os == 'windows-latest' && './test-target' || '.' }}
timeout-minutes: 5
diff --git a/.github/workflows/nightly_wan.yml b/.github/workflows/nightly_wan.yml
index 78c1ff756f..9c84f58488 100644
--- a/.github/workflows/nightly_wan.yml
+++ b/.github/workflows/nightly_wan.yml
@@ -1,13 +1,16 @@
name: Nightly -- Full WAN Network Tests
on:
-# todo: this is totally broken atm. Fix and re-enable.
-# schedule:
-# - cron: "0 0 * * *"
+ schedule:
+ - cron: "0 0 * * *"
+ # enable as below for testing purposes.
+ # pull_request:
+ # branches: ["*"]
workflow_dispatch:
env:
CARGO_INCREMENTAL: 0 # bookkeeping for incremental builds has overhead, not useful in CI.
+ NETWORK_NAME: DEV-01
WORKFLOW_URL: https://github.com/maidsafe/stableset_net/actions/runs
jobs:
e2e:
@@ -29,31 +32,30 @@ jobs:
run: cargo build --release --bin safe
timeout-minutes: 30
- - name: Start a WAN network
- uses: maidsafe/sn-testnet-action@main
+ - name: setup testnet-deploy
+ uses: maidsafe/sn-testnet-control-action/init-testnet-deploy@main
with:
- action: create
- re-attempts: 3
- rust-log: debug
ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- security-group-id: sg-0d47df5b3f0d01e2a
- subnet-id: subnet-018f2ab26755df7f9
+
+ - name: launch ${{ env.NETWORK_NAME }}
+ uses: maidsafe/sn-testnet-control-action/launch-network@main
+ with:
+ ansible-forks: ${{ env.ANSIBLE_FORKS }}
+ environment-type: development
+ node-vm-count: 10
node-count: 20
- vm-count: 1
+ uploader-vm-count: 0
+ bootstrap-node-vm-count: 0
+ log-format: json
+ network-name: ${{ env.NETWORK_NAME }}
provider: digital-ocean
- testnet-name: NightlyE2E
- # if we were to run on a PR, use the below
- # safe-network-user: ${{ github.actor }}"
- # safe-network-branch: ${{ github.event.pull_request.head.ref }}
- # Specify custom branch to prevent the deployer from fetching the latest release.
- # The latest release contains the `network-contacts` feature turned on.
- safe-network-user: maidsafe
safe-network-branch: main
+ safe-network-user: maidsafe
- name: Check env variables
shell: bash
@@ -61,10 +63,17 @@ jobs:
echo "Peer is $SAFE_PEERS"
echo "Deployment inventory is $SN_INVENTORY"
+ - name: start faucet
+ uses: maidsafe/sn-testnet-control-action/start-faucet@main
+ with:
+ network-name: ${{ env.NETWORK_NAME }}
+
- name: Obtain the funds from the faucet
run: |
+ set -e
+
# read the inventory file
- inventory_path=/home/runner/.local/share/safe/testnet-deploy/NightlyE2E-inventory.json
+ inventory_path=/home/runner/.local/share/safe/testnet-deploy/${{ env.NETWORK_NAME }}-inventory.json
echo "Inventory Path: $inventory_path"
faucet_address=$(jq -r '.faucet_address' $inventory_path)
cargo run --bin safe --release -- wallet get-faucet ${faucet_address}
@@ -73,48 +82,44 @@ jobs:
timeout-minutes: 2
- name: Start a client to carry out chunk actions
- run: cargo run --bin safe --release -- --log-output-dest=data-dir files upload "./resources" --retry-strategy quick
+ run: |
+ set -e
+ cargo run --bin safe --release -- --log-output-dest=data-dir files upload "./resources" --retry-strategy quick
env:
SN_LOG: "all"
timeout-minutes: 2
- name: Start a client to create a register
- run: cargo run --bin safe --release -- --log-output-dest=data-dir register create -n baobao
+ run: |
+ set -e
+ cargo run --bin safe --release -- --log-output-dest=data-dir register create -n baobao
env:
SN_LOG: "all"
timeout-minutes: 2
- name: Start a client to get a register
- run: cargo run --bin safe --release -- --log-output-dest=data-dir register get -n baobao
+ run: |
+ set -e
+ cargo run --bin safe --release -- --log-output-dest=data-dir register get -n baobao
env:
SN_LOG: "all"
timeout-minutes: 2
- name: Start a client to edit a register
- run: cargo run --bin safe --release -- --log-output-dest=data-dir register edit -n baobao wood
+ run: |
+ set -e
+ cargo run --bin safe --release -- --log-output-dest=data-dir register edit -n baobao wood
env:
SN_LOG: "all"
timeout-minutes: 2
- - name: Fetch network logs
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: logs
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyE2E
- safe-network-user: maidsafe
- safe-network-branch: main
+ # - name: Fetch network logs
+ # uses: maidsafe/sn-testnet-control-action/fetch-logs@main
+ # with:
+ # re-attempts: 3
+ # rust-log: debug
+ # provider: digital-ocean
+ # network-name: ${{ env.NETWORK_NAME }}
- name: Upload local logs
if: always()
@@ -126,554 +131,517 @@ jobs:
~/.local/share/safe/*/*/*.log*
~/.local/share/safe/client/logs/*/*.log*
- - name: Stop the WAN network
+ - name: destroy network
if: always()
- uses: maidsafe/sn-testnet-action@main
+ uses: maidsafe/sn-testnet-control-action/destroy-network@main
with:
- action: destroy
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
+ network-name: ${{ env.NETWORK_NAME }}
provider: digital-ocean
- testnet-name: NightlyE2E
- safe-network-user: maidsafe
- safe-network-branch: main
-
- # - name: post notification to slack on failure
- # if: ${{ failure() }}
- # uses: bryannice/gitactions-slack-notification@2.0.0
- # env:
- # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
- # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
- # SLACK_TITLE: "Nightly E2E Test Run Failed"
-
- spend_test:
- name: Spend tests against network
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [ubuntu-latest]
- steps:
- - uses: actions/checkout@v4
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
-
- - uses: Swatinem/rust-cache@v2
- continue-on-error: true
-
- - name: Build testing executable
- run: cargo test --release -p sn_node --features=local-discovery --test sequential_transfers --test storage_payments --test double_spend --test spend_simulation --no-run
- timeout-minutes: 40
-
- - name: Start a WAN network
- uses: maidsafe/sn-testnet-action@main
- with:
- action: create
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- security-group-id: sg-0d47df5b3f0d01e2a
- subnet-id: subnet-018f2ab26755df7f9
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlySpendTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Check env variables
- shell: bash
- run: |
- echo "Peer is $SAFE_PEERS"
- echo "Deployment inventory is $SN_INVENTORY"
- - name: execute the sequential transfers test
- run: cargo test --release -p sn_node --test sequential_transfers -- --nocapture --test-threads=1
+ - name: post notification to slack on failure
+ if: ${{ failure() }}
+ uses: bryannice/gitactions-slack-notification@2.0.0
env:
- SN_LOG: "all"
- timeout-minutes: 45
-
- - name: execute the storage payment tests
- run: cargo test --release -p sn_node --test storage_payments -- --nocapture --test-threads=1
- env:
- SN_LOG: "all"
- timeout-minutes: 45
-
- - name: execute the double spend tests
- run: cargo test --release -p sn_node --test double_spend -- --nocapture --test-threads=1
- timeout-minutes: 45
-
-
- - name: execute the spend simulation tests
- run: cargo test --release -p sn_node --test spend_simulation -- --nocapture --test-threads=1
- timeout-minutes: 45
-
- - name: Small wait to allow reward receipt
- run: sleep 30
- timeout-minutes: 1
-
- - name: Fetch network logs
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: logs
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlySpendTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Upload local logs
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: local_logs_NightlySpendTest
- path: |
- ~/.local/share/safe/node/*/logs/*.log*
- ~/.local/share/safe/*/*/*.log*
- ~/.local/share/safe/client/logs/*/*.log*
-
- - name: Stop the WAN network
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: destroy
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlySpendTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- # - name: post notification to slack on failure
- # if: ${{ failure() }}
- # uses: bryannice/gitactions-slack-notification@2.0.0
- # env:
- # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
- # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
- # SLACK_TITLE: "Nightly Spend Test Run Failed"
-
- churn:
- name: Network churning tests
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- include:
- - os: ubuntu-latest
- wan_logs_path: /home/runner/sn-testnet-deploy/logs
- local_safe_path: /home/runner/.local/share/safe
- # - os: windows-latest
- # node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
- # safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
- # - os: macos-latest
- # node_data_path: /Users/runner/Library/Application Support/safe/node
- # safe_path: /Users/runner/Library/Application Support/safe
- steps:
- - uses: actions/checkout@v4
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
-
- - name: install ripgrep
- run: sudo apt-get -y install ripgrep
-
- - uses: Swatinem/rust-cache@v2
- continue-on-error: true
-
- - name: Build churn tests
- run: cargo test --release -p sn_node --test data_with_churn --no-run
- timeout-minutes: 30
-
- - name: Start a WAN network
- uses: maidsafe/sn-testnet-action@main
- with:
- action: create
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- security-group-id: sg-0d47df5b3f0d01e2a
- subnet-id: subnet-018f2ab26755df7f9
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyChurnTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Check env variables
- shell: bash
- run: |
- echo "Peer is $SAFE_PEERS"
- echo "Deployment inventory is $SN_INVENTORY"
-
- - name: Chunks data integrity during nodes churn
- run: cargo test --release -p sn_node --test data_with_churn -- --nocapture
- env:
- # TEST_DURATION_MINS: 60
- # TEST_CHURN_CYCLES: 6
- # SN_LOG: "all"
- # todo: lower time for testing
- TEST_DURATION_MINS: 10
- TEST_CHURN_CYCLES: 2
- SN_LOG: "all"
- timeout-minutes: 90
-
- - name: Fetch network logs
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: logs
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyChurnTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Upload local logs
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: local_logs_NightlyChurnTest
- path: |
- ~/.local/share/safe/node/*/logs/*.log*
- ~/.local/share/safe/*/*/*.log*
- ~/.local/share/safe/client/logs/*/*.log*
-
- - name: Stop the WAN network
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: destroy
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyChurnTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- # TODO: re-enable the below scripts once we have proper way to restart nodes.
- # Currently on remote network (not local), the nodes do not handle restart RPC cmd well. They reuse the same
- # log location and the logs are over written. Hence the scripts might give false outputs.
-
- # - name: Verify restart of nodes using rg
- # shell: bash
- # timeout-minutes: 1
- # # get the counts, then the specific line, and then the digit count only
- # # then check we have an expected level of restarts
- # # TODO: make this use an env var, or relate to testnet size
- # run : |
- # restart_count=$(rg "Node is restarting in" "${{ matrix.wan_logs_path }}" -c --stats | \
- # rg "(\d+) matches" | rg "\d+" -o)
- # echo "Restart $restart_count nodes"
- # peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.wan_logs_path }}" -c --stats | \
- # rg "(\d+) matches" | rg "\d+" -o)
- # echo "PeerRemovedFromRoutingTable $peer_removed times"
- # if [ $peer_removed -lt $restart_count ]; then
- # echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
- # exit 1
- # fi
- # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
- # echo "Node dir count is $node_count"
- # # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
-
- # # if [ $restart_count -lt $node_count ]; then
- # # echo "Restart count of: $restart_count is less than the node count of: $node_count"
- # # exit 1
- # # fi
-
- # - name: Verify data replication using rg
- # shell: bash
- # timeout-minutes: 1
- # # get the counts, then the specific line, and then the digit count only
- # # then check we have an expected level of replication
- # # TODO: make this use an env var, or relate to testnet size
- # run : |
- # fetching_attempt_count=$(rg "FetchingKeysForReplication" "${{ matrix.wan_logs_path }}" -c --stats | \
- # rg "(\d+) matches" | rg "\d+" -o)
- # echo "Carried out $fetching_attempt_count fetching attempts"
- # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
- # if [ $fetching_attempt_count -lt $node_count ]; then
- # echo "Replication fetching attempts of: $fetching_attempt_count is less than the node count of: $node_count"
- # exit 1
- # fi
-
- # Only error out after uploading the logs
- - name: Don't log raw data
- if: always() && matrix.os != 'windows-latest' # causes error
- shell: bash
- timeout-minutes: 10
- run: |
- if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 15000 { print; exit 1 }'
- then
- echo "We are logging an extremely large data"
- exit 1
- fi
- # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
- #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
- if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 15000 { print; exit 1 }'
- then
- echo "We are logging an extremely large data"
- exit 1
- fi
-
- # sanity check
- if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 1000 { print; exit 1 }'
- then
- echo "Sanity check pass for local safe path"
- fi
- # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
- #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
- if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 1000 { print; exit 1 }'
- then
- echo "Sanity check pass for wan logs path"
- fi
-
- # - name: post notification to slack on failure
- # if: ${{ failure() }}
- # uses: bryannice/gitactions-slack-notification@2.0.0
- # env:
- # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
- # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
- # SLACK_TITLE: "Nightly Churn Test Run Failed"
-
- verify_data_location_routing_table:
- name: Verify data location and Routing Table
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- include:
- - os: ubuntu-latest
- wan_logs_path: /home/runner/sn-testnet-deploy/logs
- local_safe_path: /home/runner/.local/share/safe
- # - os: windows-latest
- # node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
- # safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
- # - os: macos-latest
- # node_data_path: /Users/runner/Library/Application Support/safe/node
- # safe_path: /Users/runner/Library/Application Support/safe
- steps:
- - uses: actions/checkout@v4
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
-
- - name: install ripgrep
- run: sudo apt-get -y install ripgrep
-
- - uses: Swatinem/rust-cache@v2
- continue-on-error: true
-
- - name: Build data location and routing table tests
- run: cargo test --release -p sn_node --test verify_data_location --test verify_routing_table --no-run
- timeout-minutes: 30
-
- - name: Start a WAN network
- uses: maidsafe/sn-testnet-action@main
- with:
- action: create
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- security-group-id: sg-0d47df5b3f0d01e2a
- subnet-id: subnet-018f2ab26755df7f9
- node-count: 20
- vm-count: 1
- testnet-name: NightlyDataLocationTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Check env variables
- shell: bash
- run: |
- echo "Peer is $SAFE_PEERS"
- echo "Deployment inventory is $SN_INVENTORY"
-
- - name: Verify the Routing table of the nodes
- run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
- timeout-minutes: 5
-
- - name: Verify the location of the data on the network
- run: cargo test --release -p sn_node --test verify_data_location -- --nocapture
- env:
- SN_LOG: "all"
- timeout-minutes: 90
-
- - name: Verify the routing tables of the nodes
- run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
- timeout-minutes: 5
-
- - name: Fetch network logs
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: logs
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyDataLocationTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- - name: Upload local logs
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: local_logs_NightlyDataLocationTest
- path: |
- ~/.local/share/safe/node/*/logs/*.log*
- ~/.local/share/safe/*/*/*.log*
- ~/.local/share/safe/client/logs/*/*.log*
-
- - name: Stop the WAN network
- if: always()
- uses: maidsafe/sn-testnet-action@main
- with:
- action: destroy
- re-attempts: 3
- rust-log: debug
- ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
- aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
- aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
- aws-region: eu-west-2
- do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
- ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
- node-count: 20
- vm-count: 1
- provider: digital-ocean
- testnet-name: NightlyDataLocationTest
- safe-network-user: maidsafe
- safe-network-branch: main
-
- # TODO: re-enable the below scripts once we have proper way to restart nodes.
- # Currently on remote network (not local), the nodes do not handle restart RPC cmd well. They reuse the same
- # log location and the logs are over written. Hence the scripts might give false outputs.
-
- # - name: Verify restart of nodes using rg
- # shell: bash
- # timeout-minutes: 1
- # # get the counts, then the specific line, and then the digit count only
- # # then check we have an expected level of restarts
- # # TODO: make this use an env var, or relate to testnet size
- # run : |
- # restart_count=$(rg "Node is restarting in" "${{ matrix.wan_logs_path }}" -c --stats | \
- # rg "(\d+) matches" | rg "\d+" -o)
- # echo "Restart $restart_count nodes"
- # peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.wan_logs_path }}" -c --stats | \
- # rg "(\d+) matches" | rg "\d+" -o)
- # echo "PeerRemovedFromRoutingTable $peer_removed times"
- # if [ $peer_removed -lt $restart_count ]; then
- # echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
- # exit 1
- # fi
- # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
- # echo "Node dir count is $node_count"
- # # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
-
- # # if [ $restart_count -lt $node_count ]; then
- # # echo "Restart count of: $restart_count is less than the node count of: $node_count"
- # # exit 1
- # # fi
-
- # Only error out after uploading the logs
- - name: Don't log raw data
- if: always() && matrix.os != 'windows-latest' # causes error
- shell: bash
- timeout-minutes: 10
- run: |
- if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 15000 { print; exit 1 }'
- then
- echo "We are logging an extremely large data"
- exit 1
- fi
- # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
- #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
- if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 15000 { print; exit 1 }'
- then
- echo "We are logging an extremely large data"
- exit 1
- fi
-
- # sanity check
- if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 1000 { print; exit 1 }'
- then
- echo "Sanity check pass for local safe path"
- fi
- # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
- #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
- if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 1000 { print; exit 1 }'
- then
- echo echo "Sanity check pass for wan logs path"
- fi
-
- # - name: post notification to slack on failure
- # if: ${{ failure() }}
- # uses: bryannice/gitactions-slack-notification@2.0.0
- # env:
- # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
- # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
- # SLACK_TITLE: "Nightly Data Location Test Run Failed"
+ SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ SLACK_TITLE: "Nightly E2E Test Run Failed"
+
+ # spend_test:
+ # name: Spend tests against network
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # matrix:
+ # os: [ubuntu-latest]
+ # steps:
+ # - uses: actions/checkout@v4
+
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+
+ # - uses: Swatinem/rust-cache@v2
+ # continue-on-error: true
+
+ # - name: Build testing executable
+ # run: cargo test --release -p sn_node --features=local --test sequential_transfers --test storage_payments --test double_spend --test spend_simulation --no-run
+ # timeout-minutes: 40
+
+ # - name: setup testnet-deploy
+ # uses: maidsafe/sn-testnet-control-action/init-testnet-deploy@main
+ # with:
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+
+ # - name: launch ${{ env.NETWORK_NAME }}
+ # uses: maidsafe/sn-testnet-control-action/launch-network@main
+ # with:
+ # ansible-forks: ${{ env.ANSIBLE_FORKS }}
+ # beta-encryption-key: ${{ env.DEFAULT_PAYMENT_FORWARD_SK }}
+ # environment-type: development
+ # faucet-version: ${{ env.FAUCET_VERSION }}
+ # log-format: json
+ # network-name: ${{ env.NETWORK_NAME }}
+ # network-contacts-file-name: ${{ env.NETWORK_CONTACTS_FILE_NAME }}
+ # provider: digital-ocean
+ # safe-network-branch: main
+ # safe-network-user: maidsafe
+
+ # - name: Check env variables
+ # shell: bash
+ # run: |
+ # echo "Peer is $SAFE_PEERS"
+ # echo "Deployment inventory is $SN_INVENTORY"
+
+ # - name: execute the sequential transfers test
+ # run: cargo test --release -p sn_node --test sequential_transfers -- --nocapture --test-threads=1
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 45
+
+ # - name: execute the storage payment tests
+ # run: cargo test --release -p sn_node --test storage_payments -- --nocapture --test-threads=1
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 45
+
+ # - name: execute the double spend tests
+ # run: cargo test --release -p sn_node --test double_spend -- --nocapture --test-threads=1
+ # timeout-minutes: 45
+
+ # - name: execute the spend simulation tests
+ # run: cargo test --release -p sn_node --test spend_simulation -- --nocapture --test-threads=1
+ # timeout-minutes: 45
+
+ # - name: Small wait to allow reward receipt
+ # run: sleep 30
+ # timeout-minutes: 1
+
+ # - name: Fetch network logs
+ # uses: ermineJose/sn-testnet-control-action/fetch-logs@feat-add_fetch-logs-action
+ # with:
+ # re-attempts: 3
+ # rust-log: debug
+ # provider: digital-ocean
+ # testnet-name: ${{ env.NETWORK_NAME }}
+
+ # - name: Upload local logs
+ # if: always()
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: local_logs_NightlySpendTest
+ # path: |
+ # ~/.local/share/safe/node/*/logs/*.log*
+ # ~/.local/share/safe/*/*/*.log*
+ # ~/.local/share/safe/client/logs/*/*.log*
+
+ # - name: destroy network
+ # uses: maidsafe/sn-testnet-control-action/destroy-network@main
+ # with:
+ # network-name: ${{ env.NETWORK_NAME }}
+ # provider: digital-ocean
+
+ # - name: post notification to slack on failure
+ # if: ${{ failure() }}
+ # uses: bryannice/gitactions-slack-notification@2.0.0
+ # env:
+ # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ # SLACK_TITLE: "Nightly Spend Test Run Failed"
+
+ # churn:
+ # name: Network churning tests
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # matrix:
+ # include:
+ # - os: ubuntu-latest
+ # wan_logs_path: /home/runner/sn-testnet-deploy/logs
+ # local_safe_path: /home/runner/.local/share/safe
+ # # - os: windows-latest
+ # # node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
+ # # safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
+ # # - os: macos-latest
+ # # node_data_path: /Users/runner/Library/Application Support/safe/node
+ # # safe_path: /Users/runner/Library/Application Support/safe
+ # steps:
+ # - uses: actions/checkout@v4
+ #
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ #
+ # - name: install ripgrep
+ # run: sudo apt-get -y install ripgrep
+ #
+ # - uses: Swatinem/rust-cache@v2
+ # continue-on-error: true
+ #
+ # - name: Build churn tests
+ # run: cargo test --release -p sn_node --test data_with_churn --no-run
+ # timeout-minutes: 30
+ #
+ # - name: Start a WAN network
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: create
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # security-group-id: sg-0d47df5b3f0d01e2a
+ # subnet-id: subnet-018f2ab26755df7f9
+ # node-count: 20
+ # vm-count: 1
+ # provider: digital-ocean
+ # testnet-name: NightlyChurnTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # - name: Check env variables
+ # shell: bash
+ # run: |
+ # echo "Peer is $SAFE_PEERS"
+ # echo "Deployment inventory is $SN_INVENTORY"
+ #
+ # - name: Chunks data integrity during nodes churn
+ # run: cargo test --release -p sn_node --test data_with_churn -- --nocapture
+ # env:
+ # # TEST_DURATION_MINS: 60
+ # # TEST_CHURN_CYCLES: 6
+ # # SN_LOG: "all"
+ # # todo: lower time for testing
+ # TEST_DURATION_MINS: 10
+ # TEST_CHURN_CYCLES: 2
+ # SN_LOG: "all"
+ # timeout-minutes: 90
+ #
+ # - name: Fetch network logs
+ # if: always()
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: logs
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # node-count: 20
+ # vm-count: 1
+ # provider: digital-ocean
+ # testnet-name: NightlyChurnTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # - name: Upload local logs
+ # if: always()
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: local_logs_NightlyChurnTest
+ # path: |
+ # ~/.local/share/safe/node/*/logs/*.log*
+ # ~/.local/share/safe/*/*/*.log*
+ # ~/.local/share/safe/client/logs/*/*.log*
+ #
+ # - name: Stop the WAN network
+ # if: always()
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: destroy
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # node-count: 20
+ # vm-count: 1
+ # provider: digital-ocean
+ # testnet-name: NightlyChurnTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # # TODO: re-enable the below scripts once we have proper way to restart nodes.
+ # # Currently on remote network (not local), the nodes do not handle restart RPC cmd well. They reuse the same
+ # # log location and the logs are over written. Hence the scripts might give false outputs.
+ #
+ # # - name: Verify restart of nodes using rg
+ # # shell: bash
+ # # timeout-minutes: 1
+ # # # get the counts, then the specific line, and then the digit count only
+ # # # then check we have an expected level of restarts
+ # # # TODO: make this use an env var, or relate to testnet size
+ # # run : |
+ # # restart_count=$(rg "Node is restarting in" "${{ matrix.wan_logs_path }}" -c --stats | \
+ # # rg "(\d+) matches" | rg "\d+" -o)
+ # # echo "Restart $restart_count nodes"
+ # # peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.wan_logs_path }}" -c --stats | \
+ # # rg "(\d+) matches" | rg "\d+" -o)
+ # # echo "PeerRemovedFromRoutingTable $peer_removed times"
+ # # if [ $peer_removed -lt $restart_count ]; then
+ # # echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
+ # # exit 1
+ # # fi
+ # # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
+ # # echo "Node dir count is $node_count"
+ # # # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
+ #
+ # # # if [ $restart_count -lt $node_count ]; then
+ # # # echo "Restart count of: $restart_count is less than the node count of: $node_count"
+ # # # exit 1
+ # # # fi
+ #
+ # # - name: Verify data replication using rg
+ # # shell: bash
+ # # timeout-minutes: 1
+ # # # get the counts, then the specific line, and then the digit count only
+ # # # then check we have an expected level of replication
+ # # # TODO: make this use an env var, or relate to testnet size
+ # # run : |
+ # # fetching_attempt_count=$(rg "FetchingKeysForReplication" "${{ matrix.wan_logs_path }}" -c --stats | \
+ # # rg "(\d+) matches" | rg "\d+" -o)
+ # # echo "Carried out $fetching_attempt_count fetching attempts"
+ # # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
+ # # if [ $fetching_attempt_count -lt $node_count ]; then
+ # # echo "Replication fetching attempts of: $fetching_attempt_count is less than the node count of: $node_count"
+ # # exit 1
+ # # fi
+ #
+ # # Only error out after uploading the logs
+ # - name: Don't log raw data
+ # if: always() && matrix.os != 'windows-latest' # causes error
+ # shell: bash
+ # timeout-minutes: 10
+ # run: |
+ # if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 15000 { print; exit 1 }'
+ # then
+ # echo "We are logging an extremely large data"
+ # exit 1
+ # fi
+ # # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
+ # #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
+ # if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 15000 { print; exit 1 }'
+ # then
+ # echo "We are logging an extremely large data"
+ # exit 1
+ # fi
+ #
+ # # sanity check
+ # if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 1000 { print; exit 1 }'
+ # then
+ # echo "Sanity check pass for local safe path"
+ # fi
+ # # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
+ # #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
+ # if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 1000 { print; exit 1 }'
+ # then
+ # echo "Sanity check pass for wan logs path"
+ # fi
+ #
+ # # - name: post notification to slack on failure
+ # # if: ${{ failure() }}
+ # # uses: bryannice/gitactions-slack-notification@2.0.0
+ # # env:
+ # # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ # # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ # # SLACK_TITLE: "Nightly Churn Test Run Failed"
+ #
+ # verify_data_location_routing_table:
+ # name: Verify data location and Routing Table
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # matrix:
+ # include:
+ # - os: ubuntu-latest
+ # wan_logs_path: /home/runner/sn-testnet-deploy/logs
+ # local_safe_path: /home/runner/.local/share/safe
+ # # - os: windows-latest
+ # # node_data_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe\\node
+ # # safe_path: C:\\Users\\runneradmin\\AppData\\Roaming\\safe
+ # # - os: macos-latest
+ # # node_data_path: /Users/runner/Library/Application Support/safe/node
+ # # safe_path: /Users/runner/Library/Application Support/safe
+ # steps:
+ # - uses: actions/checkout@v4
+ #
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ #
+ # - name: install ripgrep
+ # run: sudo apt-get -y install ripgrep
+ #
+ # - uses: Swatinem/rust-cache@v2
+ # continue-on-error: true
+ #
+ # - name: Build data location and routing table tests
+ # run: cargo test --release -p sn_node --test verify_data_location --test verify_routing_table --no-run
+ # timeout-minutes: 30
+ #
+ # - name: Start a WAN network
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: create
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # security-group-id: sg-0d47df5b3f0d01e2a
+ # subnet-id: subnet-018f2ab26755df7f9
+ # node-count: 20
+ # vm-count: 1
+ # testnet-name: NightlyDataLocationTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # - name: Check env variables
+ # shell: bash
+ # run: |
+ # echo "Peer is $SAFE_PEERS"
+ # echo "Deployment inventory is $SN_INVENTORY"
+ #
+ # - name: Verify the Routing table of the nodes
+ # run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
+ # timeout-minutes: 5
+ #
+ # - name: Verify the location of the data on the network
+ # run: cargo test --release -p sn_node --test verify_data_location -- --nocapture
+ # env:
+ # SN_LOG: "all"
+ # timeout-minutes: 90
+ #
+ # - name: Verify the routing tables of the nodes
+ # run: cargo test --release -p sn_node --test verify_routing_table -- --nocapture
+ # timeout-minutes: 5
+ #
+ # - name: Fetch network logs
+ # if: always()
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: logs
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # node-count: 20
+ # vm-count: 1
+ # provider: digital-ocean
+ # testnet-name: NightlyDataLocationTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # - name: Upload local logs
+ # if: always()
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: local_logs_NightlyDataLocationTest
+ # path: |
+ # ~/.local/share/safe/node/*/logs/*.log*
+ # ~/.local/share/safe/*/*/*.log*
+ # ~/.local/share/safe/client/logs/*/*.log*
+ #
+ # - name: Stop the WAN network
+ # if: always()
+ # uses: maidsafe/sn-testnet-action@main
+ # with:
+ # action: destroy
+ # re-attempts: 3
+ # rust-log: debug
+ # ansible-vault-password: ${{ secrets.SN_TESTNET_ANSIBLE_VAULT_PASSWORD }}
+ # aws-access-key-id: ${{ secrets.SN_TESTNET_AWS_ACCESS_KEY_ID }}
+ # aws-access-key-secret: ${{ secrets.SN_TESTNET_AWS_SECRET_ACCESS_KEY }}
+ # aws-region: eu-west-2
+ # do-token: ${{ secrets.SN_TESTNET_DO_PAT }}
+ # ssh-secret-key: ${{ secrets.SN_TESTNET_SSH_KEY }}
+ # node-count: 20
+ # vm-count: 1
+ # provider: digital-ocean
+ # testnet-name: NightlyDataLocationTest
+ # safe-network-user: maidsafe
+ # safe-network-branch: main
+ #
+ # # TODO: re-enable the below scripts once we have proper way to restart nodes.
+ # # Currently on remote network (not local), the nodes do not handle restart RPC cmd well. They reuse the same
+ # # log location and the logs are over written. Hence the scripts might give false outputs.
+ #
+ # # - name: Verify restart of nodes using rg
+ # # shell: bash
+ # # timeout-minutes: 1
+ # # # get the counts, then the specific line, and then the digit count only
+ # # # then check we have an expected level of restarts
+ # # # TODO: make this use an env var, or relate to testnet size
+ # # run : |
+ # # restart_count=$(rg "Node is restarting in" "${{ matrix.wan_logs_path }}" -c --stats | \
+ # # rg "(\d+) matches" | rg "\d+" -o)
+ # # echo "Restart $restart_count nodes"
+ # # peer_removed=$(rg "PeerRemovedFromRoutingTable" "${{ matrix.wan_logs_path }}" -c --stats | \
+ # # rg "(\d+) matches" | rg "\d+" -o)
+ # # echo "PeerRemovedFromRoutingTable $peer_removed times"
+ # # if [ $peer_removed -lt $restart_count ]; then
+ # # echo "PeerRemovedFromRoutingTable times of: $peer_removed is less than the restart count of: $restart_count"
+ # # exit 1
+ # # fi
+ # # node_count=$(find "${{ matrix.wan_logs_path }}" -type d | awk -F/ 'NF==9' | grep -E "/12D3KooW" | wc -l)
+ # # echo "Node dir count is $node_count"
+ # # # TODO: reenable this once the testnet dir creation is tidied up to avoid a large count here
+ #
+ # # # if [ $restart_count -lt $node_count ]; then
+ # # # echo "Restart count of: $restart_count is less than the node count of: $node_count"
+ # # # exit 1
+ # # # fi
+ #
+ # # Only error out after uploading the logs
+ # - name: Don't log raw data
+ # if: always() && matrix.os != 'windows-latest' # causes error
+ # shell: bash
+ # timeout-minutes: 10
+ # run: |
+ # if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 15000 { print; exit 1 }'
+ # then
+ # echo "We are logging an extremely large data"
+ # exit 1
+ # fi
+ # # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
+ # #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
+ # if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 15000 { print; exit 1 }'
+ # then
+ # echo "We are logging an extremely large data"
+ # exit 1
+ # fi
+ #
+ # # sanity check
+ # if ! rg '^' "${{ matrix.local_safe_path }}"/client/logs | awk 'length($0) > 1000 { print; exit 1 }'
+ # then
+ # echo "Sanity check pass for local safe path"
+ # fi
+ # # node dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/safenode1/safenode.log
+ # #faucet dir structure: ~/sn-testnet-deploy/logs/NightlyChurnTest/NightlyChurnTest-genesis/faucet/logs/faucet.log
+ # if ! rg '^' "${{ matrix.wan_logs_path }}"/*/*/*/ | awk 'length($0) > 1000 { print; exit 1 }'
+ # then
+ #   echo "Sanity check pass for wan logs path"
+ # fi
+ #
+ # # - name: post notification to slack on failure
+ # # if: ${{ failure() }}
+ # # uses: bryannice/gitactions-slack-notification@2.0.0
+ # # env:
+ # # SLACK_INCOMING_WEBHOOK: ${{ secrets.SLACK_GH_ACTIONS_WEBHOOK_URL }}
+ # # SLACK_MESSAGE: "Please check the logs for the run at ${{ env.WORKFLOW_URL }}/${{ github.run_id }}"
+ # # SLACK_TITLE: "Nightly Data Location Test Run Failed"
diff --git a/.github/workflows/node_man_tests.yml b/.github/workflows/node_man_tests.yml
index ea49a67372..55cd701cbf 100644
--- a/.github/workflows/node_man_tests.yml
+++ b/.github/workflows/node_man_tests.yml
@@ -35,122 +35,122 @@ jobs:
- shell: bash
run: cargo test --lib --package sn-node-manager
- node-manager-user-mode-e2e-tests:
- name: user-mode e2e
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- include:
- - { os: ubuntu-latest }
- - { os: macos-latest }
- steps:
- - uses: actions/checkout@v4
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
-
- - name: Build binaries
- run: cargo build --release --bin safenode --bin faucet
- timeout-minutes: 30
-
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ${{ matrix.os }}
- build: true
-
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
-
- - shell: bash
- run: |
- cargo test --package sn-node-manager --release --test e2e -- --nocapture
-
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: node_man_tests_user_mode
- platform: ${{ matrix.os }}
-
- node-manager-e2e-tests:
- name: system-wide e2e
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- include:
- - { os: ubuntu-latest, elevated: sudo -E env PATH="$PATH" }
- - { os: macos-latest, elevated: sudo -E }
- - { os: windows-latest }
- steps:
- - uses: actions/checkout@v4
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@stable
- - uses: Swatinem/rust-cache@v2
-
- - name: Build binaries
- run: cargo build --release --bin safenode --bin faucet
- timeout-minutes: 30
-
- - name: Start a local network
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: start
- interval: 2000
- node-path: target/release/safenode
- faucet-path: target/release/faucet
- platform: ${{ matrix.os }}
- build: true
-
- - name: Check SAFE_PEERS was set
- shell: bash
- run: |
- if [[ -z "$SAFE_PEERS" ]]; then
- echo "The SAFE_PEERS variable has not been set"
- exit 1
- else
- echo "SAFE_PEERS has been set to $SAFE_PEERS"
- fi
-
- - shell: bash
- if: matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest'
- run: |
- ${{ matrix.elevated }} rustup default stable
- ${{ matrix.elevated }} cargo test --package sn-node-manager --release --test e2e -- --nocapture
-
- # Powershell step runs as admin by default.
- - name: run integration test in powershell
- if: matrix.os == 'windows-latest'
- shell: pwsh
- run: |
- curl -L -o WinSW.exe $env:WINSW_URL
-
- New-Item -ItemType Directory -Force -Path "$env:GITHUB_WORKSPACE\bin"
- Move-Item -Path WinSW.exe -Destination "$env:GITHUB_WORKSPACE\bin"
- $env:PATH += ";$env:GITHUB_WORKSPACE\bin"
-
- cargo test --release --package sn-node-manager --test e2e -- --nocapture
-
- - name: Stop the local network and upload logs
- if: always()
- uses: maidsafe/sn-local-testnet-action@main
- with:
- action: stop
- log_file_prefix: node_man_tests_system_wide
- platform: ${{ matrix.os }}
+ # node-manager-user-mode-e2e-tests:
+ # name: user-mode e2e
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # fail-fast: false
+ # matrix:
+ # include:
+ # - { os: ubuntu-latest }
+ # - { os: macos-latest }
+ # steps:
+ # - uses: actions/checkout@v4
+
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ # - uses: Swatinem/rust-cache@v2
+
+ # - name: Build binaries
+ # run: cargo build --release --bin safenode --bin faucet
+ # timeout-minutes: 30
+
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ${{ matrix.os }}
+ # build: true
+
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
+
+ # - shell: bash
+ # run: |
+ # cargo test --package sn-node-manager --release --test e2e -- --nocapture
+
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: node_man_tests_user_mode
+ # platform: ${{ matrix.os }}
+
+ # node-manager-e2e-tests:
+ # name: system-wide e2e
+ # runs-on: ${{ matrix.os }}
+ # strategy:
+ # fail-fast: false
+ # matrix:
+ # include:
+ # - { os: ubuntu-latest, elevated: sudo -E env PATH="$PATH" }
+ # - { os: macos-latest, elevated: sudo -E }
+ # - { os: windows-latest }
+ # steps:
+ # - uses: actions/checkout@v4
+
+ # - name: Install Rust
+ # uses: dtolnay/rust-toolchain@stable
+ # - uses: Swatinem/rust-cache@v2
+
+ # - name: Build binaries
+ # run: cargo build --release --bin safenode --bin faucet
+ # timeout-minutes: 30
+
+ # - name: Start a local network
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: start
+ # interval: 2000
+ # node-path: target/release/safenode
+ # faucet-path: target/release/faucet
+ # platform: ${{ matrix.os }}
+ # build: true
+
+ # - name: Check SAFE_PEERS was set
+ # shell: bash
+ # run: |
+ # if [[ -z "$SAFE_PEERS" ]]; then
+ # echo "The SAFE_PEERS variable has not been set"
+ # exit 1
+ # else
+ # echo "SAFE_PEERS has been set to $SAFE_PEERS"
+ # fi
+
+ # - shell: bash
+ # if: matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest'
+ # run: |
+ # ${{ matrix.elevated }} rustup default stable
+ # ${{ matrix.elevated }} cargo test --package sn-node-manager --release --test e2e -- --nocapture
+
+ # # Powershell step runs as admin by default.
+ # - name: run integration test in powershell
+ # if: matrix.os == 'windows-latest'
+ # shell: pwsh
+ # run: |
+ # curl -L -o WinSW.exe $env:WINSW_URL
+
+ # New-Item -ItemType Directory -Force -Path "$env:GITHUB_WORKSPACE\bin"
+ # Move-Item -Path WinSW.exe -Destination "$env:GITHUB_WORKSPACE\bin"
+ # $env:PATH += ";$env:GITHUB_WORKSPACE\bin"
+
+ # cargo test --release --package sn-node-manager --test e2e -- --nocapture
+
+ # - name: Stop the local network and upload logs
+ # if: always()
+ # uses: maidsafe/sn-local-testnet-action@main
+ # with:
+ # action: stop
+ # log_file_prefix: node_man_tests_system_wide
+ # platform: ${{ matrix.os }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 810a8dc264..0d1ecc79fd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,22 +7,31 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
*When editing this file, please respect a line length of 100.*
-## 2024-10-08
+## 2024-10-22
-### Network
+Unfortunately the entry for this release will not have fully detailed changes. This release is
+special in that it's very large and moves us to a new, EVM-based payments system. The Github Release
+description has a list of all the merged PRs. If you want more detail, consult the PR list. Normal
+service will resume for subsequent releases.
-#### Changed
+Here is a brief summary of the changes:
-- Optimize auditor tracking by not to re-attempt fetched spend.
-- Optimize auditor tracking function by using DashMap and stream.
+- A new `autonomi` CLI that uses EVM payments and replaces the previous `safe` CLI.
+- A new `autonomi` API that replaces `sn_client` with a simpler interface.
+- The node has been changed to use EVM payments.
+- The node runs without a wallet. This increases security and removes the need for forwarding.
+- Data is paid for through an EVM smart contract. Payment proofs are not linked to the original
+ data.
+- Payment royalties have been removed, resulting in less centralization and fees.
-## 2024-10-07
+## 2024-10-08
### Network
#### Changed
-- The auditor's webservice has new endpoints that allow it to be restarted or terminated
+- Optimize auditor tracking by not to re-attempt fetched spend.
+- Optimize auditor tracking function by using DashMap and stream.
## 2024-10-07
diff --git a/Cargo.lock b/Cargo.lock
index f35188f13d..81f3daed4e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -33,18 +33,6 @@ dependencies = [
"generic-array 0.14.7",
]
-[[package]]
-name = "aes"
-version = "0.7.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8"
-dependencies = [
- "cfg-if",
- "cipher 0.3.0",
- "cpufeatures",
- "opaque-debug 0.3.1",
-]
-
[[package]]
name = "aes"
version = "0.8.4"
@@ -52,7 +40,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
- "cipher 0.4.4",
+ "cipher",
"cpufeatures",
]
@@ -63,8 +51,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
dependencies = [
"aead",
- "aes 0.8.4",
- "cipher 0.4.4",
+ "aes",
+ "cipher",
"ctr",
"ghash",
"subtle",
@@ -77,8 +65,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae0784134ba9375416d469ec31e7c5f9fa94405049cf08c5ce5b4698be673e0d"
dependencies = [
"aead",
- "aes 0.8.4",
- "cipher 0.4.4",
+ "aes",
+ "cipher",
"ctr",
"polyval",
"subtle",
@@ -128,6 +116,533 @@ version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
+[[package]]
+name = "alloy"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "056f2c01b2aed86e15b43c47d109bfc8b82553dc34e66452875e51247ec31ab2"
+dependencies = [
+ "alloy-consensus",
+ "alloy-contract",
+ "alloy-core",
+ "alloy-eips",
+ "alloy-genesis",
+ "alloy-network",
+ "alloy-node-bindings",
+ "alloy-provider",
+ "alloy-rpc-client",
+ "alloy-rpc-types",
+ "alloy-serde",
+ "alloy-signer",
+ "alloy-signer-local",
+ "alloy-transport",
+ "alloy-transport-http",
+]
+
+[[package]]
+name = "alloy-chains"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "805f7a974de5804f5c053edc6ca43b20883bdd3a733b3691200ae3a4b454a2db"
+dependencies = [
+ "num_enum",
+ "strum",
+]
+
+[[package]]
+name = "alloy-consensus"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "705687d5bfd019fee57cf9e206b27b30a9a9617535d5590a02b171e813208f8e"
+dependencies = [
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "auto_impl",
+ "c-kzg",
+ "derive_more",
+ "serde",
+]
+
+[[package]]
+name = "alloy-contract"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "917f7d12cf3971dc8c11c9972f732b35ccb9aaaf5f28f2f87e9e6523bee3a8ad"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-provider",
+ "alloy-rpc-types-eth",
+ "alloy-sol-types",
+ "alloy-transport",
+ "futures",
+ "futures-util",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-core"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ce854562e7cafd5049189d0268d6e5cba05fe6c9cb7c6f8126a79b94800629c"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-sol-types",
+]
+
+[[package]]
+name = "alloy-dyn-abi"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b499852e1d0e9b8c6db0f24c48998e647c0d5762a01090f955106a7700e4611"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "alloy-sol-types",
+ "const-hex",
+ "itoa",
+ "serde",
+ "serde_json",
+ "winnow",
+]
+
+[[package]]
+name = "alloy-eip2930"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eip7702"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eips"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ffb906284a1e1f63c4607da2068c8197458a352d0b3e9796e67353d72a9be85"
+dependencies = [
+ "alloy-eip2930",
+ "alloy-eip7702",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "c-kzg",
+ "derive_more",
+ "once_cell",
+ "serde",
+ "sha2 0.10.8",
+]
+
+[[package]]
+name = "alloy-genesis"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8429cf4554eed9b40feec7f4451113e76596086447550275e3def933faf47ce3"
+dependencies = [
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-json-abi"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a438d4486b5d525df3b3004188f9d5cd1d65cd30ecc41e5a3ccef6f6342e8af9"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-json-rpc"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8fa8a1a3c4cbd221f2b8e3693aeb328fca79a757fe556ed08e47bbbc2a70db7"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-types",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tracing",
+]
+
+[[package]]
+name = "alloy-network"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85fa23a6a9d612b52e402c995f2d582c25165ec03ac6edf64c861a76bc5b87cd"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "alloy-signer",
+ "alloy-sol-types",
+ "async-trait",
+ "auto_impl",
+ "futures-utils-wasm",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-network-primitives"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "801492711d4392b2ccf5fc0bc69e299fa1aab15167d74dcaa9aab96a54f684bd"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-node-bindings"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f1334a738aa1710cb8227441b3fcc319202ce78e967ef37406940242df4a454"
+dependencies = [
+ "alloy-genesis",
+ "alloy-primitives",
+ "k256",
+ "rand 0.8.5",
+ "serde_json",
+ "tempfile",
+ "thiserror",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-primitives"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "260d3ff3bff0bb84599f032a2f2c6828180b0ea0cd41fdaf44f39cef3ba41861"
+dependencies = [
+ "alloy-rlp",
+ "bytes",
+ "cfg-if",
+ "const-hex",
+ "derive_more",
+ "hashbrown 0.14.5",
+ "hex-literal",
+ "indexmap 2.5.0",
+ "itoa",
+ "k256",
+ "keccak-asm",
+ "paste",
+ "proptest",
+ "rand 0.8.5",
+ "ruint",
+ "rustc-hash",
+ "serde",
+ "sha3 0.10.8",
+ "tiny-keccak",
+]
+
+[[package]]
+name = "alloy-provider"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcfaa4ffec0af04e3555686b8aacbcdf7d13638133a0672749209069750f78a6"
+dependencies = [
+ "alloy-chains",
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-node-bindings",
+ "alloy-primitives",
+ "alloy-rpc-client",
+ "alloy-rpc-types-anvil",
+ "alloy-rpc-types-eth",
+ "alloy-signer-local",
+ "alloy-transport",
+ "alloy-transport-http",
+ "async-stream",
+ "async-trait",
+ "auto_impl",
+ "dashmap",
+ "futures",
+ "futures-utils-wasm",
+ "lru",
+ "pin-project",
+ "reqwest 0.12.7",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-rlp"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26154390b1d205a4a7ac7352aa2eb4f81f391399d4e2f546fb81a2f8bb383f62"
+dependencies = [
+ "alloy-rlp-derive",
+ "arrayvec",
+ "bytes",
+]
+
+[[package]]
+name = "alloy-rlp-derive"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
+[[package]]
+name = "alloy-rpc-client"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "370143ed581aace6e663342d21d209c6b2e34ee6142f7d6675adb518deeaf0dc"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-primitives",
+ "alloy-transport",
+ "alloy-transport-http",
+ "futures",
+ "pin-project",
+ "reqwest 0.12.7",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-rpc-types"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ffc534b7919e18f35e3aa1f507b6f3d9d92ec298463a9f6beaac112809d8d06"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rpc-types-anvil",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-rpc-types-anvil"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d780adaa5d95b07ad92006b2feb68ecfa7e2015f7d5976ceaac4c906c73ebd07"
+dependencies = [
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-rpc-types-eth"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413f4aa3ccf2c3e4234a047c5fa4727916d7daf25a89f9b765df0ba09784fd87"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "alloy-sol-types",
+ "derive_more",
+ "itertools 0.13.0",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-serde"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dff0ab1cdd43ca001e324dc27ee0e8606bd2161d6623c63e0e0b8c4dfc13600"
+dependencies = [
+ "alloy-primitives",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-signer"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fd4e0ad79c81a27ca659be5d176ca12399141659fef2bcbfdc848da478f4504"
+dependencies = [
+ "alloy-primitives",
+ "async-trait",
+ "auto_impl",
+ "elliptic-curve 0.13.8",
+ "k256",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-signer-local"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "494e0a256f3e99f2426f994bcd1be312c02cb8f88260088dacb33a8b8936475f"
+dependencies = [
+ "alloy-consensus",
+ "alloy-network",
+ "alloy-primitives",
+ "alloy-signer",
+ "async-trait",
+ "k256",
+ "rand 0.8.5",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-sol-macro"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68e7f6e8fe5b443f82b3f1e15abfa191128f71569148428e49449d01f6f49e8b"
+dependencies = [
+ "alloy-sol-macro-expander",
+ "alloy-sol-macro-input",
+ "proc-macro-error2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
+[[package]]
+name = "alloy-sol-macro-expander"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b96ce28d2fde09abb6135f410c41fad670a3a770b6776869bd852f1df102e6f"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-sol-macro-input",
+ "const-hex",
+ "heck 0.5.0",
+ "indexmap 2.5.0",
+ "proc-macro-error2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+ "syn-solidity",
+ "tiny-keccak",
+]
+
+[[package]]
+name = "alloy-sol-macro-input"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "906746396a8296537745711630d9185746c0b50c033d5e9d18b0a6eba3d53f90"
+dependencies = [
+ "alloy-json-abi",
+ "const-hex",
+ "dunce",
+ "heck 0.5.0",
+ "proc-macro2",
+ "quote",
+ "serde_json",
+ "syn 2.0.77",
+ "syn-solidity",
+]
+
+[[package]]
+name = "alloy-sol-type-parser"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc85178909a49c8827ffccfc9103a7ce1767ae66a801b69bdc326913870bf8e6"
+dependencies = [
+ "serde",
+ "winnow",
+]
+
+[[package]]
+name = "alloy-sol-types"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d86a533ce22525969661b25dfe296c112d35eb6861f188fd284f8bd4bb3842ae"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-macro",
+ "const-hex",
+ "serde",
+]
+
+[[package]]
+name = "alloy-transport"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ac3e97dad3d31770db0fc89bd6a63b789fbae78963086733f960cf32c483904"
+dependencies = [
+ "alloy-json-rpc",
+ "base64 0.22.1",
+ "futures-util",
+ "futures-utils-wasm",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tokio",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+ "wasm-bindgen-futures",
+]
+
+[[package]]
+name = "alloy-transport-http"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b367dcccada5b28987c2296717ee04b9a5637aacd78eacb1726ef211678b5212"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-transport",
+ "reqwest 0.12.7",
+ "serde_json",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+]
+
[[package]]
name = "android-tzdata"
version = "0.1.1"
@@ -210,6 +725,130 @@ version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+[[package]]
+name = "ark-ff"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6"
+dependencies = [
+ "ark-ff-asm 0.3.0",
+ "ark-ff-macros 0.3.0",
+ "ark-serialize 0.3.0",
+ "ark-std 0.3.0",
+ "derivative",
+ "num-bigint 0.4.6",
+ "num-traits",
+ "paste",
+ "rustc_version 0.3.3",
+ "zeroize",
+]
+
+[[package]]
+name = "ark-ff"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba"
+dependencies = [
+ "ark-ff-asm 0.4.2",
+ "ark-ff-macros 0.4.2",
+ "ark-serialize 0.4.2",
+ "ark-std 0.4.0",
+ "derivative",
+ "digest 0.10.7",
+ "itertools 0.10.5",
+ "num-bigint 0.4.6",
+ "num-traits",
+ "paste",
+ "rustc_version 0.4.1",
+ "zeroize",
+]
+
+[[package]]
+name = "ark-ff-asm"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-asm"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-macros"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20"
+dependencies = [
+ "num-bigint 0.4.6",
+ "num-traits",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-macros"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565"
+dependencies = [
+ "num-bigint 0.4.6",
+ "num-traits",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-serialize"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671"
+dependencies = [
+ "ark-std 0.3.0",
+ "digest 0.9.0",
+]
+
+[[package]]
+name = "ark-serialize"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5"
+dependencies = [
+ "ark-std 0.4.0",
+ "digest 0.10.7",
+ "num-bigint 0.4.6",
+]
+
+[[package]]
+name = "ark-std"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c"
+dependencies = [
+ "num-traits",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "ark-std"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185"
+dependencies = [
+ "num-traits",
+ "rand 0.8.5",
+]
+
[[package]]
name = "arrayref"
version = "0.3.9"
@@ -252,7 +891,7 @@ checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
"synstructure",
]
@@ -264,7 +903,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -336,9 +975,9 @@ dependencies = [
[[package]]
name = "async-stream"
-version = "0.3.6"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51"
dependencies = [
"async-stream-impl",
"futures-core",
@@ -347,37 +986,24 @@ dependencies = [
[[package]]
name = "async-stream-impl"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.79",
-]
-
-[[package]]
-name = "async-trait"
-version = "0.1.83"
+version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
+checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
-name = "asynchronous-codec"
-version = "0.6.2"
+name = "async-trait"
+version = "0.1.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568"
+checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1"
dependencies = [
- "bytes",
- "futures-sink",
- "futures-util",
- "memchr",
- "pin-project-lite",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
]
[[package]]
@@ -415,45 +1041,97 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "auto_impl"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
[[package]]
name = "autocfg"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78"
dependencies = [
- "autocfg 1.4.0",
+ "autocfg 1.3.0",
]
[[package]]
name = "autocfg"
-version = "1.4.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
+checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]]
name = "autonomi"
-version = "0.1.2"
+version = "0.2.0"
dependencies = [
"bip39",
"blsttc",
"bytes",
- "libp2p",
+ "console_error_panic_hook",
+ "const-hex",
+ "evmlib",
+ "eyre",
+ "futures",
+ "hex 0.4.3",
+ "instant",
+ "js-sys",
+ "libp2p 0.54.1",
"rand 0.8.5",
"rmp-serde",
"self_encryption",
"serde",
- "sn_client",
+ "serde-wasm-bindgen",
+ "sha2 0.10.8",
+ "sn_bls_ckd",
+ "sn_curv",
+ "sn_evm",
+ "sn_logging",
+ "sn_networking",
+ "sn_peers_acquisition",
"sn_protocol",
"sn_registers",
- "sn_transfers",
+ "test_utils",
"thiserror",
+ "tiny_http",
"tokio",
"tracing",
"tracing-subscriber",
+ "tracing-web",
"walkdir",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test",
"xor_name",
]
+[[package]]
+name = "autonomi-cli"
+version = "0.1.1"
+dependencies = [
+ "autonomi",
+ "clap",
+ "color-eyre",
+ "criterion",
+ "dirs-next",
+ "eyre",
+ "indicatif",
+ "rand 0.8.5",
+ "rayon",
+ "sn_build_info",
+ "sn_logging",
+ "sn_peers_acquisition",
+ "tempfile",
+ "tokio",
+ "tracing",
+]
+
[[package]]
name = "axum"
version = "0.6.20"
@@ -477,7 +1155,7 @@ dependencies = [
"rustversion",
"serde",
"sync_wrapper 0.1.2",
- "tower",
+ "tower 0.4.13",
"tower-layer",
"tower-service",
]
@@ -540,6 +1218,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
+[[package]]
+name = "base16ct"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
+
[[package]]
name = "base64"
version = "0.13.1"
@@ -564,12 +1248,6 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
-[[package]]
-name = "bech32"
-version = "0.10.0-beta"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98f7eed2b2781a6f0b5c903471d48e15f56fb4e1165df8a9a2337fd1a59d45ea"
-
[[package]]
name = "better-panic"
version = "0.3.0"
@@ -595,7 +1273,7 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f"
dependencies = [
- "bitcoin_hashes 0.11.0",
+ "bitcoin_hashes",
"serde",
"unicode-normalization",
]
@@ -615,43 +1293,12 @@ version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
-[[package]]
-name = "bitcoin"
-version = "0.31.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c85783c2fe40083ea54a33aa2f0ba58831d90fcd190f5bdc47e74e84d2a96ae"
-dependencies = [
- "base64 0.21.7",
- "bech32",
- "bitcoin-internals",
- "bitcoin_hashes 0.13.0",
- "hex-conservative",
- "hex_lit",
- "secp256k1 0.28.2",
-]
-
-[[package]]
-name = "bitcoin-internals"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9425c3bf7089c983facbae04de54513cce73b41c7f9ff8c845b54e7bc64ebbfb"
-
[[package]]
name = "bitcoin_hashes"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90064b8dee6815a6470d60bad07bbbaee885c0e12d04177138fa3291a01b7bc4"
-[[package]]
-name = "bitcoin_hashes"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1930a4dabfebb8d7d9992db18ebe3ae2876f0a305fab206fd168df931ede293b"
-dependencies = [
- "bitcoin-internals",
- "hex-conservative",
-]
-
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -719,16 +1366,6 @@ dependencies = [
"generic-array 0.14.7",
]
-[[package]]
-name = "block-modes"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cb03d1bed155d89dce0f845b7899b18a9a163e148fd004e1c28421a783e2d8e"
-dependencies = [
- "block-padding 0.2.1",
- "cipher 0.3.0",
-]
-
[[package]]
name = "block-padding"
version = "0.1.5"
@@ -773,8 +1410,8 @@ checksum = "1ff3694b352ece02eb664a09ffb948ee69b35afa2e6ac444a6b8cb9d515deebd"
dependencies = [
"blst",
"byte-slice-cast",
- "ff",
- "group",
+ "ff 0.12.1",
+ "group 0.12.1",
"pairing",
"rand_core 0.6.4",
"serde",
@@ -789,8 +1426,8 @@ checksum = "1186a39763321a0b73d1a10aa4fc067c5d042308509e8f6cc31d2c2a7ac61ac2"
dependencies = [
"blst",
"blstrs",
- "ff",
- "group",
+ "ff 0.12.1",
+ "group 0.12.1",
"hex 0.4.3",
"hex_fmt",
"pairing",
@@ -903,6 +1540,21 @@ dependencies = [
"pkg-config",
]
+[[package]]
+name = "c-kzg"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0307f72feab3300336fb803a57134159f6e20139af1357f36c54cb90d8e8928"
+dependencies = [
+ "blst",
+ "cc",
+ "glob",
+ "hex 0.4.3",
+ "libc",
+ "once_cell",
+ "serde",
+]
+
[[package]]
name = "camino"
version = "1.1.9"
@@ -929,7 +1581,7 @@ checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
dependencies = [
"camino",
"cargo-platform",
- "semver",
+ "semver 1.0.23",
"serde",
"serde_json",
"thiserror",
@@ -962,7 +1614,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
dependencies = [
- "cipher 0.4.4",
+ "cipher",
]
[[package]]
@@ -976,9 +1628,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.1.24"
+version = "1.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
+checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0"
dependencies = [
"jobserver",
"libc",
@@ -998,7 +1650,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
dependencies = [
"cfg-if",
- "cipher 0.4.4",
+ "cipher",
"cpufeatures",
]
@@ -1010,7 +1662,7 @@ checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35"
dependencies = [
"aead",
"chacha20",
- "cipher 0.4.4",
+ "cipher",
"poly1305",
"zeroize",
]
@@ -1025,6 +1677,7 @@ dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
+ "serde",
"wasm-bindgen",
"windows-targets 0.52.6",
]
@@ -1062,15 +1715,6 @@ dependencies = [
"half",
]
-[[package]]
-name = "cipher"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7"
-dependencies = [
- "generic-array 0.14.7",
-]
-
[[package]]
name = "cipher"
version = "0.4.4"
@@ -1084,9 +1728,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.19"
+version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615"
+checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac"
dependencies = [
"clap_builder",
"clap_derive",
@@ -1094,9 +1738,9 @@ dependencies = [
[[package]]
name = "clap-verbosity-flag"
-version = "2.2.2"
+version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e099138e1807662ff75e2cebe4ae2287add879245574489f9b1588eb5e5564ed"
+checksum = "63d19864d6b68464c59f7162c9914a0b569ddc2926b4a2d71afe62a9738eff53"
dependencies = [
"clap",
"log",
@@ -1104,9 +1748,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.19"
+version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b"
+checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73"
dependencies = [
"anstream",
"anstyle",
@@ -1114,19 +1758,19 @@ dependencies = [
"strsim",
"terminal_size",
"unicase",
- "unicode-width 0.2.0",
+ "unicode-width",
]
[[package]]
name = "clap_derive"
-version = "4.5.18"
+version = "4.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
+checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -1246,7 +1890,7 @@ dependencies = [
"encode_unicode",
"lazy_static",
"libc",
- "unicode-width 0.1.14",
+ "unicode-width",
"windows-sys 0.52.0",
]
@@ -1260,6 +1904,19 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "const-hex"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "hex 0.4.3",
+ "proptest",
+ "serde",
+]
+
[[package]]
name = "const-oid"
version = "0.9.6"
@@ -1359,8 +2016,6 @@ version = "7.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "387808c885b79055facbd4b2e806a683fe1bc37abc7dfa5fea1974ad2d4137b0"
dependencies = [
- "num",
- "quickcheck",
"serde",
"tiny-keccak",
]
@@ -1496,6 +2151,18 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "crypto-bigint"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
+dependencies = [
+ "generic-array 0.14.7",
+ "rand_core 0.6.4",
+ "subtle",
+ "zeroize",
+]
+
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -1523,7 +2190,7 @@ version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
dependencies = [
- "cipher 0.4.4",
+ "cipher",
]
[[package]]
@@ -1550,7 +2217,7 @@ dependencies = [
"curve25519-dalek-derive",
"digest 0.10.7",
"fiat-crypto",
- "rustc_version",
+ "rustc_version 0.4.1",
"subtle",
"zeroize",
]
@@ -1563,7 +2230,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -1585,7 +2252,7 @@ dependencies = [
"itertools 0.12.1",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
"synstructure",
]
@@ -1610,7 +2277,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -1621,7 +2288,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -1714,6 +2381,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
dependencies = [
"powerfmt",
+ "serde",
+]
+
+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
]
[[package]]
@@ -1728,16 +2407,24 @@ dependencies = [
]
[[package]]
-name = "dialoguer"
-version = "0.11.0"
+name = "derive_more"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de"
+checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
dependencies = [
- "console",
- "shell-words",
- "tempfile",
- "thiserror",
- "zeroize",
+ "derive_more-impl",
+]
+
+[[package]]
+name = "derive_more-impl"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+ "unicode-xid",
]
[[package]]
@@ -1777,6 +2464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer 0.10.4",
+ "const-oid",
"crypto-common",
"subtle",
]
@@ -1851,7 +2539,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -1869,21 +2557,6 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
-[[package]]
-name = "dot-generator"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0aaac7ada45f71873ebce336491d1c1bc4a7c8042c7cea978168ad59e805b871"
-dependencies = [
- "dot-structures",
-]
-
-[[package]]
-name = "dot-structures"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "675e35c02a51bb4d4618cb4885b3839ce6d1787c97b664474d9208d074742e20"
-
[[package]]
name = "downcast"
version = "0.11.0"
@@ -1909,11 +2582,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c"
dependencies = [
"der 0.6.1",
- "elliptic-curve",
- "rfc6979",
+ "elliptic-curve 0.12.3",
+ "rfc6979 0.3.1",
"signature 1.6.4",
]
+[[package]]
+name = "ecdsa"
+version = "0.16.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
+dependencies = [
+ "der 0.7.9",
+ "digest 0.10.7",
+ "elliptic-curve 0.13.8",
+ "rfc6979 0.4.0",
+ "signature 2.2.0",
+ "spki 0.7.3",
+]
+
[[package]]
name = "ed25519"
version = "2.2.3"
@@ -1951,16 +2638,35 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3"
dependencies = [
- "base16ct",
- "crypto-bigint",
+ "base16ct 0.1.1",
+ "crypto-bigint 0.4.9",
"der 0.6.1",
"digest 0.10.7",
- "ff",
+ "ff 0.12.1",
"generic-array 0.14.7",
- "group",
+ "group 0.12.1",
"pkcs8 0.9.0",
"rand_core 0.6.4",
- "sec1",
+ "sec1 0.3.0",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "elliptic-curve"
+version = "0.13.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
+dependencies = [
+ "base16ct 0.2.0",
+ "crypto-bigint 0.5.5",
+ "digest 0.10.7",
+ "ff 0.13.0",
+ "generic-array 0.14.7",
+ "group 0.13.0",
+ "pkcs8 0.10.2",
+ "rand_core 0.6.4",
+ "sec1 0.7.3",
"subtle",
"zeroize",
]
@@ -1989,7 +2695,7 @@ dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -2039,6 +2745,32 @@ dependencies = [
"pin-project-lite",
]
+[[package]]
+name = "evm_testnet"
+version = "0.1.1"
+dependencies = [
+ "clap",
+ "dirs-next",
+ "evmlib",
+ "sn_evm",
+ "tokio",
+]
+
+[[package]]
+name = "evmlib"
+version = "0.1.1"
+dependencies = [
+ "alloy",
+ "dirs-next",
+ "getrandom 0.2.15",
+ "rand 0.8.5",
+ "serde",
+ "serde_with",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
[[package]]
name = "eyre"
version = "0.6.12"
@@ -2078,6 +2810,17 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
+[[package]]
+name = "fastrlp"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418"
+dependencies = [
+ "arrayvec",
+ "auto_impl",
+ "bytes",
+]
+
[[package]]
name = "ff"
version = "0.12.1"
@@ -2089,6 +2832,16 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "ff"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449"
+dependencies = [
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "ff-zeroize"
version = "0.6.3"
@@ -2155,6 +2908,18 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "fixed-hash"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534"
+dependencies = [
+ "byteorder",
+ "rand 0.8.5",
+ "rustc-hex",
+ "static_assertions",
+]
+
[[package]]
name = "fixedbitset"
version = "0.4.2"
@@ -2163,9 +2928,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
-version = "1.0.34"
+version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0"
+checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253"
dependencies = [
"crc32fast",
"miniz_oxide 0.8.0",
@@ -2306,7 +3071,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -2371,6 +3136,12 @@ dependencies = [
"slab",
]
+[[package]]
+name = "futures-utils-wasm"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
+
[[package]]
name = "generic-array"
version = "0.12.4"
@@ -2388,6 +3159,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
+ "zeroize",
]
[[package]]
@@ -2702,7 +3474,7 @@ checksum = "999ce923619f88194171a67fb3e6d613653b8d4d6078b529b15a765da0edcc17"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -2980,31 +3752,26 @@ dependencies = [
]
[[package]]
-name = "graphviz-rust"
-version = "0.9.0"
+name = "group"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c33d03804e2ce21db5821f2beb4e54f844a8f90326e6bd99a1771dc54aef427"
+checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
dependencies = [
- "dot-generator",
- "dot-structures",
- "into-attr",
- "into-attr-derive",
- "pest",
- "pest_derive",
+ "ff 0.12.1",
"rand 0.8.5",
- "tempfile",
+ "rand_core 0.6.4",
+ "rand_xorshift 0.3.0",
+ "subtle",
]
[[package]]
name = "group"
-version = "0.12.1"
+version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
+checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
- "ff",
- "rand 0.8.5",
+ "ff 0.13.0",
"rand_core 0.6.4",
- "rand_xorshift 0.3.0",
"subtle",
]
@@ -3020,7 +3787,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 0.2.12",
- "indexmap 2.6.0",
+ "indexmap 2.5.0",
"slab",
"tokio",
"tokio-util 0.7.12",
@@ -3057,14 +3824,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
"allocator-api2",
+ "serde",
]
-[[package]]
-name = "hashbrown"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
-
[[package]]
name = "headers"
version = "0.3.9"
@@ -3141,10 +3903,10 @@ dependencies = [
]
[[package]]
-name = "hex-conservative"
-version = "0.1.2"
+name = "hex-literal"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "212ab92002354b4819390025006c897e8140934349e8635c9b077f47b4dcbd20"
+checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
[[package]]
name = "hex_fmt"
@@ -3152,12 +3914,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f"
-[[package]]
-name = "hex_lit"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3011d1213f159867b13cfd6ac92d2cd5f1345762c63be3554e84092d85a50bbd"
-
[[package]]
name = "hickory-proto"
version = "0.24.1"
@@ -3310,9 +4066,9 @@ dependencies = [
[[package]]
name = "httparse"
-version = "1.9.5"
+version = "1.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946"
+checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9"
[[package]]
name = "httpdate"
@@ -3425,9 +4181,9 @@ dependencies = [
[[package]]
name = "hyper-util"
-version = "0.1.9"
+version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b"
+checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba"
dependencies = [
"bytes",
"futures-channel",
@@ -3438,6 +4194,7 @@ dependencies = [
"pin-project-lite",
"socket2",
"tokio",
+ "tower 0.4.13",
"tower-service",
"tracing",
]
@@ -3555,6 +4312,26 @@ dependencies = [
"winapi-util",
]
+[[package]]
+name = "impl-codec"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f"
+dependencies = [
+ "parity-scale-codec",
+]
+
+[[package]]
+name = "impl-trait-for-tuples"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "indenter"
version = "0.3.3"
@@ -3567,18 +4344,20 @@ version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
- "autocfg 1.4.0",
+ "autocfg 1.3.0",
"hashbrown 0.12.3",
+ "serde",
]
[[package]]
name = "indexmap"
-version = "2.6.0"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
+checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
dependencies = [
"equivalent",
- "hashbrown 0.15.0",
+ "hashbrown 0.14.5",
+ "serde",
]
[[package]]
@@ -3592,7 +4371,7 @@ dependencies = [
"number_prefix",
"portable-atomic",
"tokio",
- "unicode-width 0.1.14",
+ "unicode-width",
]
[[package]]
@@ -3602,7 +4381,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88"
dependencies = [
"ahash",
- "indexmap 2.6.0",
+ "indexmap 2.5.0",
"is-terminal",
"itoa",
"log",
@@ -3630,7 +4409,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b23a0c8dfe501baac4adf6ebbfa6eddf8f0c07f56b058cc1288017e32397846c"
dependencies = [
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -3645,28 +4424,6 @@ dependencies = [
"web-sys",
]
-[[package]]
-name = "into-attr"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18b48c537e49a709e678caec3753a7dba6854661a1eaa27675024283b3f8b376"
-dependencies = [
- "dot-structures",
-]
-
-[[package]]
-name = "into-attr-derive"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecac7c1ae6cd2c6a3a64d1061a8bdc7f52ff62c26a831a2301e54c1b5d70d5b1"
-dependencies = [
- "dot-generator",
- "dot-structures",
- "into-attr",
- "quote",
- "syn 1.0.109",
-]
-
[[package]]
name = "ipconfig"
version = "0.3.2"
@@ -3764,6 +4521,19 @@ dependencies = [
"serde",
]
+[[package]]
+name = "k256"
+version = "0.13.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b"
+dependencies = [
+ "cfg-if",
+ "ecdsa 0.16.9",
+ "elliptic-curve 0.13.8",
+ "once_cell",
+ "sha2 0.10.8",
+]
+
[[package]]
name = "keccak"
version = "0.1.5"
@@ -3773,6 +4543,16 @@ dependencies = [
"cpufeatures",
]
+[[package]]
+name = "keccak-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6"
+dependencies = [
+ "digest 0.10.7",
+ "sha3-asm",
+]
+
[[package]]
name = "lazy_static"
version = "1.4.0"
@@ -3781,9 +4561,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.159"
+version = "0.2.158"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5"
+checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
[[package]]
name = "libm"
@@ -3803,22 +4583,47 @@ dependencies = [
"futures-timer",
"getrandom 0.2.15",
"instant",
- "libp2p-allow-block-list",
+ "libp2p-allow-block-list 0.3.0",
+ "libp2p-connection-limits 0.3.1",
+ "libp2p-core 0.41.3",
+ "libp2p-identify 0.44.2",
+ "libp2p-identity",
+ "libp2p-kad 0.45.3",
+ "libp2p-metrics 0.14.1",
+ "libp2p-swarm 0.44.2",
+ "multiaddr",
+ "pin-project",
+ "rw-stream-sink",
+ "thiserror",
+]
+
+[[package]]
+name = "libp2p"
+version = "0.54.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbe80f9c7e00526cd6b838075b9c171919404a4732cb2fa8ece0a093223bfc4"
+dependencies = [
+ "bytes",
+ "either",
+ "futures",
+ "futures-timer",
+ "getrandom 0.2.15",
+ "libp2p-allow-block-list 0.4.0",
"libp2p-autonat",
- "libp2p-connection-limits",
- "libp2p-core",
+ "libp2p-connection-limits 0.4.0",
+ "libp2p-core 0.42.0",
"libp2p-dns",
"libp2p-gossipsub",
- "libp2p-identify",
+ "libp2p-identify 0.45.0",
"libp2p-identity",
- "libp2p-kad",
+ "libp2p-kad 0.46.2",
"libp2p-mdns",
- "libp2p-metrics",
+ "libp2p-metrics 0.15.0",
"libp2p-noise",
"libp2p-quic",
"libp2p-relay",
"libp2p-request-response",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"libp2p-tcp",
"libp2p-upnp",
"libp2p-websocket",
@@ -3836,31 +4641,49 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "107b238b794cb83ab53b74ad5dcf7cca3200899b72fe662840cfb52f5b0a32e6"
dependencies = [
- "libp2p-core",
+ "libp2p-core 0.41.3",
+ "libp2p-identity",
+ "libp2p-swarm 0.44.2",
+ "void",
+]
+
+[[package]]
+name = "libp2p-allow-block-list"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1027ccf8d70320ed77e984f273bc8ce952f623762cb9bf2d126df73caef8041"
+dependencies = [
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"void",
]
[[package]]
name = "libp2p-autonat"
-version = "0.12.0"
+version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d95151726170e41b591735bf95c42b888fe4aa14f65216a9fbf0edcc04510586"
+checksum = "a083675f189803d0682a2726131628e808144911dad076858bfbe30b13065499"
dependencies = [
"async-trait",
- "asynchronous-codec 0.6.2",
+ "asynchronous-codec",
+ "bytes",
+ "either",
"futures",
+ "futures-bounded",
"futures-timer",
- "instant",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"libp2p-request-response",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"quick-protobuf",
- "quick-protobuf-codec 0.2.0",
+ "quick-protobuf-codec",
"rand 0.8.5",
+ "rand_core 0.6.4",
+ "thiserror",
"tracing",
+ "void",
+ "web-time",
]
[[package]]
@@ -3869,9 +4692,21 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7cd50a78ccfada14de94cbacd3ce4b0138157f376870f13d3a8422cd075b4fd"
dependencies = [
- "libp2p-core",
+ "libp2p-core 0.41.3",
+ "libp2p-identity",
+ "libp2p-swarm 0.44.2",
+ "void",
+]
+
+[[package]]
+name = "libp2p-connection-limits"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d003540ee8baef0d254f7b6bfd79bac3ddf774662ca0abf69186d517ef82ad8"
+dependencies = [
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"void",
]
@@ -3903,16 +4738,44 @@ dependencies = [
"web-time",
]
+[[package]]
+name = "libp2p-core"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a61f26c83ed111104cd820fe9bc3aaabbac5f1652a1d213ed6e900b7918a1298"
+dependencies = [
+ "either",
+ "fnv",
+ "futures",
+ "futures-timer",
+ "libp2p-identity",
+ "multiaddr",
+ "multihash",
+ "multistream-select",
+ "once_cell",
+ "parking_lot",
+ "pin-project",
+ "quick-protobuf",
+ "rand 0.8.5",
+ "rw-stream-sink",
+ "smallvec",
+ "thiserror",
+ "tracing",
+ "unsigned-varint 0.8.0",
+ "void",
+ "web-time",
+]
+
[[package]]
name = "libp2p-dns"
-version = "0.41.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d17cbcf7160ff35c3e8e560de4a068fe9d6cb777ea72840e48eb76ff9576c4b6"
+checksum = "97f37f30d5c7275db282ecd86e54f29dd2176bd3ac656f06abf43bedb21eb8bd"
dependencies = [
"async-trait",
"futures",
"hickory-resolver",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"parking_lot",
"smallvec",
@@ -3921,12 +4784,12 @@ dependencies = [
[[package]]
name = "libp2p-gossipsub"
-version = "0.46.1"
+version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d665144a616dadebdc5fff186b1233488cdcd8bfb1223218ff084b6d052c94f7"
+checksum = "b4e830fdf24ac8c444c12415903174d506e1e077fbe3875c404a78c5935a8543"
dependencies = [
- "asynchronous-codec 0.7.0",
- "base64 0.21.7",
+ "asynchronous-codec",
+ "base64 0.22.1",
"byteorder",
"bytes",
"either",
@@ -3935,19 +4798,19 @@ dependencies = [
"futures-ticker",
"getrandom 0.2.15",
"hex_fmt",
- "instant",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"prometheus-client",
"quick-protobuf",
- "quick-protobuf-codec 0.3.1",
+ "quick-protobuf-codec",
"rand 0.8.5",
"regex",
"sha2 0.10.8",
"smallvec",
"tracing",
"void",
+ "web-time",
]
[[package]]
@@ -3956,17 +4819,40 @@ version = "0.44.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5d635ebea5ca0c3c3e77d414ae9b67eccf2a822be06091b9c1a0d13029a1e2f"
dependencies = [
- "asynchronous-codec 0.7.0",
+ "asynchronous-codec",
+ "either",
+ "futures",
+ "futures-bounded",
+ "futures-timer",
+ "libp2p-core 0.41.3",
+ "libp2p-identity",
+ "libp2p-swarm 0.44.2",
+ "lru",
+ "quick-protobuf",
+ "quick-protobuf-codec",
+ "smallvec",
+ "thiserror",
+ "tracing",
+ "void",
+]
+
+[[package]]
+name = "libp2p-identify"
+version = "0.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1711b004a273be4f30202778856368683bd9a83c4c7dcc8f848847606831a4e3"
+dependencies = [
+ "asynchronous-codec",
"either",
"futures",
"futures-bounded",
"futures-timer",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"lru",
"quick-protobuf",
- "quick-protobuf-codec 0.3.1",
+ "quick-protobuf-codec",
"smallvec",
"thiserror",
"tracing",
@@ -3998,7 +4884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc5767727d062c4eac74dd812c998f0e488008e82cce9c33b463d38423f9ad2"
dependencies = [
"arrayvec",
- "asynchronous-codec 0.7.0",
+ "asynchronous-codec",
"bytes",
"either",
"fnv",
@@ -4006,11 +4892,39 @@ dependencies = [
"futures-bounded",
"futures-timer",
"instant",
- "libp2p-core",
+ "libp2p-core 0.41.3",
+ "libp2p-identity",
+ "libp2p-swarm 0.44.2",
+ "quick-protobuf",
+ "quick-protobuf-codec",
+ "rand 0.8.5",
+ "sha2 0.10.8",
+ "smallvec",
+ "thiserror",
+ "tracing",
+ "uint",
+ "void",
+]
+
+[[package]]
+name = "libp2p-kad"
+version = "0.46.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ced237d0bd84bbebb7c2cad4c073160dacb4fe40534963c32ed6d4c6bb7702a3"
+dependencies = [
+ "arrayvec",
+ "asynchronous-codec",
+ "bytes",
+ "either",
+ "fnv",
+ "futures",
+ "futures-bounded",
+ "futures-timer",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"quick-protobuf",
- "quick-protobuf-codec 0.3.1",
+ "quick-protobuf-codec",
"rand 0.8.5",
"sha2 0.10.8",
"smallvec",
@@ -4018,21 +4932,22 @@ dependencies = [
"tracing",
"uint",
"void",
+ "web-time",
]
[[package]]
name = "libp2p-mdns"
-version = "0.45.1"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49007d9a339b3e1d7eeebc4d67c05dbf23d300b7d091193ec2d3f26802d7faf2"
+checksum = "14b8546b6644032565eb29046b42744aee1e9f261ed99671b2c93fb140dba417"
dependencies = [
"data-encoding",
"futures",
"hickory-proto",
"if-watch",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"rand 0.8.5",
"smallvec",
"socket2",
@@ -4049,27 +4964,44 @@ checksum = "fdac91ae4f291046a3b2660c039a2830c931f84df2ee227989af92f7692d3357"
dependencies = [
"futures",
"instant",
- "libp2p-core",
- "libp2p-identify",
+ "libp2p-core 0.41.3",
+ "libp2p-identify 0.44.2",
+ "libp2p-identity",
+ "libp2p-kad 0.45.3",
+ "libp2p-swarm 0.44.2",
+ "pin-project",
+ "prometheus-client",
+]
+
+[[package]]
+name = "libp2p-metrics"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77ebafa94a717c8442d8db8d3ae5d1c6a15e30f2d347e0cd31d057ca72e42566"
+dependencies = [
+ "futures",
+ "libp2p-core 0.42.0",
+ "libp2p-identify 0.45.0",
"libp2p-identity",
- "libp2p-kad",
+ "libp2p-kad 0.46.2",
"libp2p-relay",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"pin-project",
"prometheus-client",
+ "web-time",
]
[[package]]
name = "libp2p-noise"
-version = "0.44.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ecd0545ce077f6ea5434bcb76e8d0fe942693b4380aaad0d34a358c2bd05793"
+checksum = "36b137cb1ae86ee39f8e5d6245a296518912014eaa87427d24e6ff58cfc1b28c"
dependencies = [
- "asynchronous-codec 0.7.0",
+ "asynchronous-codec",
"bytes",
"curve25519-dalek 4.1.3",
"futures",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"multiaddr",
"multihash",
@@ -4087,15 +5019,15 @@ dependencies = [
[[package]]
name = "libp2p-quic"
-version = "0.10.3"
+version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c67296ad4e092e23f92aea3d2bdb6f24eab79c0929ed816dfb460ea2f4567d2b"
+checksum = "46352ac5cd040c70e88e7ff8257a2ae2f891a4076abad2c439584a31c15fd24e"
dependencies = [
"bytes",
"futures",
"futures-timer",
"if-watch",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"libp2p-tls",
"parking_lot",
@@ -4111,21 +5043,21 @@ dependencies = [
[[package]]
name = "libp2p-relay"
-version = "0.17.2"
+version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d1c667cfabf3dd675c8e3cea63b7b98434ecf51721b7894cbb01d29983a6a9b"
+checksum = "10df23d7f5b5adcc129f4a69d6fbd05209e356ccf9e8f4eb10b2692b79c77247"
dependencies = [
- "asynchronous-codec 0.7.0",
+ "asynchronous-codec",
"bytes",
"either",
"futures",
"futures-bounded",
"futures-timer",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"quick-protobuf",
- "quick-protobuf-codec 0.3.1",
+ "quick-protobuf-codec",
"rand 0.8.5",
"static_assertions",
"thiserror",
@@ -4136,24 +5068,24 @@ dependencies = [
[[package]]
name = "libp2p-request-response"
-version = "0.26.3"
+version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c314fe28368da5e3a262553fb0ad575c1c8934c461e10de10265551478163836"
+checksum = "1356c9e376a94a75ae830c42cdaea3d4fe1290ba409a22c809033d1b7dcab0a6"
dependencies = [
"async-trait",
"cbor4ii",
"futures",
"futures-bounded",
"futures-timer",
- "instant",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
- "libp2p-swarm",
+ "libp2p-swarm 0.45.1",
"rand 0.8.5",
"serde",
"smallvec",
"tracing",
"void",
+ "web-time",
]
[[package]]
@@ -4166,9 +5098,30 @@ dependencies = [
"fnv",
"futures",
"futures-timer",
- "getrandom 0.2.15",
"instant",
- "libp2p-core",
+ "libp2p-core 0.41.3",
+ "libp2p-identity",
+ "lru",
+ "multistream-select",
+ "once_cell",
+ "rand 0.8.5",
+ "smallvec",
+ "tracing",
+ "void",
+]
+
+[[package]]
+name = "libp2p-swarm"
+version = "0.45.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7dd6741793d2c1fb2088f67f82cf07261f25272ebe3c0b0c311e0c6b50e851a"
+dependencies = [
+ "either",
+ "fnv",
+ "futures",
+ "futures-timer",
+ "getrandom 0.2.15",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"libp2p-swarm-derive",
"lru",
@@ -4180,31 +5133,32 @@ dependencies = [
"tracing",
"void",
"wasm-bindgen-futures",
+ "web-time",
]
[[package]]
name = "libp2p-swarm-derive"
-version = "0.34.2"
+version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5daceb9dd908417b6dfcfe8e94098bc4aac54500c282e78120b885dadc09b999"
+checksum = "206e0aa0ebe004d778d79fb0966aa0de996c19894e2c0605ba2f8524dd4443d8"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
name = "libp2p-tcp"
-version = "0.41.0"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b2460fc2748919adff99ecbc1aab296e4579e41f374fb164149bd2c9e529d4c"
+checksum = "ad964f312c59dcfcac840acd8c555de8403e295d39edf96f5240048b5fcaa314"
dependencies = [
"futures",
"futures-timer",
"if-watch",
"libc",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"socket2",
"tokio",
@@ -4213,13 +5167,13 @@ dependencies = [
[[package]]
name = "libp2p-tls"
-version = "0.4.1"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b7b831e55ce2aa6c354e6861a85fdd4dd0a2b97d5e276fabac0e4810a71776"
+checksum = "47b23dddc2b9c355f73c1e36eb0c3ae86f7dc964a3715f0731cfad352db4d847"
dependencies = [
"futures",
"futures-rustls",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"rcgen",
"ring 0.17.8",
@@ -4232,15 +5186,15 @@ dependencies = [
[[package]]
name = "libp2p-upnp"
-version = "0.2.2"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cccf04b0e3ff3de52d07d5fd6c3b061d0e7f908ffc683c32d9638caedce86fc8"
+checksum = "01bf2d1b772bd3abca049214a3304615e6a36fa6ffc742bdd1ba774486200b8f"
dependencies = [
"futures",
"futures-timer",
"igd-next",
- "libp2p-core",
- "libp2p-swarm",
+ "libp2p-core 0.42.0",
+ "libp2p-swarm 0.45.1",
"tokio",
"tracing",
"void",
@@ -4248,14 +5202,14 @@ dependencies = [
[[package]]
name = "libp2p-websocket"
-version = "0.43.2"
+version = "0.44.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85b953b6803a1f3161a989538974d72511c4e48a4af355337b6fb90723c56c05"
+checksum = "888b2ff2e5d8dcef97283daab35ad1043d18952b65e05279eecbe02af4c6e347"
dependencies = [
"either",
"futures",
"futures-rustls",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"libp2p-identity",
"parking_lot",
"pin-project-lite",
@@ -4269,14 +5223,14 @@ dependencies = [
[[package]]
name = "libp2p-websocket-websys"
-version = "0.3.3"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f95cd8a32fcf94ad1e5c2de37c2a05a5a4188d8358b005859a0fc9e63b6953bc"
+checksum = "38cf9b429dd07be52cd82c4c484b1694df4209210a7db3b9ffb00c7606e230c8"
dependencies = [
"bytes",
"futures",
"js-sys",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"parking_lot",
"send_wrapper 0.6.0",
"thiserror",
@@ -4287,13 +5241,13 @@ dependencies = [
[[package]]
name = "libp2p-yamux"
-version = "0.45.2"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ddd5265f6b80f94d48a3963541aad183cc598a645755d2f1805a373e41e0716b"
+checksum = "788b61c80789dba9760d8c669a5bedb642c8267555c803fabd8396e4ca5c5882"
dependencies = [
"either",
"futures",
- "libp2p-core",
+ "libp2p-core 0.42.0",
"thiserror",
"tracing",
"yamux 0.12.1",
@@ -4329,7 +5283,7 @@ version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
dependencies = [
- "autocfg 1.4.0",
+ "autocfg 1.3.0",
"scopeguard",
]
@@ -4418,6 +5372,16 @@ dependencies = [
"unicase",
]
+[[package]]
+name = "minicov"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c71e683cd655513b99affab7d317deb690528255a0d5f717f1024093c12b169"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -4442,19 +5406,6 @@ dependencies = [
"adler2",
]
-[[package]]
-name = "minreq"
-version = "2.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "763d142cdff44aaadd9268bebddb156ef6c65a0e13486bb81673cf2d8739f9b0"
-dependencies = [
- "log",
- "once_cell",
- "rustls 0.21.12",
- "rustls-webpki 0.101.7",
- "webpki-roots 0.25.4",
-]
-
[[package]]
name = "mio"
version = "0.8.11"
@@ -4531,7 +5482,7 @@ dependencies = [
"cfg-if",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -4554,9 +5505,9 @@ dependencies = [
[[package]]
name = "multiaddr"
-version = "0.18.2"
+version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961"
+checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070"
dependencies = [
"arrayref",
"byteorder",
@@ -4567,7 +5518,7 @@ dependencies = [
"percent-encoding",
"serde",
"static_assertions",
- "unsigned-varint 0.8.0",
+ "unsigned-varint 0.7.2",
"url",
]
@@ -4614,14 +5565,16 @@ dependencies = [
[[package]]
name = "nat-detection"
-version = "0.2.7"
+version = "0.2.8"
dependencies = [
"clap",
"clap-verbosity-flag",
"color-eyre",
"futures",
- "libp2p",
+ "libp2p 0.54.1",
+ "sn_build_info",
"sn_networking",
+ "sn_protocol",
"tokio",
"tracing",
"tracing-log 0.2.0",
@@ -4729,7 +5682,7 @@ dependencies = [
[[package]]
name = "node-launchpad"
-version = "0.3.19"
+version = "0.4.0"
dependencies = [
"atty",
"better-panic",
@@ -4752,13 +5705,17 @@ dependencies = [
"pretty_assertions",
"prometheus-parse",
"ratatui",
- "reqwest 0.12.8",
+ "regex",
+ "reqwest 0.12.7",
"serde",
"serde_json",
"signal-hook",
"sn-node-manager",
"sn-releases",
+ "sn_build_info",
+ "sn_evm",
"sn_peers_acquisition",
+ "sn_protocol",
"sn_service_management",
"strip-ansi-escapes",
"strum",
@@ -4816,27 +5773,13 @@ dependencies = [
"winapi",
]
-[[package]]
-name = "num"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
-dependencies = [
- "num-bigint 0.4.6",
- "num-complex",
- "num-integer",
- "num-iter",
- "num-rational",
- "num-traits",
-]
-
[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
- "autocfg 1.4.0",
+ "autocfg 1.3.0",
"num-integer",
"num-traits",
]
@@ -4852,16 +5795,6 @@ dependencies = [
"serde",
]
-[[package]]
-name = "num-complex"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
-dependencies = [
- "num-traits",
- "serde",
-]
-
[[package]]
name = "num-conv"
version = "0.1.0"
@@ -4888,46 +5821,43 @@ dependencies = [
]
[[package]]
-name = "num-iter"
-version = "0.1.45"
+name = "num-traits"
+version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
- "autocfg 1.4.0",
- "num-integer",
- "num-traits",
+ "autocfg 1.3.0",
+ "libm",
]
[[package]]
-name = "num-rational"
-version = "0.4.2"
+name = "num_cpus"
+version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
+checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
- "num-bigint 0.4.6",
- "num-integer",
- "num-traits",
- "serde",
+ "hermit-abi 0.3.9",
+ "libc",
]
[[package]]
-name = "num-traits"
-version = "0.2.19"
+name = "num_enum"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179"
dependencies = [
- "autocfg 1.4.0",
- "libm",
+ "num_enum_derive",
]
[[package]]
-name = "num_cpus"
-version = "1.16.0"
+name = "num_enum_derive"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
+checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56"
dependencies = [
- "hermit-abi 0.3.9",
- "libc",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
]
[[package]]
@@ -4965,12 +5895,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.20.1"
+version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1"
-dependencies = [
- "portable-atomic",
-]
+checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "oorandom"
@@ -5133,8 +6060,8 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594"
dependencies = [
- "ecdsa",
- "elliptic-curve",
+ "ecdsa 0.14.8",
+ "elliptic-curve 0.12.3",
"sha2 0.10.8",
]
@@ -5144,7 +6071,7 @@ version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "135590d8bdba2b31346f9cd1fb2a912329f5135e832a4f422942eb6ead8b6b3b"
dependencies = [
- "group",
+ "group 0.12.1",
]
[[package]]
@@ -5162,6 +6089,32 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "parity-scale-codec"
+version = "3.6.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee"
+dependencies = [
+ "arrayvec",
+ "bitvec",
+ "byte-slice-cast",
+ "impl-trait-for-tuples",
+ "parity-scale-codec-derive",
+ "serde",
+]
+
+[[package]]
+name = "parity-scale-codec-derive"
+version = "3.6.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c"
+dependencies = [
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "parking"
version = "2.2.1"
@@ -5273,7 +6226,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -5294,9 +6247,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
dependencies = [
"fixedbitset",
- "indexmap 2.6.0",
- "serde",
- "serde_derive",
+ "indexmap 2.5.0",
]
[[package]]
@@ -5316,7 +6267,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -5353,9 +6304,9 @@ dependencies = [
[[package]]
name = "pkg-config"
-version = "0.3.31"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
+checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
[[package]]
name = "plist"
@@ -5364,7 +6315,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016"
dependencies = [
"base64 0.22.1",
- "indexmap 2.6.0",
+ "indexmap 2.5.0",
"quick-xml 0.32.0",
"serde",
"time",
@@ -5438,9 +6389,9 @@ dependencies = [
[[package]]
name = "portable-atomic"
-version = "1.9.0"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
+checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265"
[[package]]
name = "powerfmt"
@@ -5532,6 +6483,48 @@ dependencies = [
"yansi",
]
+[[package]]
+name = "primitive-types"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2"
+dependencies = [
+ "fixed-hash",
+ "impl-codec",
+ "uint",
+]
+
+[[package]]
+name = "proc-macro-crate"
+version = "3.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
+dependencies = [
+ "toml_edit",
+]
+
+[[package]]
+name = "proc-macro-error-attr2"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
+[[package]]
+name = "proc-macro-error2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
+dependencies = [
+ "proc-macro-error-attr2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
[[package]]
name = "proc-macro2"
version = "1.0.86"
@@ -5567,7 +6560,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -5693,26 +6686,13 @@ dependencies = [
"byteorder",
]
-[[package]]
-name = "quick-protobuf-codec"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8ededb1cd78531627244d51dd0c7139fbe736c7d57af0092a76f0ffb2f56e98"
-dependencies = [
- "asynchronous-codec 0.6.2",
- "bytes",
- "quick-protobuf",
- "thiserror",
- "unsigned-varint 0.7.2",
-]
-
[[package]]
name = "quick-protobuf-codec"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15a0580ab32b169745d7a39db2ba969226ca16738931be152a3209b409de2474"
dependencies = [
- "asynchronous-codec 0.7.0",
+ "asynchronous-codec",
"bytes",
"quick-protobuf",
"thiserror",
@@ -5866,6 +6846,7 @@ dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
+ "serde",
]
[[package]]
@@ -6039,7 +7020,7 @@ dependencies = [
"strum_macros",
"unicode-segmentation",
"unicode-truncate",
- "unicode-width 0.1.14",
+ "unicode-width",
]
[[package]]
@@ -6085,9 +7066,9 @@ dependencies = [
[[package]]
name = "redox_syscall"
-version = "0.5.7"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
+checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853"
dependencies = [
"bitflags 2.6.0",
]
@@ -6190,9 +7171,9 @@ dependencies = [
[[package]]
name = "reqwest"
-version = "0.12.8"
+version = "0.12.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b"
+checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63"
dependencies = [
"base64 0.22.1",
"bytes",
@@ -6213,7 +7194,7 @@ dependencies = [
"pin-project-lite",
"quinn",
"rustls 0.23.13",
- "rustls-pemfile 2.2.0",
+ "rustls-pemfile 2.1.3",
"rustls-pki-types",
"serde",
"serde_json",
@@ -6246,11 +7227,21 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb"
dependencies = [
- "crypto-bigint",
+ "crypto-bigint 0.4.9",
"hmac 0.12.1",
"zeroize",
]
+[[package]]
+name = "rfc6979"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
+dependencies = [
+ "hmac 0.12.1",
+ "subtle",
+]
+
[[package]]
name = "rgb"
version = "0.8.50"
@@ -6290,6 +7281,16 @@ dependencies = [
"windows-sys 0.52.0",
]
+[[package]]
+name = "rlp"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec"
+dependencies = [
+ "bytes",
+ "rustc-hex",
+]
+
[[package]]
name = "rmp"
version = "0.8.14"
@@ -6324,17 +7325,6 @@ dependencies = [
"serde_derive",
]
-[[package]]
-name = "rpassword"
-version = "7.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f"
-dependencies = [
- "libc",
- "rtoolbox",
- "windows-sys 0.48.0",
-]
-
[[package]]
name = "rtnetlink"
version = "0.10.1"
@@ -6351,15 +7341,35 @@ dependencies = [
]
[[package]]
-name = "rtoolbox"
-version = "0.0.2"
+name = "ruint"
+version = "1.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e"
+checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286"
dependencies = [
- "libc",
- "windows-sys 0.48.0",
+ "alloy-rlp",
+ "ark-ff 0.3.0",
+ "ark-ff 0.4.2",
+ "bytes",
+ "fastrlp",
+ "num-bigint 0.4.6",
+ "num-traits",
+ "parity-scale-codec",
+ "primitive-types",
+ "proptest",
+ "rand 0.8.5",
+ "rlp",
+ "ruint-macro",
+ "serde",
+ "valuable",
+ "zeroize",
]
+[[package]]
+name = "ruint-macro"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18"
+
[[package]]
name = "rust-ini"
version = "0.19.0"
@@ -6382,13 +7392,28 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+[[package]]
+name = "rustc-hex"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6"
+
+[[package]]
+name = "rustc_version"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee"
+dependencies = [
+ "semver 0.11.0",
+]
+
[[package]]
name = "rustc_version"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
dependencies = [
- "semver",
+ "semver 1.0.23",
]
[[package]]
@@ -6423,19 +7448,7 @@ dependencies = [
"log",
"ring 0.16.20",
"sct 0.6.1",
- "webpki 0.21.4",
-]
-
-[[package]]
-name = "rustls"
-version = "0.20.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99"
-dependencies = [
- "log",
- "ring 0.16.20",
- "sct 0.7.1",
- "webpki 0.22.4",
+ "webpki",
]
[[package]]
@@ -6464,15 +7477,6 @@ dependencies = [
"zeroize",
]
-[[package]]
-name = "rustls-pemfile"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9"
-dependencies = [
- "base64 0.13.1",
-]
-
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
@@ -6484,18 +7488,19 @@ dependencies = [
[[package]]
name = "rustls-pemfile"
-version = "2.2.0"
+version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
+checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425"
dependencies = [
+ "base64 0.22.1",
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
-version = "1.9.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55"
+checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0"
[[package]]
name = "rustls-webpki"
@@ -6600,7 +7605,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928"
dependencies = [
- "base16ct",
+ "base16ct 0.1.1",
"der 0.6.1",
"generic-array 0.14.7",
"pkcs8 0.9.0",
@@ -6609,25 +7614,28 @@ dependencies = [
]
[[package]]
-name = "secp256k1"
-version = "0.20.3"
+name = "sec1"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a"
+checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
- "rand 0.6.5",
- "secp256k1-sys 0.4.2",
- "serde",
+ "base16ct 0.2.0",
+ "der 0.7.9",
+ "generic-array 0.14.7",
+ "pkcs8 0.10.2",
+ "subtle",
+ "zeroize",
]
[[package]]
name = "secp256k1"
-version = "0.28.2"
+version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d24b59d129cdadea20aea4fb2352fa053712e5d713eee47d700cd4b2bc002f10"
+checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a"
dependencies = [
- "bitcoin_hashes 0.13.0",
- "rand 0.8.5",
- "secp256k1-sys 0.9.2",
+ "rand 0.6.5",
+ "secp256k1-sys",
+ "serde",
]
[[package]]
@@ -6639,15 +7647,6 @@ dependencies = [
"cc",
]
-[[package]]
-name = "secp256k1-sys"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5d1746aae42c19d583c3c1a8c646bfad910498e2051c551a7f2e3c0c9fbb7eb"
-dependencies = [
- "cc",
-]
-
[[package]]
name = "secrecy"
version = "0.8.0"
@@ -6663,7 +7662,7 @@ version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9439a0cb3efb35e080a1576e3e00a804caab04010adc802aed88cf539b103ed"
dependencies = [
- "aes 0.8.4",
+ "aes",
"bincode",
"brotli",
"bytes",
@@ -6683,6 +7682,15 @@ dependencies = [
"xor_name",
]
+[[package]]
+name = "semver"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6"
+dependencies = [
+ "semver-parser",
+]
+
[[package]]
name = "semver"
version = "1.0.23"
@@ -6692,6 +7700,15 @@ dependencies = [
"serde",
]
+[[package]]
+name = "semver-parser"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7"
+dependencies = [
+ "pest",
+]
+
[[package]]
name = "send_wrapper"
version = "0.4.0"
@@ -6713,6 +7730,17 @@ dependencies = [
"serde_derive",
]
+[[package]]
+name = "serde-wasm-bindgen"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b"
+dependencies = [
+ "js-sys",
+ "serde",
+ "wasm-bindgen",
+]
+
[[package]]
name = "serde_bytes"
version = "0.11.15"
@@ -6730,7 +7758,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -6747,9 +7775,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
-version = "0.6.8"
+version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
+checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
dependencies = [
"serde",
]
@@ -6764,15 +7792,45 @@ dependencies = [
]
[[package]]
-name = "serde_urlencoded"
-version = "0.7.1"
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_with"
+version = "3.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817"
+dependencies = [
+ "base64 0.22.1",
+ "chrono",
+ "hex 0.4.3",
+ "indexmap 1.9.3",
+ "indexmap 2.5.0",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "serde_with_macros",
+ "time",
+]
+
+[[package]]
+name = "serde_with_macros"
+version = "3.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d"
dependencies = [
- "form_urlencoded",
- "itoa",
- "ryu",
- "serde",
+ "darling",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
]
[[package]]
@@ -6781,7 +7839,7 @@ version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
- "indexmap 2.6.0",
+ "indexmap 2.5.0",
"itoa",
"ryu",
"serde",
@@ -6866,6 +7924,26 @@ dependencies = [
"opaque-debug 0.3.1",
]
+[[package]]
+name = "sha3"
+version = "0.10.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60"
+dependencies = [
+ "digest 0.10.7",
+ "keccak",
+]
+
+[[package]]
+name = "sha3-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46"
+dependencies = [
+ "cc",
+ "cfg-if",
+]
+
[[package]]
name = "sharded-slab"
version = "0.1.7"
@@ -6875,12 +7953,6 @@ dependencies = [
"lazy_static",
]
-[[package]]
-name = "shell-words"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
-
[[package]]
name = "shlex"
version = "1.3.0"
@@ -6934,6 +8006,7 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
+ "digest 0.10.7",
"rand_core 0.6.4",
]
@@ -6943,7 +8016,7 @@ version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
dependencies = [
- "autocfg 1.4.0",
+ "autocfg 1.3.0",
]
[[package]]
@@ -6954,7 +8027,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "sn-node-manager"
-version = "0.10.6"
+version = "0.11.0"
dependencies = [
"assert_cmd",
"assert_fs",
@@ -6966,19 +8039,21 @@ dependencies = [
"colored",
"dirs-next",
"indicatif",
- "libp2p",
+ "libp2p 0.54.1",
"libp2p-identity",
"mockall 0.12.1",
"nix 0.27.1",
"predicates 3.1.2",
"prost 0.9.0",
"rand 0.8.5",
- "reqwest 0.12.8",
- "semver",
+ "reqwest 0.12.7",
+ "semver 1.0.23",
"serde",
"serde_json",
"service-manager",
"sn-releases",
+ "sn_build_info",
+ "sn_evm",
"sn_logging",
"sn_peers_acquisition",
"sn_protocol",
@@ -7005,8 +8080,8 @@ dependencies = [
"flate2",
"lazy_static",
"regex",
- "reqwest 0.12.8",
- "semver",
+ "reqwest 0.12.7",
+ "semver 1.0.23",
"serde_json",
"tar",
"thiserror",
@@ -7014,28 +8089,6 @@ dependencies = [
"zip",
]
-[[package]]
-name = "sn_auditor"
-version = "0.3.5"
-dependencies = [
- "blsttc",
- "clap",
- "color-eyre",
- "dirs-next",
- "futures",
- "graphviz-rust",
- "lazy_static",
- "serde",
- "serde_json",
- "sn_client",
- "sn_logging",
- "sn_peers_acquisition",
- "tiny_http",
- "tokio",
- "tracing",
- "urlencoding",
-]
-
[[package]]
name = "sn_bls_ckd"
version = "0.2.1"
@@ -7050,102 +8103,11 @@ dependencies = [
[[package]]
name = "sn_build_info"
-version = "0.1.15"
-dependencies = [
- "vergen",
-]
-
-[[package]]
-name = "sn_cli"
-version = "0.95.3"
+version = "0.1.16"
dependencies = [
- "aes 0.7.5",
- "base64 0.22.1",
- "bitcoin",
- "block-modes",
- "blsttc",
- "bytes",
"chrono",
- "clap",
- "color-eyre",
- "criterion",
- "custom_debug",
- "dialoguer",
- "dirs-next",
- "eyre",
- "futures",
- "hex 0.4.3",
- "indicatif",
- "libp2p",
- "rand 0.8.5",
- "rayon",
- "reqwest 0.12.8",
- "rmp-serde",
- "rpassword",
- "serde",
- "sn_build_info",
- "sn_client",
- "sn_logging",
- "sn_peers_acquisition",
- "sn_protocol",
- "tempfile",
- "tiny-keccak",
- "tokio",
- "tracing",
- "url",
- "walkdir",
- "xor_name",
-]
-
-[[package]]
-name = "sn_client"
-version = "0.110.4"
-dependencies = [
- "assert_matches",
- "async-trait",
- "backoff",
- "bip39",
- "blsttc",
- "bytes",
- "console_error_panic_hook",
- "crdts",
- "custom_debug",
- "dashmap",
- "dirs-next",
- "eyre",
- "futures",
- "getrandom 0.2.15",
- "hex 0.4.3",
- "itertools 0.12.1",
- "libp2p",
- "libp2p-identity",
- "petgraph",
- "prometheus-client",
- "rand 0.8.5",
- "rayon",
- "rmp-serde",
- "self_encryption",
- "serde",
- "sn_bls_ckd",
- "sn_client",
- "sn_curv",
- "sn_logging",
- "sn_networking",
- "sn_peers_acquisition",
- "sn_protocol",
- "sn_registers",
- "sn_transfers",
- "tempfile",
- "thiserror",
- "tiny-keccak",
- "tokio",
"tracing",
- "tracing-wasm",
- "wasm-bindgen",
- "wasm-bindgen-futures",
- "wasmtimer",
- "web-sys",
- "xor_name",
+ "vergen",
]
[[package]]
@@ -7169,53 +8131,44 @@ dependencies = [
"pairing-plus",
"rand 0.6.5",
"rand 0.7.3",
- "secp256k1 0.20.3",
+ "secp256k1",
"serde",
"serde_bytes",
"serde_derive",
"sha2 0.8.2",
"sha2 0.9.9",
- "sha3",
+ "sha3 0.9.1",
"thiserror",
"typenum",
"zeroize",
]
[[package]]
-name = "sn_faucet"
-version = "0.5.3"
+name = "sn_evm"
+version = "0.1.1"
dependencies = [
- "assert_fs",
- "base64 0.22.1",
- "bitcoin",
- "blsttc",
- "clap",
- "color-eyre",
- "dirs-next",
- "fs2",
- "futures",
+ "custom_debug",
+ "evmlib",
"hex 0.4.3",
- "indicatif",
- "minreq",
- "reqwest 0.12.8",
+ "lazy_static",
+ "libp2p 0.53.2",
+ "rand 0.8.5",
+ "ring 0.17.8",
+ "rmp-serde",
"serde",
"serde_json",
- "sn_build_info",
- "sn_cli",
- "sn_client",
- "sn_logging",
- "sn_peers_acquisition",
- "sn_protocol",
- "sn_transfers",
+ "tempfile",
+ "thiserror",
+ "tiny-keccak",
"tokio",
"tracing",
- "url",
- "warp",
+ "wasmtimer",
+ "xor_name",
]
[[package]]
name = "sn_logging"
-version = "0.2.36"
+version = "0.2.37"
dependencies = [
"chrono",
"color-eyre",
@@ -7240,7 +8193,7 @@ dependencies = [
[[package]]
name = "sn_metrics"
-version = "0.1.16"
+version = "0.1.17"
dependencies = [
"clap",
"color-eyre",
@@ -7254,7 +8207,7 @@ dependencies = [
[[package]]
name = "sn_networking"
-version = "0.18.4"
+version = "0.19.0"
dependencies = [
"aes-gcm-siv",
"async-trait",
@@ -7269,15 +8222,17 @@ dependencies = [
"hyper 0.14.30",
"itertools 0.12.1",
"lazy_static",
- "libp2p",
+ "libp2p 0.54.1",
"libp2p-identity",
"prometheus-client",
"quickcheck",
"rand 0.8.5",
"rayon",
"rmp-serde",
+ "self_encryption",
"serde",
"sn_build_info",
+ "sn_evm",
"sn_protocol",
"sn_registers",
"sn_transfers",
@@ -7297,36 +8252,38 @@ dependencies = [
[[package]]
name = "sn_node"
-version = "0.111.4"
+version = "0.112.0"
dependencies = [
"assert_fs",
- "assert_matches",
"async-trait",
+ "autonomi",
"blsttc",
"bytes",
"chrono",
"clap",
"color-eyre",
+ "const-hex",
"crdts",
"custom_debug",
"dirs-next",
+ "evmlib",
"eyre",
"file-rotate",
"futures",
"hex 0.4.3",
"itertools 0.12.1",
- "libp2p",
+ "libp2p 0.54.1",
"prometheus-client",
"prost 0.9.0",
"rand 0.8.5",
"rayon",
- "reqwest 0.12.8",
+ "reqwest 0.12.7",
"rmp-serde",
"self_encryption",
"serde",
"serde_json",
"sn_build_info",
- "sn_client",
+ "sn_evm",
"sn_logging",
"sn_networking",
"sn_peers_acquisition",
@@ -7335,6 +8292,7 @@ dependencies = [
"sn_service_management",
"sn_transfers",
"strum",
+ "sysinfo",
"tempfile",
"test_utils",
"thiserror",
@@ -7351,7 +8309,7 @@ dependencies = [
[[package]]
name = "sn_node_rpc_client"
-version = "0.6.31"
+version = "0.6.32"
dependencies = [
"assert_fs",
"async-trait",
@@ -7359,9 +8317,9 @@ dependencies = [
"clap",
"color-eyre",
"hex 0.4.3",
- "libp2p",
+ "libp2p 0.54.1",
"libp2p-identity",
- "sn_client",
+ "sn_build_info",
"sn_logging",
"sn_node",
"sn_peers_acquisition",
@@ -7378,13 +8336,13 @@ dependencies = [
[[package]]
name = "sn_peers_acquisition"
-version = "0.5.3"
+version = "0.5.4"
dependencies = [
"clap",
"lazy_static",
- "libp2p",
+ "libp2p 0.54.1",
"rand 0.8.5",
- "reqwest 0.12.8",
+ "reqwest 0.12.7",
"sn_protocol",
"thiserror",
"tokio",
@@ -7394,7 +8352,7 @@ dependencies = [
[[package]]
name = "sn_protocol"
-version = "0.17.11"
+version = "0.17.12"
dependencies = [
"blsttc",
"bytes",
@@ -7404,13 +8362,14 @@ dependencies = [
"dirs-next",
"hex 0.4.3",
"lazy_static",
- "libp2p",
+ "libp2p 0.54.1",
"prost 0.9.0",
"rmp-serde",
"serde",
"serde_json",
"sha2 0.10.8",
"sn_build_info",
+ "sn_evm",
"sn_registers",
"sn_transfers",
"thiserror",
@@ -7423,7 +8382,7 @@ dependencies = [
[[package]]
name = "sn_registers"
-version = "0.3.21"
+version = "0.4.0"
dependencies = [
"blsttc",
"crdts",
@@ -7440,21 +8399,21 @@ dependencies = [
[[package]]
name = "sn_service_management"
-version = "0.3.14"
+version = "0.4.0"
dependencies = [
"async-trait",
"dirs-next",
- "libp2p",
+ "libp2p 0.54.1",
"libp2p-identity",
"mockall 0.11.4",
"prost 0.9.0",
- "semver",
+ "semver 1.0.23",
"serde",
"serde_json",
"service-manager",
+ "sn_evm",
"sn_logging",
"sn_protocol",
- "sn_transfers",
"sysinfo",
"thiserror",
"tokio",
@@ -7466,7 +8425,7 @@ dependencies = [
[[package]]
name = "sn_transfers"
-version = "0.19.3"
+version = "0.20.0"
dependencies = [
"assert_fs",
"blsttc",
@@ -7478,7 +8437,7 @@ dependencies = [
"fs2",
"hex 0.4.3",
"lazy_static",
- "libp2p",
+ "libp2p 0.54.1",
"pprof",
"rand 0.8.5",
"rayon",
@@ -7509,7 +8468,7 @@ dependencies = [
"curve25519-dalek 4.1.3",
"rand_core 0.6.4",
"ring 0.17.8",
- "rustc_version",
+ "rustc_version 0.4.1",
"sha2 0.10.8",
"subtle",
]
@@ -7623,7 +8582,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustversion",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -7634,9 +8593,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "symbolic-common"
-version = "12.12.0"
+version = "12.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "366f1b4c6baf6cfefc234bbd4899535fca0b06c74443039a73f6dfb2fad88d77"
+checksum = "9fdf97c441f18a4f92425b896a4ec7a27e03631a0b1047ec4e34e9916a9a167e"
dependencies = [
"debugid",
"memmap2",
@@ -7646,9 +8605,9 @@ dependencies = [
[[package]]
name = "symbolic-demangle"
-version = "12.12.0"
+version = "12.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aba05ba5b9962ea5617baf556293720a8b2d0a282aa14ee4bf10e22efc7da8c8"
+checksum = "bc8ece6b129e97e53d1fbb3f61d33a6a9e5369b11d01228c068094d6d134eaea"
dependencies = [
"cpp_demangle",
"rustc-demangle",
@@ -7668,15 +8627,27 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.79"
+version = "2.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
+checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
+[[package]]
+name = "syn-solidity"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab661c8148c2261222a4d641ad5477fd4bea79406a99056096a0b41b35617a5"
+dependencies = [
+ "paste",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
[[package]]
name = "sync_wrapper"
version = "0.1.2"
@@ -7700,7 +8671,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -7747,9 +8718,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
-version = "0.4.42"
+version = "0.4.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ff6c40d3aedb5e06b57c6f669ad17ab063dd1e63d977c6a88e7f4dfa4f04020"
+checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
dependencies = [
"filetime",
"libc",
@@ -7758,9 +8729,9 @@ dependencies = [
[[package]]
name = "tempfile"
-version = "3.13.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
+checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
dependencies = [
"cfg-if",
"fastrand",
@@ -7771,12 +8742,12 @@ dependencies = [
[[package]]
name = "terminal_size"
-version = "0.4.0"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef"
+checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7"
dependencies = [
"rustix",
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -7787,33 +8758,37 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
[[package]]
name = "test_utils"
-version = "0.4.7"
+version = "0.4.8"
dependencies = [
+ "bytes",
"color-eyre",
"dirs-next",
- "libp2p",
+ "evmlib",
+ "libp2p 0.54.1",
+ "rand 0.8.5",
"serde",
"serde_json",
+ "sn_peers_acquisition",
]
[[package]]
name = "thiserror"
-version = "1.0.64"
+version = "1.0.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
+checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.64"
+version = "1.0.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
+checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -7889,17 +8864,15 @@ dependencies = [
[[package]]
name = "tiny_http"
-version = "0.12.0"
+version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82"
+checksum = "e0d6ef4e10d23c1efb862eecad25c5054429a71958b4eeef85eb5e7170b477ca"
dependencies = [
"ascii",
"chunked_transfer",
- "httpdate",
"log",
- "rustls 0.20.9",
- "rustls-pemfile 0.2.1",
- "zeroize",
+ "time",
+ "url",
]
[[package]]
@@ -7929,7 +8902,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "token_supplies"
-version = "0.1.54"
+version = "0.1.55"
dependencies = [
"dirs-next",
"reqwest 0.11.27",
@@ -7975,7 +8948,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -7986,7 +8959,7 @@ checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6"
dependencies = [
"rustls 0.19.1",
"tokio",
- "webpki 0.21.4",
+ "webpki",
]
[[package]]
@@ -8019,6 +8992,7 @@ dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
+ "tokio-util 0.7.12",
]
[[package]]
@@ -8083,11 +9057,11 @@ dependencies = [
[[package]]
name = "toml_edit"
-version = "0.22.22"
+version = "0.22.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
+checksum = "3b072cee73c449a636ffd6f32bd8de3a9f7119139aff882f44943ce2986dc5cf"
dependencies = [
- "indexmap 2.6.0",
+ "indexmap 2.5.0",
"serde",
"serde_spanned",
"toml_datetime",
@@ -8119,7 +9093,7 @@ dependencies = [
"tokio-rustls 0.22.0",
"tokio-stream",
"tokio-util 0.6.10",
- "tower",
+ "tower 0.4.13",
"tower-layer",
"tower-service",
"tracing",
@@ -8148,7 +9122,7 @@ dependencies = [
"prost 0.11.9",
"tokio",
"tokio-stream",
- "tower",
+ "tower 0.4.13",
"tower-layer",
"tower-service",
"tracing",
@@ -8186,6 +9160,20 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "tower"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper 0.1.2",
+ "tower-layer",
+ "tower-service",
+]
+
[[package]]
name = "tower-layer"
version = "0.3.3"
@@ -8230,7 +9218,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -8350,18 +9338,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
dependencies = [
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
-name = "tracing-wasm"
-version = "0.2.1"
+name = "tracing-web"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4575c663a174420fa2d78f4108ff68f65bf2fbb7dd89f33749b6e826b3626e07"
+checksum = "b9e6a141feebd51f8d91ebfd785af50fca223c570b86852166caa3b141defe7c"
dependencies = [
- "tracing",
+ "js-sys",
+ "tracing-core",
"tracing-subscriber",
"wasm-bindgen",
+ "web-sys",
]
[[package]]
@@ -8377,7 +9367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3e785f863a3af4c800a2a669d0b64c879b538738e352607e2624d03f868dc01"
dependencies = [
"crossterm 0.27.0",
- "unicode-width 0.1.14",
+ "unicode-width",
]
[[package]]
@@ -8407,9 +9397,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
-version = "0.1.7"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
+checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "uint"
@@ -8440,9 +9430,9 @@ dependencies = [
[[package]]
name = "unicode-bidi"
-version = "0.3.17"
+version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893"
+checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
[[package]]
name = "unicode-bom"
@@ -8479,7 +9469,7 @@ checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf"
dependencies = [
"itertools 0.13.0",
"unicode-segmentation",
- "unicode-width 0.1.14",
+ "unicode-width",
]
[[package]]
@@ -8489,10 +9479,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
-name = "unicode-width"
-version = "0.2.0"
+name = "unicode-xid"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "universal-hash"
@@ -8515,10 +9505,6 @@ name = "unsigned-varint"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105"
-dependencies = [
- "asynchronous-codec 0.6.2",
- "bytes",
-]
[[package]]
name = "unsigned-varint"
@@ -8730,7 +9716,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
"wasm-bindgen-shared",
]
@@ -8764,7 +9750,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -8775,16 +9761,43 @@ version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
+[[package]]
+name = "wasm-bindgen-test"
+version = "0.3.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68497a05fb21143a08a7d24fc81763384a3072ee43c44e86aad1744d6adef9d9"
+dependencies = [
+ "console_error_panic_hook",
+ "js-sys",
+ "minicov",
+ "scoped-tls",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-test-macro"
+version = "0.3.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b8220be1fa9e4c889b30fd207d4906657e7e90b12e0e6b0c8b8d8709f5de021"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.77",
+]
+
[[package]]
name = "wasmtimer"
-version = "0.2.0"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f656cd8858a5164932d8a90f936700860976ec21eb00e0fe2aa8cab13f6b4cf"
+checksum = "c7ed9d8b15c7fb594d72bfb4b5a276f3d2029333cd93a932f376f5937f6f80ee"
dependencies = [
"futures",
"js-sys",
"parking_lot",
"pin-utils",
+ "serde",
"slab",
"wasm-bindgen",
]
@@ -8819,16 +9832,6 @@ dependencies = [
"untrusted 0.7.1",
]
-[[package]]
-name = "webpki"
-version = "0.22.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53"
-dependencies = [
- "ring 0.17.8",
- "untrusted 0.9.0",
-]
-
[[package]]
name = "webpki-roots"
version = "0.25.4"
@@ -9123,9 +10126,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
-version = "0.6.20"
+version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
dependencies = [
"memchr",
]
@@ -9297,7 +10300,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -9317,7 +10320,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.77",
]
[[package]]
@@ -9326,7 +10329,7 @@ version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
dependencies = [
- "aes 0.8.4",
+ "aes",
"byteorder",
"bzip2",
"constant_time_eq",
diff --git a/Cargo.toml b/Cargo.toml
index 4cdf8b3458..779485a2c8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,11 +2,11 @@
resolver = "2"
members = [
"autonomi",
- "sn_auditor",
+ "autonomi-cli",
+ "evmlib",
+ "evm_testnet",
"sn_build_info",
- "sn_cli",
- "sn_client",
- "sn_faucet",
+ "sn_evm",
"sn_logging",
"sn_metrics",
"nat-detection",
@@ -29,7 +29,7 @@ arithmetic_overflow = "forbid"
mutable_transmutes = "forbid"
no_mangle_const_items = "forbid"
unknown_crate_types = "forbid"
-unsafe_code = "forbid"
+unsafe_code = "warn"
trivial_casts = "warn"
trivial_numeric_casts = "warn"
unused_extern_crates = "warn"
diff --git a/Justfile b/Justfile
index 9452125df5..a6f6f90118 100644
--- a/Justfile
+++ b/Justfile
@@ -1,75 +1,11 @@
#!/usr/bin/env just --justfile
-release_repo := "maidsafe/safe_network"
-
-droplet-testbed:
- #!/usr/bin/env bash
-
- DROPLET_NAME="node-manager-testbed"
- REGION="lon1"
- SIZE="s-1vcpu-1gb"
- IMAGE="ubuntu-20-04-x64"
- SSH_KEY_ID="30878672"
-
- droplet_ip=$(doctl compute droplet list \
- --format Name,PublicIPv4 --no-header | grep "^$DROPLET_NAME " | awk '{ print $2 }')
-
- if [ -z "$droplet_ip" ]; then
- droplet_id=$(doctl compute droplet create $DROPLET_NAME \
- --region $REGION \
- --size $SIZE \
- --image $IMAGE \
- --ssh-keys $SSH_KEY_ID \
- --format ID \
- --no-header \
- --wait)
- if [ -z "$droplet_id" ]; then
- echo "Failed to obtain droplet ID"
- exit 1
- fi
-
- echo "Droplet ID: $droplet_id"
- echo "Waiting for droplet IP address..."
- droplet_ip=$(doctl compute droplet get $droplet_id --format PublicIPv4 --no-header)
- while [ -z "$droplet_ip" ]; do
- echo "Still waiting to obtain droplet IP address..."
- sleep 5
- droplet_ip=$(doctl compute droplet get $droplet_id --format PublicIPv4 --no-header)
- done
- fi
- echo "Droplet IP address: $droplet_ip"
-
- nc -zw1 $droplet_ip 22
- exit_code=$?
- while [ $exit_code -ne 0 ]; do
- echo "Waiting on SSH to become available..."
- sleep 5
- nc -zw1 $droplet_ip 22
- exit_code=$?
- done
-
- cargo build --release --target x86_64-unknown-linux-musl
- scp -r ./target/x86_64-unknown-linux-musl/release/safenode-manager \
- root@$droplet_ip:/root/safenode-manager
-
-kill-testbed:
- #!/usr/bin/env bash
-
- DROPLET_NAME="node-manager-testbed"
-
- droplet_id=$(doctl compute droplet list \
- --format Name,ID --no-header | grep "^$DROPLET_NAME " | awk '{ print $2 }')
-
- if [ -z "$droplet_ip" ]; then
- echo "Deleting droplet with ID $droplet_id"
- doctl compute droplet delete $droplet_id
- fi
-
-build-release-artifacts arch:
+build-release-artifacts arch nightly="false":
#!/usr/bin/env bash
set -e
arch="{{arch}}"
+ nightly="{{nightly}}"
supported_archs=(
"x86_64-pc-windows-msvc"
"x86_64-apple-darwin"
@@ -122,28 +58,29 @@ build-release-artifacts arch:
cross_container_opts="--env \"GENESIS_PK=$GENESIS_PK\" --env \"GENESIS_SK=$GENESIS_SK\" --env \"FOUNDATION_PK=$FOUNDATION_PK\" --env \"NETWORK_ROYALTIES_PK=$NETWORK_ROYALTIES_PK\" --env \"PAYMENT_FORWARD_PK=$PAYMENT_FORWARD_PK\""
export CROSS_CONTAINER_OPTS=$cross_container_opts
+ nightly_feature=""
+ if [[ "$nightly" == "true" ]]; then
+ nightly_feature="--features nightly"
+ fi
+
if [[ $arch == arm* || $arch == armv7* || $arch == aarch64* ]]; then
echo "Passing to cross CROSS_CONTAINER_OPTS=$CROSS_CONTAINER_OPTS"
cargo binstall --no-confirm cross
- cross build --release --target $arch --bin faucet --features=distribution
- cross build --release --target $arch --bin nat-detection
- cross build --release --target $arch --bin node-launchpad
- cross build --release --features="network-contacts,distribution" --target $arch --bin safe
- cross build --release --features=network-contacts --target $arch --bin safenode
- cross build --release --target $arch --bin safenode-manager
- cross build --release --target $arch --bin safenodemand
- cross build --release --target $arch --bin safenode_rpc_client
- cross build --release --target $arch --bin sn_auditor
+ cross build --release --target $arch --bin nat-detection $nightly_feature
+ cross build --release --target $arch --bin node-launchpad $nightly_feature
+ cross build --release --features=network-contacts --target $arch --bin autonomi $nightly_feature
+ cross build --release --features=network-contacts,websockets --target $arch --bin safenode $nightly_feature
+ cross build --release --target $arch --bin safenode-manager $nightly_feature
+ cross build --release --target $arch --bin safenodemand $nightly_feature
+ cross build --release --target $arch --bin safenode_rpc_client $nightly_feature
else
- cargo build --release --target $arch --bin faucet --features=distribution
- cargo build --release --target $arch --bin nat-detection
- cargo build --release --target $arch --bin node-launchpad
- cargo build --release --features="network-contacts,distribution" --target $arch --bin safe
- cargo build --release --features=network-contacts --target $arch --bin safenode
- cargo build --release --target $arch --bin safenode-manager
- cargo build --release --target $arch --bin safenodemand
- cargo build --release --target $arch --bin safenode_rpc_client
- cargo build --release --target $arch --bin sn_auditor
+ cargo build --release --target $arch --bin nat-detection $nightly_feature
+ cargo build --release --target $arch --bin node-launchpad $nightly_feature
+ cargo build --release --features=network-contacts --target $arch --bin autonomi $nightly_feature
+ cargo build --release --features=network-contacts,websockets --target $arch --bin safenode $nightly_feature
+ cargo build --release --target $arch --bin safenode-manager $nightly_feature
+ cargo build --release --target $arch --bin safenodemand $nightly_feature
+ cargo build --release --target $arch --bin safenode_rpc_client $nightly_feature
fi
find target/$arch/release -maxdepth 1 -type f -exec cp '{}' artifacts \;
@@ -176,15 +113,13 @@ make-artifacts-directory:
package-all-bins:
#!/usr/bin/env bash
set -e
- just package-bin "faucet"
just package-bin "nat-detection"
just package-bin "node-launchpad"
- just package-bin "safe"
+ just package-bin "autonomi"
just package-bin "safenode"
- just package-bin "safenode_rpc_client"
just package-bin "safenode-manager"
just package-bin "safenodemand"
- just package-bin "sn_auditor"
+ just package-bin "safenode_rpc_client"
package-bin bin version="":
#!/usr/bin/env bash
@@ -203,32 +138,27 @@ package-bin bin version="":
bin="{{bin}}"
supported_bins=(\
- "faucet" \
"nat-detection" \
"node-launchpad" \
- "safe" \
+ "autonomi" \
"safenode" \
"safenode-manager" \
"safenodemand" \
- "safenode_rpc_client" \
- "sn_auditor")
+ "safenode_rpc_client")
crate_dir_name=""
# In the case of the node manager, the actual name of the crate is `sn-node-manager`, but the
# directory it's in is `sn_node_manager`.
bin="{{bin}}"
case "$bin" in
- faucet)
- crate_dir_name="sn_faucet"
- ;;
nat-detection)
crate_dir_name="nat-detection"
;;
node-launchpad)
crate_dir_name="node-launchpad"
;;
- safe)
- crate_dir_name="sn_cli"
+ autonomi)
+ crate_dir_name="autonomi-cli"
;;
safenode)
crate_dir_name="sn_node"
@@ -242,9 +172,6 @@ package-bin bin version="":
safenode_rpc_client)
crate_dir_name="sn_node_rpc_client"
;;
- sn_auditor)
- crate_dir_name="sn_auditor"
- ;;
*)
echo "The $bin binary is not supported"
exit 1
@@ -281,15 +208,13 @@ upload-all-packaged-bins-to-s3:
set -e
binaries=(
- faucet
nat-detection
node-launchpad
- safe
+ autonomi
safenode
safenode-manager
safenode_rpc_client
safenodemand
- sn_auditor
)
for binary in "${binaries[@]}"; do
just upload-packaged-bin-to-s3 "$binary"
@@ -300,17 +225,14 @@ upload-packaged-bin-to-s3 bin_name:
set -e
case "{{bin_name}}" in
- faucet)
- bucket="sn-faucet"
- ;;
nat-detection)
bucket="nat-detection"
;;
node-launchpad)
bucket="node-launchpad"
;;
- safe)
- bucket="sn-cli"
+ autonomi)
+ bucket="autonomi-cli"
;;
safenode)
bucket="sn-node"
@@ -324,9 +246,6 @@ upload-packaged-bin-to-s3 bin_name:
safenode_rpc_client)
bucket="sn-node-rpc-client"
;;
- sn_auditor)
- bucket="sn-auditor"
- ;;
*)
echo "The {{bin_name}} binary is not supported"
exit 1
@@ -351,6 +270,59 @@ upload-packaged-bin-to-s3 bin_name:
fi
done
+delete-s3-bin bin_name version:
+ #!/usr/bin/env bash
+ set -e
+
+ case "{{bin_name}}" in
+ nat-detection)
+ bucket="nat-detection"
+ ;;
+ node-launchpad)
+ bucket="node-launchpad"
+ ;;
+ autonomi)
+ bucket="autonomi-cli"
+ ;;
+ safenode)
+ bucket="sn-node"
+ ;;
+ safenode-manager)
+ bucket="sn-node-manager"
+ ;;
+ safenodemand)
+ bucket="sn-node-manager"
+ ;;
+ safenode_rpc_client)
+ bucket="sn-node-rpc-client"
+ ;;
+ *)
+ echo "The {{bin_name}} binary is not supported"
+ exit 1
+ ;;
+ esac
+
+ architectures=(
+ "x86_64-pc-windows-msvc"
+ "x86_64-apple-darwin"
+ "aarch64-apple-darwin"
+ "x86_64-unknown-linux-musl"
+ "arm-unknown-linux-musleabi"
+ "armv7-unknown-linux-musleabihf"
+ "aarch64-unknown-linux-musl"
+ )
+
+ for arch in "${architectures[@]}"; do
+ zip_filename="{{bin_name}}-{{version}}-${arch}.zip"
+ tar_filename="{{bin_name}}-{{version}}-${arch}.tar.gz"
+ s3_zip_path="s3://$bucket/$zip_filename"
+ s3_tar_path="s3://$bucket/$tar_filename"
+ aws s3 rm "$s3_zip_path"
+ echo "deleted $s3_zip_path"
+ aws s3 rm "$s3_tar_path"
+ echo "deleted $s3_tar_path"
+ done
+
package-all-architectures:
#!/usr/bin/env bash
set -e
@@ -391,15 +363,13 @@ package-arch arch:
cd artifacts/$architecture/release
binaries=(
- faucet
nat-detection
node-launchpad
- safe
+ autonomi
safenode
safenode-manager
safenode_rpc_client
safenodemand
- sn_auditor
)
if [[ "$architecture" == *"windows"* ]]; then
@@ -412,16 +382,3 @@ package-arch arch:
fi
cd ../../..
-
-node-man-integration-tests:
- #!/usr/bin/env bash
- set -e
-
- cargo build --release --bin safenode --bin faucet --bin safenode-manager
- cargo run --release --bin safenode-manager -- local run \
- --node-path target/release/safenode \
- --faucet-path target/release/faucet
- peer=$(cargo run --release --bin safenode-manager -- local status \
- --json | jq -r .nodes[-1].listen_addr[0])
- export SAFE_PEERS=$peer
- cargo test --release --package sn-node-manager --test e2e -- --nocapture
diff --git a/README.md b/README.md
index 6addfb65e8..48751adf0e 100644
--- a/README.md
+++ b/README.md
@@ -1,26 +1,27 @@
-# The Safe Network
+# The Autonomi Network (previously Safe Network)
-[SafenetForum.org](https://safenetforum.org/)
+[Autonomi.com](https://autonomi.com/)
Own your data. Share your disk space. Get paid for doing so.
-The Data on the Safe Network is Decentralised, Autonomous, and built atop of Kademlia and
+The Data on the Autonomi Network is Decentralised, Autonomous, and built atop of Kademlia and
Libp2p.
## Table of Contents
-- [For Users](#for-Users)
+- [For Users](#for-users)
- [For Developers](#for-developers)
- [For the Technical](#for-the-technical)
-- [Using a Local Network](#Using-a-local-network)
+- [Using a Local Network](#using-a-local-network)
- [Metrics Dashboard](#metrics-dashboard)
### For Users
-- [CLI](https://github.com/maidsafe/safe_network/blob/main/sn_cli/README.md) The Command Line
+- [CLI](https://github.com/maidsafe/safe_network/blob/main/autonomi-cli/README.md) The Command Line
Interface, allowing users to interact with the network from their terminal.
- [Node](https://github.com/maidsafe//safe_network/blob/main/sn_node/README.md) The backbone of the
safe network. Nodes can be run on commodity hardware and provide storage space and validation of
transactions to the network.
+- Web App: Coming Soon!
#### Building the Node from Source
@@ -28,66 +29,52 @@ If you wish to build a version of `safenode` from source, some special considera
if you want it to connect to the current beta network.
You should build from the `stable` branch, as follows:
+
```
git checkout stable
-export FOUNDATION_PK=8c89a2230096d07b3013089ffd594b23f468e72f19672c3dc1e50747c4c954fbf54ef8e98809e2d2253b14321e2123ad
-export GENESIS_PK=93fa3c5e1b68ace4fb02845f89f1eb5ff42c64cd31ee1b908d7c3bbb236d009ae4ae9a1a16d42bc7586e88db1248494c
-export NETWORK_ROYALTIES_PK=87ec7d1e5e0252d29b1b0ac42a04d7e8daf7dd9f212a23dbc4a9ca5a6442fdab74196ef7cca150ecd6f9848d49148ed4
-export PAYMENT_FORWARD_PK=887c9371cb9a1467cd45b6f31367520ab44cc40281d52039acfd6f967bdb0c3e214bb81f2a0adcf683bd6608980a7b5f
+export FOUNDATION_PK=88a82d718d16dccc839188eddc9a46cb216667c940cd46285199458c919a170a55490db09763ae216ed25e9db78c3576
+export GENESIS_PK=aa3526db2dbc43998e0b541b8455e2ce9dd4f1cad80090e671da16e3cd11cd5e3550f74c3cefd09ad253d93cacae2320
+export NETWORK_ROYALTIES_PK=8b5463a2c8142959a7b7cfd9295587812eb07ccbe13a85865503c8004eeeb6889ccace3588dcf9f7396784d9ee48f4d5
+export PAYMENT_FORWARD_PK=87d5b511a497183c945df63ab8790a4b94cfe452d00bfbdb39e41ee861384fe0de716a224da1c6fd11356de49877dfc2
cargo build --release --features=network-contacts --bin safenode
```
-For more information about the keys, please refer to the [Keys](#keys) section below.
-
-### For Developers
-
-#### Connecting to the Beta Network
-
-##### Keys
+#### Running the Node
-Various keys in the network control where initial funds are distributed and how ongoing fees and
-royalties are collected. They are also used as part of the node version string, to determine whether
-a connecting node is compatible.
+To run a node and receive rewards, you need to specify your Ethereum address as a parameter. Rewards are paid to the specified address.
-For a client to connect to the current beta network, these keys must be set at build time:
```
-FOUNDATION_PK=8c89a2230096d07b3013089ffd594b23f468e72f19672c3dc1e50747c4c954fbf54ef8e98809e2d2253b14321e2123ad
-GENESIS_PK=93fa3c5e1b68ace4fb02845f89f1eb5ff42c64cd31ee1b908d7c3bbb236d009ae4ae9a1a16d42bc7586e88db1248494c
-NETWORK_ROYALTIES_PK=87ec7d1e5e0252d29b1b0ac42a04d7e8daf7dd9f212a23dbc4a9ca5a6442fdab74196ef7cca150ecd6f9848d49148ed4
-PAYMENT_FORWARD_PK=887c9371cb9a1467cd45b6f31367520ab44cc40281d52039acfd6f967bdb0c3e214bb81f2a0adcf683bd6608980a7b5f
+cargo run --release --bin safenode --features=network-contacts -- --rewards-address <YOUR_ETHEREUM_ADDRESS>
```
-##### Features
+More options about EVM Network below.
+
+### For Developers
+
+#### Build
You should also build `safe` with the `network-contacts` and `distribution` features enabled:
+
```
cargo build --release --features="network-contacts,distribution" --bin safe
```
For `safenode`, only the `network-contacts` feature should be required:
+
```
cargo build --release --features=network-contacts --bin safenode
```
-#### Utility Scripts
+#### Main Crates
-When you start a network there are a few scripts to aid with basic processes:
-
-- `resources/scripts/claim-genesis.sh` which will claim the genesis tokens for a wallet on a launched network (if you
- have set up the foundation wallet locally by adding a `client/account_secret` and regenerating the wallet or directly
- adding the `client/wallet/main_secret_key` itself).
-- `resources/scripts/make-wallets.sh` which if you have a wallet with a balance will create a number of wallets with
- another balance. eg `resources/scripts/make-wallets.sh 5 1` will make 5 wallets with 1 token.
-- `resources/scripts/upload-random-data` will use the existing `client` to upload random data to the network.
-
-- [Client](https://github.com/maidsafe/safe_network/blob/main/sn_client/README.md) The client APIs
- allowing use of the SafeNetwork to users and developers.
-- [Registers](https://github.com/maidsafe/safe_network/blob/main/sn_registers/README.md) The CRDT
- registers structures available on the network.
+- [Autonomi API](https://github.com/maidsafe/safe_network/blob/main/autonomi/README.md) The client APIs
+ allowing use of the Autonomi Network to users and developers.
+- [Autonomi CLI](https://github.com/maidsafe/safe_network/blob/main/autonomi-cli/README.md) The Command Line
+ Interface, allowing users to interact with the network from their terminal.
+- [Node](https://github.com/maidsafe/safe_network/blob/main/sn_node/README.md) The backbone of the
+ autonomi network. Nodes can be run on commodity hardware and run the Network.
- [Node Manager](https://github.com/maidsafe/safe_network/blob/main/sn_node_manager/README.md) Use
to create a local network for development and testing.
-- [Faucet](https://github.com/maidsafe/safe_network/blob/main/sn_faucet/README.md) The local faucet
- server, used to claim genesis and request tokens from the network.
- [Node RPC](https://github.com/maidsafe/safe_network/blob/main/sn_node_rpc_client/README.md) The
RPC server used by the nodes to expose API calls to the outside world.
@@ -99,22 +86,12 @@ The `websockets` feature is available for the `sn_networking` crate, and above,
tcp over websockets.
If building for `wasm32` then `websockets` are enabled by default as this is the only method
-avilable to communicate with a network as things stand. (And that network must have `websockets`
+available to communicate with a network as things stand. (And that network must have `websockets`
enabled.)
-##### Building for wasm32
-
-- Install [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/)
-- `cd sn_client && wasm-pack build`
-
-You can then pull this package into a web app eg, to use it.
+#### Building for wasm32
-eg `await safe.get_data("/ip4/127.0.0.1/tcp/59324/ws/p2p/12D3KooWG6kyBwLVHj5hYK2SqGkP4GqrCz5gfwsvPBYic4c4TeUz","9d7e115061066126482a229822e6d68737bd67d826c269762c0f64ce87af6b4c")`
-
-#### Browser usage
-
-Browser usage is highly experimental, but the wasm32 target for `sn_client` _should_ work here.
-YMMV until stabilised.
+WASM support for the autonomi API is currently under active development. More docs coming soon.
### For the Technical
@@ -127,201 +104,73 @@ YMMV until stabilised.
- [Protocol](https://github.com/maidsafe/safe_network/blob/main/sn_protocol/README.md) The protocol
used by the safe network.
- [Transfers](https://github.com/maidsafe/safe_network/blob/main/sn_transfers/README.md) The
- transfers crate, used to send and receive tokens on the network.
+ transfers crate, used to send and receive tokens Native to the network.
+- [Registers](https://github.com/maidsafe/safe_network/blob/main/sn_registers/README.md) The
+ registers crate, used for the Register CRDT data type on the network.
- [Peers Acquisition](https://github.com/maidsafe/safe_network/blob/main/sn_peers_acquisition/README.md)
- The peers peers acqisition crate, or: how the network layer discovers bootstrap peers.
+ The peers acquisition crate, or: how the network layer discovers bootstrap peers.
- [Build Info](https://github.com/maidsafe/safe_network/blob/main/sn_build_info/README.md) Small
helper used to get the build/commit versioning info for debug purposes.
-## Using a Local Network
-
-We can explore the network's features by using multiple node processes to form a local network.
-
-The latest version of [Rust](https://www.rust-lang.org/learn/get-started) should be installed. If
-you already have an installation, use `rustup update` to get the latest version.
-
-Run all the commands from the root of this repository.
+### Using a Local Network
-### Run the Network
+We can explore the network's features by using multiple node processes to form a local network. We also need to run a
+local EVM network for our nodes and client to connect to.
Follow these steps to create a local network:
-1. Create the test network:
-
-```bash
-cargo run --bin safenode-manager --features local-discovery -- local run --build
-```
-
-2. Verify node status:
+##### 1. Prerequisites
-```bash
-cargo run --bin safenode-manager --features local-discovery -- status
-```
-
-3. Build a tokenized wallet:
-
-```bash
-cargo run --bin safe --features local-discovery -- wallet get-faucet 127.0.0.1:8000
-```
+The latest version of [Rust](https://www.rust-lang.org/learn/get-started) should be installed. If you already have an installation, use `rustup update` to get the latest version.
-The node manager's `run` command starts the node processes and a faucet process, the latter of
-which will dispense tokens for use with the network. The `status` command should show twenty-five
-running nodes. The `wallet` command retrieves some tokens, which enables file uploads.
+Run all the commands from the root of this repository.
-### Files
+If you haven't already, install Foundry. We need to have access to Anvil, which is packaged with Foundry, to run an EVM node: https://book.getfoundry.sh/getting-started/installation
-The file storage capability can be demonstrated by uploading files to the local network, then
-retrieving them.
+To collect rewards for your nodes, you will need an EVM address. You can create one using [metamask](https://metamask.io/).
-Upload a file or a directory:
+##### 2. Run a local EVM node
-```bash
-cargo run --bin safe --features local-discovery -- files upload
+```sh
+cargo run --bin evm_testnet
```
-The output will show that the upload costs some tokens.
+This creates a CSV file with the EVM network params in your data directory.
-Now download the files again:
+##### 3. Create the test network and pass the EVM params
+ `--rewards-address` _is the address where you will receive your node earnings._
```bash
-cargo run --bin safe --features local-discovery -- files download
+cargo run --bin=safenode-manager --features=local -- local run --build --clean --rewards-address <YOUR_ETHEREUM_ADDRESS>
```
-### Folders
-
-The folders storage capability can be demonstrated by storing folders on the network, making
-changes and syncing them with the stored version on the network, as well as downloading the entire
-folders hierarchy onto a local directory.
-
-All the following commands act on the current directory by default, but since we are building the
-CLI binary to run it, we will have to always provide the directory we want them to act as a path
-argument.
-When otherwise running directly an already built CLI binary, we can simply make sure we are located
-at the directory we want to act on without the need of providing the path as argument.
+The EVM Network parameters are loaded from the CSV file in your data directory automatically when the `local` feature flag is enabled (`--features=local`).
-Initialise a directory to then be able to track changes made on it, and sync them up with the
-network:
+##### 4. Verify node status
```bash
-cargo run --bin safe --features local-discovery -- folders init
+cargo run --bin safenode-manager --features local -- status
```
-Make sure you made a backup copy of the "recovery secret" generated by the above command, or the
-one you have provided when prompted.
+The node manager's `run` command starts the node processes. The `status` command should show twenty-five
+running nodes.
-If any changes are now made to files or directories within this folder (at this point all files and
-folders are considered new since it has just been initalised for tracking), before trying to push
-those changes to the network, we can get a report of the changes that have been made locally:
+##### 5. Uploading and Downloading Data
-```bash
-cargo run --bin safe --features local-discovery -- folders status
-```
-
-We can now push all local changes made to files and directories to the network, as well as pull any
-changes that could have been made to the version stored on the network since last time we synced
-with it:
-
-```bash
-cargo run --bin safe --features local-discovery -- folders sync
-```
+To upload a file or a directory, you need to set the `SECRET_KEY` environment variable to your EVM secret key:
-Now that's all stored on the network, you can download the folders onto any other path by providing
-it as the target directory to the following command (you will be prompted to enter the "recovery
-secret" you obtained when initialising the directory with `init` command):
+> When running a local network, you can use the `SECRET_KEY` printed by the `evm_testnet` command [step 2](#2-run-a-local-evm-node) as it has all the money.
```bash
-cargo run --bin safe --features local-discovery -- folders download
-```
-
-### Token Transfers
-
-Use your local wallet to demonstrate sending tokens and receiving transfers.
-
-First, get your wallet address, this address can be safely shared publicly
-
-```
-cargo run --bin safe -- wallet address
-```
-
-You can also get your balance with:
-
+SECRET_KEY=<YOUR_EVM_SECRET_KEY> cargo run --bin autonomi --features local -- file upload <path>
```
-cargo run --bin safe -- wallet balance
-```
-
-Now to send some tokens to an address:
-
-```
-cargo run --bin safe --features local-discovery -- wallet send 2 [address]
-```
-
-This will output a transfer as a hex string, which should be sent to the recipient.
-This transfer is encrypted to the recipient so only the recipient can read and redeem it.
-To receive a transfer, simply paste it after the wallet receive command:
-
-```
-cargo run --bin safe --features local-discovery -- wallet receive [transfer]
-```
-
-#### Out of band transaction signing
-When you want to transfer tokens from a cold storage or hardware wallet, you can create and sign
-the transaction offline. This is done to prevent the private key from being exposed to any online
-threats.
-For this type of scenarios you can create a watch-only wallet (it holds only a public key) on the
-online device, while using a hot-wallet (which holds the secret key) on a device that is offline.
-The following steps are a simple guide for performing such an operation.
+The output will print out the address at which the content was uploaded.
-Steps on the online device/computer with a watch-only wallet:
+Now to download the files again:
-1. Create a watch-only wallet using the hex-encoded public key:
- `cargo run --release --bin safe -- wowallet create `
-
-2. Deposit a cash-note, owned by the public key used above when creating, into the watch-only
- wallet:
- `cargo run --release --bin safe -- wowallet deposit --cash-note `
-
-3. Build an unsigned transaction:
- `cargo run --release --bin safe -- wowallet transaction `
-
-4. Copy the built unsigned Tx generated by the above command, and send it out-of-band to the
- desired device where the hot-wallet can be loaded.
-
-Steps on the offline device/computer with the corresponding hot-wallet:
-
-5. If you still don't have a hot-wallet created, which owns the cash-notes used to build the
- unsigned transaction, create it with the corresponding secret key:
- `cargo run --release --bin safe -- wallet create --key `
-
-6. Use the hot-wallet to sign the built transaction:
- `cargo run --release --bin safe -- wallet sign `
-
-7. Copy the signed Tx generated by the above command, and send it out-of-band back to the online
- device.
-
-Steps on the online device/computer with the watch-only wallet:
-
-8. Broadcast the signed transaction to the network using the watch-only wallet:
- `cargo run --release --bin safe -- wowallet broadcast `
-
-9. Deposit the change cash-note to the watch-only wallet:
- `cargo run --release --bin safe -- wowallet deposit `
-
-10. Send/share the output cash-note generated by the above command at step #8 to/with the
- recipient.
-
-### Auditing
-
-We can verify a spend, optionally going back to the genesis transaction:
-
-```
-cargo run --bin safe --features local-discovery -- wallet verify [--genesis] [spend address]
-```
-
-All spends from genesis can be audited:
-
-```
-cargo run --bin safe --features local-discovery -- wallet audit
+```bash
+cargo run --bin autonomi --features local -- file download
```
### Registers
@@ -332,11 +181,11 @@ their use by two users to exchange text messages in a crude chat application.
In the first terminal, using the registers example, Alice creates a register:
```
-cargo run --example registers --features=local-discovery -- --user alice --reg-nickname myregister
+cargo run --example registers --features=local -- --user alice --reg-nickname myregister
```
Alice can now write a message to the register and see anything written by anyone else. For example
-she might enter the text "hello, who's there?" which is written to the register and then shown as
+she might enter the text "Hello, who's there?" which is written to the register and then shown as
the "Latest value", in her terminal:
```
@@ -350,15 +199,15 @@ Latest value (more than one if concurrent writes were made):
--------------
Enter a blank line to receive updates, or some text to be written.
-hello, who's there?
-Writing msg (offline) to Register: 'hello, who's there?'
+Hello, who's there?
+Writing msg (offline) to Register: 'Hello, who's there?'
Syncing with SAFE in 2s...
synced!
Current total number of items in Register: 1
Latest value (more than one if concurrent writes were made):
--------------
-[alice]: hello, who's there?
+[Alice]: Hello, who's there?
--------------
Enter a blank line to receive updates, or some text to be written.
@@ -367,7 +216,7 @@ Enter a blank line to receive updates, or some text to be written.
For anyone else to write to the same register they need to know its xor address, so to communicate
with her friend Bob, Alice needs to find a way to send it to Bob. In her terminal, this is the
-value starting "50f4..." in the output above. This value it will be different each time you run the
+value starting "50f4..." in the output above. This value will be different each time you run the
example to create a register.
Having received the xor address, in another terminal Bob can access the same register to see the
@@ -375,7 +224,7 @@ message Alice has written, and he can write back by running this command with th
from Alice. (Note that the command should all be on one line):
```
-cargo run --example registers --features=local-discovery -- --user bob --reg-address 50f4c9d55aa1f4fc19149a86e023cd189e509519788b4ad8625a1ce62932d1938cf4242e029cada768e7af0123a98c25973804d84ad397ca65cb89d6580d04ff07e5b196ea86f882b925be6ade06fc8d
+cargo run --example registers --features=local -- --user bob --reg-address 50f4c9d55aa1f4fc19149a86e023cd189e509519788b4ad8625a1ce62932d1938cf4242e029cada768e7af0123a98c25973804d84ad397ca65cb89d6580d04ff07e5b196ea86f882b925be6ade06fc8d
```
After retrieving the register and displaying the message from Alice, Bob can reply and at any time,
@@ -387,7 +236,7 @@ Here's Bob writing from his terminal:
```
Latest value (more than one if concurrent writes were made):
--------------
-[alice]: hello, who's there?
+[Alice]: Hello, who's there?
--------------
Enter a blank line to receive updates, or some text to be written.
@@ -402,7 +251,7 @@ A second example, `register_inspect` allows you to view its structure and conten
the above example you again provide the address of the register. For example:
```
-cargo run --example register_inspect --features=local-discovery -- --reg-address 50f4c9d55aa1f4fc19149a86e023cd189e509519788b4ad8625a1ce62932d1938cf4242e029cada768e7af0123a98c25973804d84ad397ca65cb89d6580d04ff07e5b196ea86f882b925be6ade06fc8d
+cargo run --example register_inspect --features=local -- --reg-address 50f4c9d55aa1f4fc19149a86e023cd189e509519788b4ad8625a1ce62932d1938cf4242e029cada768e7af0123a98c25973804d84ad397ca65cb89d6580d04ff07e5b196ea86f882b925be6ade06fc8d
```
After printing a summary of the register, this example will display
@@ -522,7 +371,7 @@ Listening to royalty payment events:
```
$ cargo run --bin safenode_rpc_client -- 127.0.0.1:34416 transfers
-Listening to transfers notifications... (press Ctrl+C to exit)
+Listening to transfer notifications... (press Ctrl+C to exit)
New transfer notification received for PublicKey(0c54..5952), containing 1 cash note/s.
CashNote received with UniquePubkey(PublicKey(19ee..1580)), value: 0.000000001
@@ -535,7 +384,7 @@ The `transfers` command can provide a path for royalty payment cash notes:
```
$ cargo run --release --bin=safenode_rpc_client -- 127.0.0.1:34416 transfers ./royalties-cash-notes
-Listening to transfers notifications... (press Ctrl+C to exit)
+Listening to transfer notifications... (press Ctrl+C to exit)
Writing cash notes to: ./royalties-cash-notes
```
diff --git a/adr/libp2p/identify-interval.md b/adr/libp2p/identify-interval.md
index 59dd9db4c6..1b068c1637 100644
--- a/adr/libp2p/identify-interval.md
+++ b/adr/libp2p/identify-interval.md
@@ -8,7 +8,7 @@ Accepted
Idle nodes in a network of moderate data have a high ongoing bandwidth.
-This appears to be because of the identify polling of nodes, which occurs at the deafult libp2p rate, of once per 5 minutes.
+This appears to be because of the identify polling of nodes, which occurs at the default libp2p rate, of once per 5 minutes.
We see ~1mb/s traffic on nodes in a moderate network.
diff --git a/autonomi-cli/Cargo.toml b/autonomi-cli/Cargo.toml
new file mode 100644
index 0000000000..83adf193d2
--- /dev/null
+++ b/autonomi-cli/Cargo.toml
@@ -0,0 +1,57 @@
+[package]
+name = "autonomi-cli"
+version = "0.1.1"
+edition = "2021"
+
+[[bin]]
+name = "autonomi"
+path = "src/main.rs"
+
+[features]
+default = ["metrics"]
+local = ["sn_peers_acquisition/local", "autonomi/local"]
+metrics = ["sn_logging/process-metrics"]
+network-contacts = ["sn_peers_acquisition/network-contacts"]
+
+[[bench]]
+name = "files"
+harness = false
+
+[dependencies]
+autonomi = { path = "../autonomi", version = "0.2.0", features = [
+ "data",
+ "fs",
+ "registers",
+ "loud",
+] }
+clap = { version = "4.2.1", features = ["derive"] }
+color-eyre = "~0.6"
+dirs-next = "~2.0.0"
+indicatif = { version = "0.17.5", features = ["tokio"] }
+tokio = { version = "1.32.0", features = [
+ "io-util",
+ "macros",
+ "parking_lot",
+ "rt",
+ "sync",
+ "time",
+ "fs",
+] }
+tracing = { version = "~0.1.26" }
+sn_peers_acquisition = { path = "../sn_peers_acquisition", version = "0.5.4" }
+sn_build_info = { path = "../sn_build_info", version = "0.1.16" }
+sn_logging = { path = "../sn_logging", version = "0.2.37" }
+
+[dev-dependencies]
+autonomi = { path = "../autonomi", version = "0.2.0", features = [
+ "data",
+ "fs",
+] }
+eyre = "0.6.8"
+criterion = "0.5.1"
+tempfile = "3.6.0"
+rand = { version = "~0.8.5", features = ["small_rng"] }
+rayon = "1.8.0"
+
+[lints]
+workspace = true
diff --git a/autonomi-cli/README.md b/autonomi-cli/README.md
new file mode 100644
index 0000000000..b10d2128fb
--- /dev/null
+++ b/autonomi-cli/README.md
@@ -0,0 +1,27 @@
+# A CLI for the Autonomi Network
+
+```
+Usage: autonomi_cli [OPTIONS] <COMMAND>
+
+Commands:
+ file Operations related to file handling
+ register Operations related to register management
+ vault Operations related to vault management
+ help Print this message or the help of the given subcommand(s)
+
+Options:
+ --log-output-dest
+ Specify the logging output destination. [default: data-dir]
+ --log-format
+ Specify the logging format.
+ --peer
+ Peer(s) to use for bootstrap, in a 'multiaddr' format containing the peer ID [env: SAFE_PEERS=]
+ --timeout
+ The maximum duration to wait for a connection to the network before timing out
+ -x, --no-verify
+ Prevent verification of data storage on the network
+ -h, --help
+ Print help (see more with '--help')
+ -V, --version
+ Print version
+```
\ No newline at end of file
diff --git a/sn_cli/benches/files.rs b/autonomi-cli/benches/files.rs
similarity index 66%
rename from sn_cli/benches/files.rs
rename to autonomi-cli/benches/files.rs
index 288801d980..f545936334 100644
--- a/sn_cli/benches/files.rs
+++ b/autonomi-cli/benches/files.rs
@@ -10,6 +10,7 @@ use criterion::{criterion_group, criterion_main, Criterion, Throughput};
use rand::{thread_rng, Rng};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};
use std::{
+ collections::HashSet,
fs::File,
io::Write,
path::{Path, PathBuf},
@@ -20,18 +21,21 @@ use tempfile::tempdir;
const SAMPLE_SIZE: usize = 20;
+// Default deployer wallet of the testnet.
+const DEFAULT_WALLET_PRIVATE_KEY: &str =
+ "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
+
// This procedure includes the client startup, which will be measured by criterion as well.
// As normal user won't care much about initial client startup,
// but be more alerted on communication speed during transmission.
-// It will be better to execute bench test with `local-discovery`,
+// It will be better to execute bench test with `local`,
// to make the measurement results reflect speed improvement or regression more accurately.
-fn safe_files_upload(dir: &str) {
- let output = Command::new("./target/release/safe")
- .arg("files")
+fn autonomi_file_upload(dir: &str) -> String {
+ let autonomi_cli_path = get_cli_path();
+ let output = Command::new(autonomi_cli_path)
+ .arg("file")
.arg("upload")
.arg(dir)
- .arg("--retry-strategy") // no retries
- .arg("quick")
.output()
.expect("Failed to execute command");
@@ -39,20 +43,36 @@ fn safe_files_upload(dir: &str) {
let err = output.stderr;
let err_string = String::from_utf8(err).expect("Failed to parse error string");
panic!("Upload command executed with failing error code: {err_string:?}");
+ } else {
+ let out = output.stdout;
+ let out_string = String::from_utf8(out).expect("Failed to parse output string");
+ let address = out_string
+ .lines()
+ .find(|line| line.starts_with("At address:"))
+ .expect("Failed to find the address of the uploaded file");
+ let address = address.trim_start_matches("At address: ");
+ address.to_string()
}
}
-fn safe_files_download() {
- let output = Command::new("./target/release/safe")
- .arg("files")
- .arg("download")
- .output()
- .expect("Failed to execute command");
-
- if !output.status.success() {
- let err = output.stderr;
- let err_string = String::from_utf8(err).expect("Failed to parse error string");
- panic!("Download command executed with failing error code: {err_string:?}");
+fn autonomi_file_download(uploaded_files: HashSet<String>) {
+ let autonomi_cli_path = get_cli_path();
+
+ let temp_dir = tempdir().expect("Failed to create temp dir");
+ for address in uploaded_files.iter() {
+ let output = Command::new(autonomi_cli_path.clone())
+ .arg("file")
+ .arg("download")
+ .arg(address)
+ .arg(temp_dir.path())
+ .output()
+ .expect("Failed to execute command");
+
+ if !output.status.success() {
+ let err = output.stderr;
+ let err_string = String::from_utf8(err).expect("Failed to parse error string");
+ panic!("Download command executed with failing error code: {err_string:?}");
+ }
}
}
@@ -71,23 +91,32 @@ fn generate_file(path: &PathBuf, file_size_mb: usize) {
assert_eq!(file_size_mb as f64, size);
}
-fn fund_cli_wallet() {
- let _ = Command::new("./target/release/safe")
- .arg("wallet")
- .arg("get-faucet")
- .arg("127.0.0.1:8000")
- .output()
- .expect("Failed to execute 'safe wallet get-faucet' command");
+fn get_cli_path() -> PathBuf {
+ let mut path = PathBuf::new();
+ if let Ok(val) = std::env::var("CARGO_TARGET_DIR") {
+ path.push(val);
+ } else {
+ path.push("target");
+ }
+ path.push("release");
+ path.push("autonomi");
+ path
}
fn criterion_benchmark(c: &mut Criterion) {
// Check if the binary exists
- if !Path::new("./target/release/safe").exists() {
- eprintln!("Error: Binary ./target/release/safe does not exist. Please make sure to compile your project first");
+ let cli_path = get_cli_path();
+ if !Path::new(&cli_path).exists() {
+ eprintln!("Error: Binary {cli_path:?} does not exist. Please make sure to compile your project first");
exit(1);
}
+ if std::env::var("SECRET_KEY").is_err() {
+ std::env::set_var("SECRET_KEY", DEFAULT_WALLET_PRIVATE_KEY);
+ }
+
let sizes: [u64; 2] = [1, 10]; // File sizes in MB. Add more sizes as needed
+ let mut uploaded_files = HashSet::new();
for size in sizes.iter() {
let temp_dir = tempdir().expect("Failed to create temp dir");
@@ -102,7 +131,6 @@ fn criterion_benchmark(c: &mut Criterion) {
let path = temp_dir_path.join(format!("random_file_{size}_mb_{idx}"));
generate_file(&path, *size as usize);
});
- fund_cli_wallet();
// Wait little bit for the fund to be settled.
std::thread::sleep(Duration::from_secs(10));
@@ -118,9 +146,12 @@ fn criterion_benchmark(c: &mut Criterion) {
// Set the throughput to be reported in terms of bytes
group.throughput(Throughput::Bytes(size * 1024 * 1024));
- let bench_id = format!("safe files upload {size}mb");
+ let bench_id = format!("autonomi files upload {size}mb");
group.bench_function(bench_id, |b| {
- b.iter(|| safe_files_upload(temp_dir_path_str))
+ b.iter(|| {
+ let uploaded_address = autonomi_file_upload(temp_dir_path_str);
+ uploaded_files.insert(uploaded_address);
+ })
});
group.finish();
}
@@ -146,8 +177,10 @@ fn criterion_benchmark(c: &mut Criterion) {
// Set the throughput to be reported in terms of bytes
group.throughput(Throughput::Bytes(total_size * 1024 * 1024));
- let bench_id = "safe files download".to_string();
- group.bench_function(bench_id, |b| b.iter(safe_files_download));
+ let bench_id = "autonomi files download".to_string();
+ group.bench_function(bench_id, |b| {
+ b.iter(|| autonomi_file_download(uploaded_files.clone()))
+ });
group.finish();
}
diff --git a/autonomi-cli/src/access/data_dir.rs b/autonomi-cli/src/access/data_dir.rs
new file mode 100644
index 0000000000..af0db16c2c
--- /dev/null
+++ b/autonomi-cli/src/access/data_dir.rs
@@ -0,0 +1,19 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use color_eyre::eyre::{eyre, Context, Result};
+use std::path::PathBuf;
+
+pub fn get_client_data_dir_path() -> Result<PathBuf> {
+ let mut home_dirs = dirs_next::data_dir()
+ .ok_or_else(|| eyre!("Failed to obtain data dir, your OS might not be supported."))?;
+ home_dirs.push("safe");
+ home_dirs.push("client");
+ std::fs::create_dir_all(home_dirs.as_path()).wrap_err("Failed to create data dir")?;
+ Ok(home_dirs)
+}
diff --git a/autonomi-cli/src/access/keys.rs b/autonomi-cli/src/access/keys.rs
new file mode 100644
index 0000000000..18310f4831
--- /dev/null
+++ b/autonomi-cli/src/access/keys.rs
@@ -0,0 +1,102 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use autonomi::client::registers::RegisterSecretKey;
+use autonomi::{get_evm_network_from_env, Wallet};
+use color_eyre::eyre::{Context, Result};
+use color_eyre::Section;
+use std::env;
+use std::fs;
+use std::path::PathBuf;
+
+const SECRET_KEY_ENV: &str = "SECRET_KEY";
+const REGISTER_SIGNING_KEY_ENV: &str = "REGISTER_SIGNING_KEY";
+
+const SECRET_KEY_FILE: &str = "secret_key";
+const REGISTER_SIGNING_KEY_FILE: &str = "register_signing_key";
+
+/// EVM wallet
+pub fn load_evm_wallet() -> Result<Wallet> {
+ let secret_key =
+ get_secret_key().wrap_err("The secret key is required to perform this action")?;
+ let network = get_evm_network_from_env()?;
+ let wallet = Wallet::new_from_private_key(network, &secret_key)
+ .wrap_err("Failed to load EVM wallet from key")?;
+ Ok(wallet)
+}
+
+/// EVM wallet private key
+pub fn get_secret_key() -> Result<String> {
+ // try env var first
+ let why_env_failed = match env::var(SECRET_KEY_ENV) {
+ Ok(key) => return Ok(key),
+ Err(e) => e,
+ };
+
+ // try from data dir
+ let dir = super::data_dir::get_client_data_dir_path()
+ .wrap_err(format!("Failed to obtain secret key from env var: {why_env_failed}, reading from disk also failed as couldn't access data dir"))
+ .with_suggestion(|| format!("make sure you've provided the {SECRET_KEY_ENV} env var"))?;
+
+ // load the key from file
+ let key_path = dir.join(SECRET_KEY_FILE);
+ fs::read_to_string(&key_path)
+ .wrap_err("Failed to read secret key from file")
+ .with_suggestion(|| format!("make sure you've provided the {SECRET_KEY_ENV} env var or have the key in a file at {key_path:?}"))
+ .with_suggestion(|| "the secret key should be a hex encoded string of your evm wallet private key")
+}
+
+pub fn create_register_signing_key_file(key: RegisterSecretKey) -> Result<PathBuf> {
+ let dir = super::data_dir::get_client_data_dir_path()
+ .wrap_err("Could not access directory to write key to")?;
+ let file_path = dir.join(REGISTER_SIGNING_KEY_FILE);
+ fs::write(&file_path, key.to_hex()).wrap_err("Could not write key to file")?;
+ Ok(file_path)
+}
+
+fn parse_register_signing_key(key_hex: &str) -> Result<RegisterSecretKey> {
+ RegisterSecretKey::from_hex(key_hex)
+ .wrap_err("Failed to parse register signing key")
+ .with_suggestion(|| {
+ "the register signing key should be a hex encoded string of a bls secret key"
+ })
+ .with_suggestion(|| {
+ "you can generate a new secret key with the `register generate-key` subcommand"
+ })
+}
+
+pub fn get_register_signing_key() -> Result<RegisterSecretKey> {
+ // try env var first
+ let why_env_failed = match env::var(REGISTER_SIGNING_KEY_ENV) {
+ Ok(key) => return parse_register_signing_key(&key),
+ Err(e) => e,
+ };
+
+ // try from data dir
+ let dir = super::data_dir::get_client_data_dir_path()
+ .wrap_err(format!("Failed to obtain register signing key from env var: {why_env_failed}, reading from disk also failed as couldn't access data dir"))
+ .with_suggestion(|| format!("make sure you've provided the {REGISTER_SIGNING_KEY_ENV} env var"))
+ .with_suggestion(|| "you can generate a new secret key with the `register generate-key` subcommand")?;
+
+ // load the key from file
+ let key_path = dir.join(REGISTER_SIGNING_KEY_FILE);
+ let key_hex = fs::read_to_string(&key_path)
+ .wrap_err("Failed to read secret key from file")
+ .with_suggestion(|| format!("make sure you've provided the {REGISTER_SIGNING_KEY_ENV} env var or have the key in a file at {key_path:?}"))
+ .with_suggestion(|| "you can generate a new secret key with the `register generate-key` subcommand")?;
+
+ // parse the key
+ parse_register_signing_key(&key_hex)
+}
+
+pub fn get_register_signing_key_path() -> Result<PathBuf> {
+ let dir = super::data_dir::get_client_data_dir_path()
+ .wrap_err("Could not access directory for register signing key")?;
+ let file_path = dir.join(REGISTER_SIGNING_KEY_FILE);
+ Ok(file_path)
+}
diff --git a/sn_client/src/audit.rs b/autonomi-cli/src/access/mod.rs
similarity index 76%
rename from sn_client/src/audit.rs
rename to autonomi-cli/src/access/mod.rs
index 0d9bb8daec..ac80eeca88 100644
--- a/sn_client/src/audit.rs
+++ b/autonomi-cli/src/access/mod.rs
@@ -6,12 +6,6 @@
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
-mod dag_crawling;
-mod dag_error;
-mod spend_dag;
-
-#[cfg(test)]
-mod tests;
-
-pub use dag_error::{DagError, SpendFault};
-pub use spend_dag::{SpendDag, SpendDagGet};
+pub mod data_dir;
+pub mod keys;
+pub mod network;
diff --git a/autonomi-cli/src/access/network.rs b/autonomi-cli/src/access/network.rs
new file mode 100644
index 0000000000..f7e455dade
--- /dev/null
+++ b/autonomi-cli/src/access/network.rs
@@ -0,0 +1,21 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use autonomi::Multiaddr;
+use color_eyre::eyre::Context;
+use color_eyre::Result;
+use color_eyre::Section;
+use sn_peers_acquisition::PeersArgs;
+use sn_peers_acquisition::SAFE_PEERS_ENV;
+
+pub async fn get_peers(peers: PeersArgs) -> Result<Vec<Multiaddr>> {
+ peers.get_peers().await
+ .wrap_err("Please provide valid Network peers to connect to")
+ .with_suggestion(|| format!("make sure you've provided network peers using the --peers option or the {SAFE_PEERS_ENV} env var"))
+ .with_suggestion(|| "a peer address looks like this: /ip4/42.42.42.42/udp/4242/quic-v1/p2p/B64nodePeerIDvdjb3FAJF4ks3moreBase64CharsHere")
+}
diff --git a/autonomi-cli/src/actions/connect.rs b/autonomi-cli/src/actions/connect.rs
new file mode 100644
index 0000000000..9eccb3bbfb
--- /dev/null
+++ b/autonomi-cli/src/actions/connect.rs
@@ -0,0 +1,35 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use autonomi::Client;
+use autonomi::Multiaddr;
+use color_eyre::eyre::bail;
+use color_eyre::eyre::Result;
+use indicatif::ProgressBar;
+use std::time::Duration;
+
+pub async fn connect_to_network(peers: Vec<Multiaddr>) -> Result<Client> {
+ let progress_bar = ProgressBar::new_spinner();
+ progress_bar.enable_steady_tick(Duration::from_millis(120));
+ progress_bar.set_message("Connecting to The Autonomi Network...");
+ let new_style = progress_bar.style().tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈🔗");
+ progress_bar.set_style(new_style);
+
+ progress_bar.set_message("Connecting to The Autonomi Network...");
+
+ match Client::connect(&peers).await {
+ Ok(client) => {
+ progress_bar.finish_with_message("Connected to the Network");
+ Ok(client)
+ }
+ Err(e) => {
+ progress_bar.finish_with_message("Failed to connect to the network");
+ bail!("Failed to connect to the network: {e}")
+ }
+ }
+}
diff --git a/sn_client/src/chunks.rs b/autonomi-cli/src/actions/mod.rs
similarity index 80%
rename from sn_client/src/chunks.rs
rename to autonomi-cli/src/actions/mod.rs
index 7dbcaef92b..98ef491064 100644
--- a/sn_client/src/chunks.rs
+++ b/autonomi-cli/src/actions/mod.rs
@@ -6,8 +6,7 @@
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
-mod error;
-mod pac_man;
+mod connect;
+mod progress_bar;
-pub(crate) use self::error::{Error, Result};
-pub(crate) use pac_man::{encrypt_large, DataMapLevel};
+pub use connect::connect_to_network;
diff --git a/sn_cli/src/files.rs b/autonomi-cli/src/actions/progress_bar.rs
similarity index 73%
rename from sn_cli/src/files.rs
rename to autonomi-cli/src/actions/progress_bar.rs
index 66341f4865..5e2c6c914e 100644
--- a/sn_cli/src/files.rs
+++ b/autonomi-cli/src/actions/progress_bar.rs
@@ -6,22 +6,11 @@
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
-mod chunk_manager;
-mod download;
-mod estimate;
-mod files_uploader;
-mod upload;
-
-pub use chunk_manager::ChunkManager;
-pub use download::{download_file, download_files};
-pub use estimate::Estimator;
-pub use files_uploader::{FilesUploadStatusNotifier, FilesUploadSummary, FilesUploader};
-pub use upload::{UploadedFile, UPLOADED_FILES};
-
-use color_eyre::Result;
+use color_eyre::eyre::Result;
use indicatif::{ProgressBar, ProgressStyle};
use std::time::Duration;
+#[allow(dead_code)]
pub fn get_progress_bar(length: u64) -> Result<ProgressBar> {
let progress_bar = ProgressBar::new(length);
progress_bar.set_style(
diff --git a/autonomi-cli/src/commands.rs b/autonomi-cli/src/commands.rs
new file mode 100644
index 0000000000..4c2067aa87
--- /dev/null
+++ b/autonomi-cli/src/commands.rs
@@ -0,0 +1,168 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+mod file;
+mod register;
+mod vault;
+use std::path::PathBuf;
+
+use clap::Subcommand;
+use color_eyre::Result;
+
+use crate::opt::Opt;
+
+#[derive(Subcommand, Debug)]
+pub enum SubCmd {
+ /// Operations related to file handling.
+ File {
+ #[command(subcommand)]
+ command: FileCmd,
+ },
+
+ /// Operations related to register management.
+ Register {
+ #[command(subcommand)]
+ command: RegisterCmd,
+ },
+
+ /// Operations related to vault management.
+ Vault {
+ #[command(subcommand)]
+ command: VaultCmd,
+ },
+}
+
+#[derive(Subcommand, Debug)]
+pub enum FileCmd {
+ /// Estimate cost to upload a file.
+ Cost {
+ /// The file to estimate cost for.
+ file: String,
+ },
+
+ /// Upload a file and pay for it.
+ Upload {
+ /// The file to upload.
+ file: String,
+ },
+
+ /// Download a file from the given address.
+ Download {
+ /// The address of the file to download.
+ addr: String,
+ /// The destination file path.
+ dest_file: PathBuf,
+ },
+
+ /// List previous uploads
+ List,
+}
+
+#[derive(Subcommand, Debug)]
+pub enum RegisterCmd {
+ /// Generate a new register key.
+ GenerateKey {
+ /// Overwrite existing key if it exists
+ /// Warning: overwriting the existing key will result in loss of access to any existing registers created using that key
+ #[arg(short, long)]
+ overwrite: bool,
+ },
+
+ /// Estimate cost to register a name.
+ Cost {
+ /// The name to register.
+ name: String,
+ },
+
+ /// Create a new register with the given name and value.
+ Create {
+ /// The name of the register.
+ name: String,
+ /// The value to store in the register.
+ value: String,
+ /// Create the register with public write access.
+ #[arg(long, default_value = "false")]
+ public: bool,
+ },
+
+ /// Edit an existing register.
+ Edit {
+ /// Use the name of the register instead of the address
+ /// Note that only the owner of the register can use this shorthand as the address can be generated from the name and register key.
+ #[arg(short, long)]
+ name: bool,
+ /// The address of the register
+ /// With the name option on the address will be used as a name
+ address: String,
+ /// The new value to store in the register.
+ value: String,
+ },
+
+ /// Get the value of a register.
+ Get {
+ /// Use the name of the register instead of the address
+ /// Note that only the owner of the register can use this shorthand as the address can be generated from the name and register key.
+ #[arg(short, long)]
+ name: bool,
+ /// The address of the register
+ /// With the name option on the address will be used as a name
+ address: String,
+ },
+
+ /// List previous registers
+ List,
+}
+
+#[derive(Subcommand, Debug)]
+pub enum VaultCmd {
+ /// Estimate cost to create a vault.
+ Cost,
+
+ /// Create a vault at a deterministic address based on your `SECRET_KEY`.
+ Create,
+
+ /// Sync vault with the network, including registers and files.
+ Sync,
+}
+
+pub async fn handle_subcommand(opt: Opt) -> Result<()> {
+ let peers = crate::access::network::get_peers(opt.peers);
+ let cmd = opt.command;
+
+ match cmd {
+ SubCmd::File { command } => match command {
+ FileCmd::Cost { file } => file::cost(&file, peers.await?).await,
+ FileCmd::Upload { file } => file::upload(&file, peers.await?).await,
+ FileCmd::Download { addr, dest_file } => {
+ file::download(&addr, &dest_file, peers.await?).await
+ }
+ FileCmd::List => file::list(peers.await?),
+ },
+ SubCmd::Register { command } => match command {
+ RegisterCmd::GenerateKey { overwrite } => register::generate_key(overwrite),
+ RegisterCmd::Cost { name } => register::cost(&name, peers.await?).await,
+ RegisterCmd::Create {
+ name,
+ value,
+ public,
+ } => register::create(&name, &value, public, peers.await?).await,
+ RegisterCmd::Edit {
+ address,
+ name,
+ value,
+ } => register::edit(address, name, &value, peers.await?).await,
+ RegisterCmd::Get { address, name } => register::get(address, name, peers.await?).await,
+ RegisterCmd::List => register::list(peers.await?),
+ },
+ SubCmd::Vault { command } => match command {
+ VaultCmd::Cost => vault::cost(peers.await?),
+ VaultCmd::Create => vault::create(peers.await?),
+ VaultCmd::Sync => vault::sync(peers.await?),
+ },
+ }
+}
diff --git a/autonomi-cli/src/commands/file.rs b/autonomi-cli/src/commands/file.rs
new file mode 100644
index 0000000000..bfa4719460
--- /dev/null
+++ b/autonomi-cli/src/commands/file.rs
@@ -0,0 +1,85 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use crate::utils::collect_upload_summary;
+use autonomi::client::address::addr_to_str;
+use autonomi::client::address::str_to_addr;
+use autonomi::Multiaddr;
+use color_eyre::eyre::Context;
+use color_eyre::eyre::Result;
+use std::path::Path;
+use std::path::PathBuf;
+
+pub async fn cost(file: &str, peers: Vec<Multiaddr>) -> Result<()> {
+ let client = crate::actions::connect_to_network(peers).await?;
+
+ println!("Getting upload cost...");
+ let cost = client
+ .file_cost(&PathBuf::from(file))
+ .await
+ .wrap_err("Failed to calculate cost for file")?;
+
+ println!("Estimate cost to upload file: {file}");
+ println!("Total cost: {cost}");
+ Ok(())
+}
+pub async fn upload(path: &str, peers: Vec<Multiaddr>) -> Result<()> {
+ let wallet = crate::keys::load_evm_wallet()?;
+ let mut client = crate::actions::connect_to_network(peers).await?;
+ let event_receiver = client.enable_client_events();
+ let (upload_summary_thread, upload_completed_tx) = collect_upload_summary(event_receiver);
+
+ let path = PathBuf::from(path);
+
+ let xor_name = if path.is_dir() {
+ println!("Uploading directory: {path:?}");
+ info!("Uploading directory: {path:?}");
+ client
+ .dir_upload(&path, &wallet)
+ .await
+ .wrap_err("Failed to upload directory")?
+ } else {
+ println!("Uploading file: {path:?}");
+ info!("Uploading file: {path:?}");
+ client
+ .file_upload(&path, &wallet)
+ .await
+ .wrap_err("Failed to upload file")?
+ };
+
+ let addr = addr_to_str(xor_name);
+
+ println!("Successfully uploaded: {path:?}");
+ println!("At address: {addr}");
+ info!("Successfully uploaded: {path:?} at address: {addr}");
+ if let Ok(()) = upload_completed_tx.send(()) {
+ let summary = upload_summary_thread.await?;
+ if summary.record_count == 0 {
+ println!("All chunks already exist on the network");
+ } else {
+ println!("Number of chunks uploaded: {}", summary.record_count);
+ println!("Total cost: {} AttoTokens", summary.tokens_spent);
+ }
+ info!("Summary for upload of data {path:?} at {addr:?}: {summary:?}");
+ }
+
+ Ok(())
+}
+pub async fn download(addr: &str, dest_path: &Path, peers: Vec<Multiaddr>) -> Result<()> {
+ let client = crate::actions::connect_to_network(peers).await?;
+ let address = str_to_addr(addr).wrap_err("Failed to parse data address")?;
+
+ client.download_file_or_dir(address, dest_path).await?;
+
+ Ok(())
+}
+
+pub fn list(_peers: Vec<Multiaddr>) -> Result<()> {
+ println!("The file list feature is coming soon!");
+ Ok(())
+}
diff --git a/autonomi-cli/src/commands/register.rs b/autonomi-cli/src/commands/register.rs
new file mode 100644
index 0000000000..d559e6cc55
--- /dev/null
+++ b/autonomi-cli/src/commands/register.rs
@@ -0,0 +1,175 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use crate::utils::collect_upload_summary;
+use autonomi::client::registers::RegisterAddress;
+use autonomi::client::registers::RegisterPermissions;
+use autonomi::client::registers::RegisterSecretKey;
+use autonomi::Client;
+use autonomi::Multiaddr;
+use color_eyre::eyre::eyre;
+use color_eyre::eyre::Context;
+use color_eyre::eyre::Result;
+use color_eyre::Section;
+
+pub fn generate_key(overwrite: bool) -> Result<()> {
+ // check if the key already exists
+ let key_path = crate::keys::get_register_signing_key_path()?;
+ if key_path.exists() && !overwrite {
+ return Err(eyre!("Register key already exists at: {}", key_path.display()))
+ .with_suggestion(|| "if you want to overwrite the existing key, run the command with the --overwrite flag")
+ .with_warning(|| "overwriting the existing key might result in loss of access to any existing registers created using that key");
+ }
+
+ // generate and write a new key to file
+ let key = RegisterSecretKey::random();
+ let path = crate::keys::create_register_signing_key_file(key)
+ .wrap_err("Failed to create new register key")?;
+ println!("✅ Created new register key at: {}", path.display());
+ Ok(())
+}
+
+pub async fn cost(name: &str, peers: Vec<Multiaddr>) -> Result<()> {
+ let register_key = crate::keys::get_register_signing_key()
+ .wrap_err("The register key is required to perform this action")?;
+ let client = crate::actions::connect_to_network(peers).await?;
+
+ let cost = client
+ .register_cost(name.to_string(), register_key)
+ .await
+ .wrap_err("Failed to get cost for register")?;
+ println!("✅ The estimated cost to create a register with name {name} is: {cost}");
+ Ok(())
+}
+
+pub async fn create(name: &str, value: &str, public: bool, peers: Vec<Multiaddr>) -> Result<()> {
+ let wallet = crate::keys::load_evm_wallet()?;
+ let register_key = crate::keys::get_register_signing_key()
+ .wrap_err("The register key is required to perform this action")?;
+ let mut client = crate::actions::connect_to_network(peers).await?;
+ let event_receiver = client.enable_client_events();
+ let (upload_summary_thread, upload_completed_tx) = collect_upload_summary(event_receiver);
+
+ println!("Creating register with name: {name}");
+ let register = if public {
+ println!("With public write access");
+ let permissions = RegisterPermissions::new_anyone_can_write();
+ client
+ .register_create_with_permissions(
+ value.as_bytes().to_vec().into(),
+ name,
+ register_key,
+ permissions,
+ &wallet,
+ )
+ .await
+ .wrap_err("Failed to create register")?
+ } else {
+ println!("With private write access");
+ client
+ .register_create(
+ value.as_bytes().to_vec().into(),
+ name,
+ register_key,
+ &wallet,
+ )
+ .await
+ .wrap_err("Failed to create register")?
+ };
+
+ let address = register.address();
+
+ println!("✅ Register created at address: {address}");
+ println!("With name: {name}");
+ println!("And initial value: [{value}]");
+
+ if let Ok(()) = upload_completed_tx.send(()) {
+ let summary = upload_summary_thread.await?;
+ if summary.record_count == 0 {
+ println!("The register was already created on the network. No tokens were spent.");
+ } else {
+ println!("Total cost: {} AttoTokens", summary.tokens_spent);
+ }
+ }
+
+ Ok(())
+}
+
+pub async fn edit(address: String, name: bool, value: &str, peers: Vec<Multiaddr>) -> Result<()> {
+ let register_key = crate::keys::get_register_signing_key()
+ .wrap_err("The register key is required to perform this action")?;
+ let client = crate::actions::connect_to_network(peers).await?;
+
+ let address = if name {
+ Client::register_address(&address, &register_key)
+ } else {
+ RegisterAddress::from_hex(&address)
+ .wrap_err(format!("Failed to parse register address: {address}"))
+ .with_suggestion(|| {
+ "if you want to use the name as the address, run the command with the --name flag"
+ })?
+ };
+
+ println!("Getting register at address: {address}");
+ let register = client
+ .register_get(address)
+ .await
+ .wrap_err(format!("Failed to get register at address: {address}"))?;
+ println!("Found register at address: {address}");
+
+ println!("Updating register with new value: {value}");
+ client
+ .register_update(register, value.as_bytes().to_vec().into(), register_key)
+ .await
+ .wrap_err(format!("Failed to update register at address: {address}"))?;
+
+ println!("✅ Successfully updated register");
+ println!("With value: [{value}]");
+
+ Ok(())
+}
+
+pub async fn get(address: String, name: bool, peers: Vec<Multiaddr>) -> Result<()> {
+ let register_key = crate::keys::get_register_signing_key()
+ .wrap_err("The register key is required to perform this action")?;
+ let client = crate::actions::connect_to_network(peers).await?;
+
+ let address = if name {
+ Client::register_address(&address, &register_key)
+ } else {
+ RegisterAddress::from_hex(&address)
+ .wrap_err(format!("Failed to parse register address: {address}"))
+ .with_suggestion(|| {
+ "if you want to use the name as the address, run the command with the --name flag"
+ })?
+ };
+
+ println!("Getting register at address: {address}");
+ let register = client
+ .register_get(address)
+ .await
+ .wrap_err(format!("Failed to get register at address: {address}"))?;
+ let values = register.values();
+
+ println!("✅ Register found at address: {address}");
+ match values.as_slice() {
+ [one] => println!("With value: [{:?}]", String::from_utf8_lossy(one)),
+ _ => {
+ println!("With multiple concurrent values:");
+ for value in values.iter() {
+ println!("[{:?}]", String::from_utf8_lossy(value));
+ }
+ }
+ }
+ Ok(())
+}
+
+pub fn list(_peers: Vec<Multiaddr>) -> Result<()> {
+ println!("The register feature is coming soon!");
+ Ok(())
+}
diff --git a/autonomi-cli/src/commands/vault.rs b/autonomi-cli/src/commands/vault.rs
new file mode 100644
index 0000000000..9a8d708824
--- /dev/null
+++ b/autonomi-cli/src/commands/vault.rs
@@ -0,0 +1,25 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use autonomi::Multiaddr;
+use color_eyre::eyre::Result;
+
+pub fn cost(_peers: Vec<Multiaddr>) -> Result<()> {
+ println!("The vault feature is coming soon!");
+ Ok(())
+}
+
+pub fn create(_peers: Vec<Multiaddr>) -> Result<()> {
+ println!("The vault feature is coming soon!");
+ Ok(())
+}
+
+pub fn sync(_peers: Vec<Multiaddr>) -> Result<()> {
+ println!("The vault feature is coming soon!");
+ Ok(())
+}
diff --git a/autonomi-cli/src/main.rs b/autonomi-cli/src/main.rs
new file mode 100644
index 0000000000..de4cdcf4c4
--- /dev/null
+++ b/autonomi-cli/src/main.rs
@@ -0,0 +1,68 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+#[macro_use]
+extern crate tracing;
+
+mod access;
+mod actions;
+mod commands;
+mod opt;
+mod utils;
+
+pub use access::data_dir;
+pub use access::keys;
+pub use access::network;
+
+use clap::Parser;
+use color_eyre::Result;
+
+use opt::Opt;
+#[cfg(feature = "metrics")]
+use sn_logging::metrics::init_metrics;
+use sn_logging::{LogBuilder, LogFormat, ReloadHandle, WorkerGuard};
+use tracing::Level;
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ color_eyre::install().expect("Failed to initialise error handler");
+ let opt = Opt::parse();
+ let _log_guards = init_logging_and_metrics(&opt)?;
+ #[cfg(feature = "metrics")]
+ tokio::spawn(init_metrics(std::process::id()));
+
+ // Log the full command that was run and the git version
+ info!("\"{}\"", std::env::args().collect::<Vec<String>>().join(" "));
+ let version = sn_build_info::git_info();
+ info!("autonomi client built with git version: {version}");
+ println!("autonomi client built with git version: {version}");
+
+ commands::handle_subcommand(opt).await?;
+
+ Ok(())
+}
+
+fn init_logging_and_metrics(opt: &Opt) -> Result<(ReloadHandle, Option<WorkerGuard>)> {
+ let logging_targets = vec![
+ ("autonomi-cli".to_string(), Level::TRACE),
+ ("autonomi".to_string(), Level::TRACE),
+ ("evmlib".to_string(), Level::TRACE),
+ ("sn_evm".to_string(), Level::TRACE),
+ ("sn_networking".to_string(), Level::INFO),
+ ("sn_build_info".to_string(), Level::TRACE),
+ ("sn_logging".to_string(), Level::TRACE),
+ ("sn_peers_acquisition".to_string(), Level::TRACE),
+ ("sn_protocol".to_string(), Level::TRACE),
+ ("sn_registers".to_string(), Level::TRACE),
+ ];
+ let mut log_builder = LogBuilder::new(logging_targets);
+ log_builder.output_dest(opt.log_output_dest.clone());
+ log_builder.format(opt.log_format.unwrap_or(LogFormat::Default));
+ let guards = log_builder.initialize()?;
+ Ok(guards)
+}
diff --git a/sn_cli/src/bin/subcommands/mod.rs b/autonomi-cli/src/opt.rs
similarity index 68%
rename from sn_cli/src/bin/subcommands/mod.rs
rename to autonomi-cli/src/opt.rs
index 7a7ba11cad..8f3fb20967 100644
--- a/sn_cli/src/bin/subcommands/mod.rs
+++ b/autonomi-cli/src/opt.rs
@@ -6,17 +6,14 @@
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
-pub(crate) mod files;
-pub(crate) mod folders;
-pub(crate) mod register;
-pub(crate) mod wallet;
+use std::time::Duration;
use clap::Parser;
-use clap::Subcommand;
use color_eyre::Result;
use sn_logging::{LogFormat, LogOutputDest};
use sn_peers_acquisition::PeersArgs;
-use std::time::Duration;
+
+use crate::commands::SubCmd;
// Please do not remove the blank lines in these doc comments.
// They are used for inserting line breaks when the help menu is rendered in the UI.
@@ -33,6 +30,7 @@ pub(crate) struct Opt {
/// - Linux: $HOME/.local/share/safe/client/logs
/// - macOS: $HOME/Library/Application Support/safe/client/logs
/// - Windows: C:\Users\\AppData\Roaming\safe\client\logs
+ #[allow(rustdoc::invalid_html_tags)]
#[clap(long, value_parser = LogOutputDest::parse_from_str, verbatim_doc_comment, default_value = "data-dir")]
pub log_output_dest: LogOutputDest,
@@ -49,7 +47,7 @@ pub(crate) struct Opt {
/// Available sub commands.
#[clap(subcommand)]
- pub cmd: SubCmd,
+ pub command: SubCmd,
/// The maximum duration to wait for a connection to the network before timing out.
#[clap(long = "timeout", global = true, value_parser = |t: &str| -> Result<Duration> { Ok(t.parse().map(Duration::from_secs)?) })]
@@ -61,25 +59,3 @@ pub(crate) struct Opt {
#[clap(global = true, long = "no-verify", short = 'x')]
pub no_verify: bool,
}
-
-#[derive(Subcommand, Debug)]
-pub(super) enum SubCmd {
- #[clap(name = "wallet", subcommand)]
- /// Commands for a hot-wallet management.
- /// A hot-wallet holds the secret key, thus it can be used for signing transfers/transactions.
- Wallet(wallet::hot_wallet::WalletCmds),
- #[clap(name = "wowallet", subcommand)]
- /// Commands for watch-only wallet management
- /// A watch-only wallet holds only the public key, thus it cannot be used for signing
- /// transfers/transactions, but only to query balances and broadcast offline signed transactions.
- WatchOnlyWallet(wallet::wo_wallet::WatchOnlyWalletCmds),
- #[clap(name = "files", subcommand)]
- /// Commands for file management
- Files(files::FilesCmds),
- #[clap(name = "folders", subcommand)]
- /// Commands for folders management
- Folders(folders::FoldersCmds),
- #[clap(name = "register", subcommand)]
- /// Commands for register management
- Register(register::RegisterCmds),
-}
diff --git a/autonomi-cli/src/utils.rs b/autonomi-cli/src/utils.rs
new file mode 100644
index 0000000000..5f031a3c24
--- /dev/null
+++ b/autonomi-cli/src/utils.rs
@@ -0,0 +1,56 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use autonomi::client::{Amount, ClientEvent, UploadSummary};
+
+/// Collects upload summary from the event receiver.
+/// Send a signal to the returned sender to stop collecting and to return the result via the join handle.
+pub fn collect_upload_summary(
+ mut event_receiver: tokio::sync::mpsc::Receiver<ClientEvent>,
+) -> (
+ tokio::task::JoinHandle<UploadSummary>,
+ tokio::sync::oneshot::Sender<()>,
+) {
+ let (upload_completed_tx, mut upload_completed_rx) = tokio::sync::oneshot::channel::<()>();
+ let stats_thread = tokio::spawn(async move {
+ let mut tokens_spent: Amount = Amount::from(0);
+ let mut record_count = 0;
+
+ loop {
+ tokio::select! {
+ event = event_receiver.recv() => {
+ match event {
+ Some(ClientEvent::UploadComplete(upload_summary)) => {
+ tokens_spent += upload_summary.tokens_spent;
+ record_count += upload_summary.record_count;
+ }
+ None => break,
+ }
+ }
+ _ = &mut upload_completed_rx => break,
+ }
+ }
+
+ // try to drain the event receiver in case there are any more events
+ while let Ok(event) = event_receiver.try_recv() {
+ match event {
+ ClientEvent::UploadComplete(upload_summary) => {
+ tokens_spent += upload_summary.tokens_spent;
+ record_count += upload_summary.record_count;
+ }
+ }
+ }
+
+ UploadSummary {
+ tokens_spent,
+ record_count,
+ }
+ });
+
+ (stats_thread, upload_completed_tx)
+}
diff --git a/autonomi/Cargo.toml b/autonomi/Cargo.toml
index 3c3bef4c6d..617452db53 100644
--- a/autonomi/Cargo.toml
+++ b/autonomi/Cargo.toml
@@ -3,43 +3,76 @@ authors = ["MaidSafe Developers <dev@maidsafe.net>"]
description = "Autonomi client API"
name = "autonomi"
license = "GPL-3.0"
-version = "0.1.2"
+version = "0.2.0"
edition = "2021"
homepage = "https://maidsafe.net"
readme = "README.md"
repository = "https://github.com/maidsafe/safe_network"
+[lib]
+crate-type = ["cdylib", "rlib"]
+
[features]
-default = []
-full = ["data", "files", "fs", "registers", "transfers"]
-data = ["transfers"]
-files = ["transfers"]
-fs = []
-local = ["sn_client/local-discovery"]
-registers = ["transfers"]
-transfers = []
+default = ["data"]
+full = ["data", "registers", "vault"]
+data = []
+vault = ["data"]
+fs = ["tokio/fs", "data"]
+local = ["sn_networking/local", "test_utils/local", "sn_evm/local"]
+registers = ["data"]
+loud = []
[dependencies]
bip39 = "2.0.0"
bls = { package = "blsttc", version = "8.0.1" }
bytes = { version = "1.0.1", features = ["serde"] }
-libp2p = "0.53"
+curv = { version = "0.10.1", package = "sn_curv", default-features = false, features = [
+ "num-bigint",
+] }
+eip2333 = { version = "0.2.1", package = "sn_bls_ckd" }
+const-hex = "1.12.0"
+hex = "~0.4.3"
+libp2p = "0.54.1"
rand = "0.8.5"
rmp-serde = "1.1.1"
self_encryption = "~0.30.0"
serde = { version = "1.0.133", features = ["derive", "rc"] }
-sn_client = { path = "../sn_client", version = "0.110.4" }
-sn_protocol = { version = "0.17.11", path = "../sn_protocol" }
-sn_registers = { path = "../sn_registers", version = "0.3.21" }
-sn_transfers = { path = "../sn_transfers", version = "0.19.3" }
+sn_networking = { path = "../sn_networking", version = "0.19.0" }
+sn_peers_acquisition = { path = "../sn_peers_acquisition", version = "0.5.4" }
+sn_protocol = { version = "0.17.12", path = "../sn_protocol" }
+sn_registers = { path = "../sn_registers", version = "0.4.0" }
+sn_evm = { path = "../sn_evm", version = "0.1.1" }
thiserror = "1.0.23"
-tokio = { version = "1.35.0", features = ["sync", "fs"] }
+tokio = { version = "1.35.0", features = ["sync"] }
tracing = { version = "~0.1.26" }
walkdir = "2.5.0"
xor_name = "5.0.0"
+futures = "0.3.30"
+wasm-bindgen = "0.2.93"
+wasm-bindgen-futures = "0.4.43"
+serde-wasm-bindgen = "0.6.5"
[dev-dependencies]
+eyre = "0.6.5"
+sha2 = "0.10.6"
+sn_logging = { path = "../sn_logging", version = "0.2.37" }
+sn_peers_acquisition = { path = "../sn_peers_acquisition", version = "0.5.4" }
+# Do not specify the version field. Release process expects even the local dev deps to be published.
+# Removing the version field is a workaround.
+test_utils = { path = "../test_utils" }
+tiny_http = "0.11"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+wasm-bindgen-test = "0.3.43"
+
+[target.'cfg(target_arch = "wasm32")'.dependencies]
+console_error_panic_hook = "0.1.7"
+evmlib = { path = "../evmlib", version = "0.1.1", features = ["wasm-bindgen"] }
+# See https://github.com/sebcrozet/instant/blob/7bd13f51f5c930239fddc0476a837870fb239ed7/README.md#using-instant-for-a-wasm-platform-where-performancenow-is-not-available
+instant = { version = "0.1", features = ["wasm-bindgen", "inaccurate"] }
+js-sys = "0.3.70"
+test_utils = { path = "../test_utils" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+tracing-web = "0.1.3"
[lints]
workspace = true
diff --git a/autonomi/README.md b/autonomi/README.md
index e3b2291766..3b27c6b0f0 100644
--- a/autonomi/README.md
+++ b/autonomi/README.md
@@ -7,19 +7,114 @@ Connect to and build on the Autonomi network.
## Usage
-See [docs.rs/autonomi](https://docs.rs/autonomi) for usage examples.
+Add the autonomi crate to your `Cargo.toml`:
+
+```toml
+[dependencies]
+autonomi = { path = "../autonomi", version = "0.1.0" }
+```
## Running tests
-Run a local network with the `local-discovery` feature:
+### Using a local EVM testnet
+
+1. If you haven't, install Foundry, to be able to run Anvil
+ nodes: https://book.getfoundry.sh/getting-started/installation
+2. Run a local EVM node:
+
+```sh
+cargo run --bin evm_testnet
+```
+
+3. Run a local network with the `local` feature and use the local evm node.
+
+```sh
+cargo run --bin=safenode-manager --features=local -- local run --build --clean --rewards-address evm-local
+```
+
+4. Then run the tests with the `local` feature and pass the EVM params again:
+
+```sh
+$ EVM_NETWORK=local cargo test --package=autonomi --features=local
+# Or with logs
+$ RUST_LOG=autonomi EVM_NETWORK=local cargo test --package=autonomi --features=local -- --nocapture
+```
+
+### Using a live testnet or mainnet
+
+Using the hardcoded `Arbitrum One` option as an example, but you can also use the command flags of the steps above and
+point it to a live network.
+
+1. Run a local network with the `local` feature:
```sh
-cargo run --bin=safenode-manager --features=local-discovery -- local run --build --clean
+cargo run --bin=safenode-manager --features=local -- local run --build --clean --rewards-address evm-arbitrum-one
```
-Then run the tests with the `local` feature:
+2. Then run the tests with the `local` feature. Make sure that the wallet of the private key you pass has enough gas and
+ payment tokens on the network (in this case Arbitrum One):
+
```sh
-$ cargo test --package=autonomi --features=local
+$ EVM_NETWORK=arbitrum-one EVM_PRIVATE_KEY=<PRIVATE_KEY> cargo test --package=autonomi --features=local
# Or with logs
-$ RUST_LOG=autonomi cargo test --package=autonomi --features=local -- --nocapture
+$ RUST_LOG=autonomi EVM_NETWORK=arbitrum-one EVM_PRIVATE_KEY=<PRIVATE_KEY> cargo test --package=autonomi --features=local -- --nocapture
+```
+
+### WebAssembly
+
+To run a WASM test
+- Install `wasm-pack`
+- Make sure your Rust supports the `wasm32-unknown-unknown` target. (If you have `rustup`: `rustup target add wasm32-unknown-unknown`.)
+- Pass a bootstrap peer via `SAFE_PEERS`. This *has* to be the websocket address, e.g. `/ip4/<IP>/tcp/<PORT>/ws/p2p/<PEER_ID>`.
+ - As well as the other environment variables needed for EVM payments (e.g. `RPC_URL`).
+- Optionally specify the specific test, e.g. `-- put` to run `put()` in `wasm.rs` only.
+
+Example:
+```sh
+SAFE_PEERS=/ip4/<IP>/tcp/<PORT>/ws/p2p/<PEER_ID> wasm-pack test --release --firefox autonomi --features=data,files --test wasm -- put
+```
+
+
+## Faucet (local)
+
+There is no faucet server, but instead you can use the `Deployer wallet private key` printed in the EVM node output to
+initialise a wallet from, giving it almost infinite gas and payment tokens. Example:
+
+```rust
+let rpc_url = "http://localhost:54370/";
+let payment_token_address = "0x5FbDB2315678afecb367f032d93F642f64180aa3";
+let data_payments_address = "0x8464135c8F25Da09e49BC8782676a84730C318bC";
+let private_key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
+
+let network = Network::Custom(CustomNetwork::new(
+rpc_url,
+payment_token_address,
+data_payments_address,
+));
+
+let deployer_wallet = Wallet::new_from_private_key(network, private_key).unwrap();
+let receiving_wallet = Wallet::new_with_random_wallet(network);
+
+// Send 10 payment tokens (atto)
+let _ = deployer_wallet
+.transfer_tokens(receiving_wallet.address(), Amount::from(10))
+.await;
```
+
+Alternatively, you can provide the wallet address that should own all the gas and payment tokens to the EVM testnet
+startup command using the `--genesis-wallet` flag:
+
+```sh
+cargo run --bin evm_testnet -- --genesis-wallet <WALLET_ADDRESS>
+```
+
+```shell
+*************************
+* Ethereum node started *
+*************************
+RPC URL: http://localhost:60093/
+Payment token address: 0x5FbDB2315678afecb367f032d93F642f64180aa3
+Chunk payments address: 0x8464135c8F25Da09e49BC8782676a84730C318bC
+Deployer wallet private key: 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
+Genesis wallet balance: (tokens: 20000000000000000000000000, gas: 9998998011366954730202)
+```
\ No newline at end of file
diff --git a/autonomi/WASM_docs.md b/autonomi/WASM_docs.md
new file mode 100644
index 0000000000..995809b8bd
--- /dev/null
+++ b/autonomi/WASM_docs.md
@@ -0,0 +1,170 @@
+## JavaScript Autonomi API Documentation
+
+Note that this is a first version and will be subject to change.
+
+### **Client**
+
+The `Client` object allows interaction with the network to store and retrieve data. Below are the available methods for the `Client` class.
+
+#### **Constructor**
+
+```javascript
+let client = await new Client([multiaddress]);
+```
+
+- **multiaddress** (Array of Strings): A list of network addresses for the client to connect to.
+
+Example:
+```javascript
+let client = await new Client(["/ip4/127.0.0.1/tcp/36075/ws/p2p/12D3KooWALb...BhDAfJY"]);
+```
+
+#### **Methods**
+
+##### **put(data, wallet)**
+
+Uploads a piece of encrypted data to the network.
+
+```javascript
+let result = await client.put(data, wallet);
+```
+
+- **data** (Uint8Array): The data to be stored.
+- **wallet** (Wallet): The wallet used to pay for the storage.
+
+Returns:
+- **result** (XorName): The XOR address of the stored data.
+
+Example:
+```javascript
+let wallet = getFundedWallet();
+let data = new Uint8Array([1, 2, 3]);
+let result = await client.put(data, wallet);
+```
+
+##### **get(data_map_addr)**
+
+Fetches encrypted data from the network using its XOR address.
+
+```javascript
+let data = await client.get(data_map_addr);
+```
+
+- **data_map_addr** (XorName): The XOR address of the data to fetch.
+
+Returns:
+- **data** (Uint8Array): The fetched data.
+
+Example:
+```javascript
+let data = await client.get(result);
+```
+
+##### **cost(data)**
+
+Gets the cost of storing the provided data on the network.
+
+```javascript
+let cost = await client.cost(data);
+```
+
+- **data** (Uint8Array): The data whose storage cost you want to calculate.
+
+Returns:
+- **cost** (AttoTokens): The calculated cost for storing the data.
+
+Example:
+```javascript
+let cost = await client.cost(new Uint8Array([1, 2, 3]));
+```
+
+---
+
+### **Wallet**
+
+The `Wallet` object represents an Ethereum wallet used for data payments.
+
+#### **Methods**
+
+##### **new_from_private_key(network, private_key)**
+
+Creates a new wallet using the given private key.
+
+```javascript
+let wallet = Wallet.new_from_private_key(network, private_key);
+```
+
+- **network** (EvmNetwork): The network to which the wallet connects.
+- **private_key** (String): The private key of the wallet.
+
+Returns:
+- **wallet** (Wallet): The created wallet.
+
+Example:
+```javascript
+let wallet = Wallet.new_from_private_key(EvmNetwork.default(), "your_private_key_here");
+```
+
+##### **address()**
+
+Gets the wallet’s address.
+
+```javascript
+let address = wallet.address();
+```
+
+Returns:
+- **address** (Address): The wallet's address.
+
+Example:
+```javascript
+let wallet = Wallet.new_from_private_key(EvmNetwork.default(), "your_private_key_here");
+let address = wallet.address();
+```
+
+---
+
+### **EvmNetwork**
+
+The `EvmNetwork` object represents the blockchain network.
+
+#### **Methods**
+
+##### **default()**
+
+Connects to the default network.
+
+```javascript
+let network = EvmNetwork.default();
+```
+
+Returns:
+- **network** (EvmNetwork): The default network.
+
+Example:
+```javascript
+let network = EvmNetwork.default();
+```
+
+---
+
+### Example Usage:
+
+```javascript
+let client = await new Client(["/ip4/127.0.0.1/tcp/36075/ws/p2p/12D3KooWALb...BhDAfJY"]);
+console.log("connected");
+
+let wallet = Wallet.new_from_private_key(EvmNetwork.default(), "your_private_key_here");
+console.log("wallet retrieved");
+
+let data = new Uint8Array([1, 2, 3]);
+let result = await client.put(data, wallet);
+console.log("Data stored at:", result);
+
+let fetchedData = await client.get(result);
+console.log("Data retrieved:", fetchedData);
+```
+
+---
+
+This documentation covers the basic usage of `Client`, `Wallet`, and `EvmNetwork` types in the JavaScript API.
\ No newline at end of file
diff --git a/autonomi/index.html b/autonomi/index.html
new file mode 100644
index 0000000000..bd806016ca
--- /dev/null
+++ b/autonomi/index.html
@@ -0,0 +1,48 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/autonomi/src/client/address.rs b/autonomi/src/client/address.rs
new file mode 100644
index 0000000000..f314952f9c
--- /dev/null
+++ b/autonomi/src/client/address.rs
@@ -0,0 +1,48 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use xor_name::XorName;
+
+#[derive(Debug, thiserror::Error)]
+pub enum DataError {
+ #[error("Invalid XorName")]
+ InvalidXorName,
+ #[error("Input address is not a hex string")]
+ InvalidHexString,
+}
+
+pub fn str_to_addr(addr: &str) -> Result<XorName, DataError> {
+ let bytes = hex::decode(addr).map_err(|err| {
+ error!("Failed to decode hex string: {err:?}");
+ DataError::InvalidHexString
+ })?;
+ let xor = XorName(bytes.try_into().map_err(|err| {
+ error!("Failed to convert bytes to XorName: {err:?}");
+ DataError::InvalidXorName
+ })?);
+ Ok(xor)
+}
+
+pub fn addr_to_str(addr: XorName) -> String {
+ hex::encode(addr)
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use xor_name::XorName;
+
+ #[test]
+ fn test_xorname_to_str() {
+ let rng = &mut rand::thread_rng();
+ let xorname = XorName::random(rng);
+ let str = addr_to_str(xorname);
+ let xorname2 = str_to_addr(&str).expect("Failed to convert back to xorname");
+ assert_eq!(xorname, xorname2);
+ }
+}
diff --git a/autonomi/src/client/archive.rs b/autonomi/src/client/archive.rs
new file mode 100644
index 0000000000..f38ca24cbc
--- /dev/null
+++ b/autonomi/src/client/archive.rs
@@ -0,0 +1,66 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use std::{collections::HashMap, path::PathBuf};
+
+use super::{
+ data::DataAddr,
+ data::{GetError, PutError},
+ Client,
+};
+use bytes::Bytes;
+use serde::{Deserialize, Serialize};
+use sn_evm::EvmWallet;
+use xor_name::XorName;
+
+/// The address of an archive on the network. Points to an [`Archive`].
+pub type ArchiveAddr = XorName;
+
+/// An archive of files that containing file paths, their metadata and the files data addresses
+/// Using archives is useful for uploading entire directories to the network, only needing to keep track of a single address.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Archive {
+ pub map: HashMap<PathBuf, DataAddr>,
+}
+
+impl Archive {
+ /// Deserialize from bytes.
+ pub fn from_bytes(data: &Bytes) -> Result<Archive, rmp_serde::decode::Error> {
+ let root: Archive = rmp_serde::from_slice(&data[..])?;
+
+ Ok(root)
+ }
+
+ /// Serialize to bytes.
+ pub fn into_bytes(&self) -> Result<Bytes, rmp_serde::encode::Error> {
+ let root_serialized = rmp_serde::to_vec(&self)?;
+ let root_serialized = Bytes::from(root_serialized);
+
+ Ok(root_serialized)
+ }
+}
+
+impl Client {
+ /// Fetch an archive from the network
+ pub async fn archive_get(&self, addr: ArchiveAddr) -> Result<Archive, GetError> {
+ let data = self.data_get(addr).await?;
+ Ok(Archive::from_bytes(&data)?)
+ }
+
+ /// Upload an archive to the network
+ pub async fn archive_put(
+ &self,
+ archive: Archive,
+ wallet: &EvmWallet,
+ ) -> Result<ArchiveAddr, PutError> {
+ let bytes = archive
+ .into_bytes()
+ .map_err(|e| PutError::Serialization(format!("Failed to serialize archive: {e:?}")))?;
+ self.data_put(bytes, wallet).await
+ }
+}
diff --git a/autonomi/src/client/data.rs b/autonomi/src/client/data.rs
index 3522da2251..055016f291 100644
--- a/autonomi/src/client/data.rs
+++ b/autonomi/src/client/data.rs
@@ -1,56 +1,63 @@
-use std::collections::{BTreeMap, HashSet};
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
-use crate::self_encryption::{encrypt, DataMapLevel};
-use crate::Client;
use bytes::Bytes;
-use libp2p::{
- kad::{Quorum, Record},
- PeerId,
-};
-use self_encryption::{decrypt_full_set, DataMap, EncryptedChunk};
-use sn_client::{
- networking::{GetRecordCfg, NetworkError, PutRecordCfg},
- transfers::{HotWallet, MainPubkey, NanoTokens, PaymentQuote},
- StoragePaymentResult,
-};
+use libp2p::kad::Quorum;
+use tokio::task::JoinError;
+
+use std::collections::HashSet;
+use xor_name::XorName;
+
+use crate::client::{ClientEvent, UploadSummary};
+use crate::{self_encryption::encrypt, Client};
+use sn_evm::{Amount, AttoTokens};
+use sn_evm::{EvmWallet, EvmWalletError};
+use sn_networking::{GetRecordCfg, NetworkError};
use sn_protocol::{
- storage::{
- try_deserialize_record, try_serialize_record, Chunk, ChunkAddress, RecordHeader, RecordKind,
- },
+ storage::{try_deserialize_record, Chunk, ChunkAddress, RecordHeader, RecordKind},
NetworkAddress,
};
-use sn_transfers::Payment;
-use tokio::task::{JoinError, JoinSet};
-use xor_name::XorName;
-use super::transfers::SendSpendsError;
+/// Raw Data Address (points to a DataMap)
+pub type DataAddr = XorName;
+/// Raw Chunk Address (points to a [`Chunk`])
+pub type ChunkAddr = XorName;
/// Errors that can occur during the put operation.
#[derive(Debug, thiserror::Error)]
pub enum PutError {
#[error("Failed to self-encrypt data.")]
SelfEncryption(#[from] crate::self_encryption::Error),
- #[error("Error serializing data.")]
- Serialization,
+ #[error("Error getting Vault XorName data.")]
+ VaultXorName,
#[error("A network error occurred.")]
Network(#[from] NetworkError),
- #[error("A wallet error occurred.")]
- Wallet(#[from] sn_transfers::WalletError),
#[error("Error occurred during payment.")]
PayError(#[from] PayError),
+ #[error("Failed to serialize {0}")]
+ Serialization(String),
+ #[error("A wallet error occurred.")]
+ Wallet(#[from] sn_evm::EvmError),
}
/// Errors that can occur during the pay operation.
#[derive(Debug, thiserror::Error)]
pub enum PayError {
+ #[error("Could not get store quote for: {0:?} after several retries")]
+ CouldNotGetStoreQuote(XorName),
#[error("Could not get store costs: {0:?}")]
- CouldNotGetStoreCosts(sn_client::networking::NetworkError),
+ CouldNotGetStoreCosts(NetworkError),
#[error("Could not simultaneously fetch store costs: {0:?}")]
JoinError(JoinError),
- #[error("Hot wallet error")]
- WalletError(#[from] sn_transfers::WalletError),
- #[error("Failed to send spends")]
- SendSpendsError(#[from] SendSpendsError),
+ #[error("Wallet error: {0:?}")]
+ EvmWalletError(#[from] EvmWalletError),
+ #[error("Failed to self-encrypt data.")]
+ SelfEncryption(#[from] crate::self_encryption::Error),
}
/// Errors that can occur during the get operation.
@@ -60,17 +67,19 @@ pub enum GetError {
InvalidDataMap(rmp_serde::decode::Error),
#[error("Failed to decrypt data.")]
Decryption(crate::self_encryption::Error),
+ #[error("Failed to deserialize")]
+ Deserialization(#[from] rmp_serde::decode::Error),
#[error("General networking error: {0:?}")]
- Network(#[from] sn_client::networking::NetworkError),
+ Network(#[from] NetworkError),
#[error("General protocol error: {0:?}")]
- Protocol(#[from] sn_client::protocol::Error),
+ Protocol(#[from] sn_protocol::Error),
}
impl Client {
- /// Fetch a piece of self-encrypted data from the network, by its data map
- /// XOR address.
- pub async fn get(&self, data_map_addr: XorName) -> Result {
- let data_map_chunk = self.fetch_chunk(data_map_addr).await?;
+ /// Fetch a blob of data from the network
+ pub async fn data_get(&self, addr: DataAddr) -> Result {
+ info!("Fetching data from Data Address: {addr:?}");
+ let data_map_chunk = self.chunk_get(addr).await?;
let data = self
.fetch_from_data_map_chunk(data_map_chunk.value())
.await?;
@@ -78,247 +87,131 @@ impl Client {
Ok(data)
}
- /// Get a raw chunk from the network.
- pub async fn fetch_chunk(&self, addr: XorName) -> Result {
- tracing::info!("Getting chunk: {addr:?}");
- let key = NetworkAddress::from_chunk_address(ChunkAddress::new(addr)).to_record_key();
-
- let get_cfg = GetRecordCfg {
- get_quorum: Quorum::One,
- retry_strategy: None,
- target_record: None,
- expected_holders: HashSet::new(),
- };
- let record = self.network.get_record_from_network(key, &get_cfg).await?;
- let header = RecordHeader::from_record(&record)?;
- if let RecordKind::Chunk = header.kind {
- let chunk: Chunk = try_deserialize_record(&record)?;
- Ok(chunk)
- } else {
- Err(NetworkError::RecordKindMismatch(RecordKind::Chunk).into())
- }
- }
+ /// Upload a piece of data to the network. This data will be self-encrypted.
+ /// Returns the Data Address at which the data was stored.
+ pub async fn data_put(&self, data: Bytes, wallet: &EvmWallet) -> Result {
+ let now = sn_networking::target_arch::Instant::now();
+ let (data_map_chunk, chunks) = encrypt(data)?;
+ info!(
+ "Uploading datamap chunk to the network at: {:?}",
+ data_map_chunk.address()
+ );
- /// Upload a piece of data to the network. This data will be self-encrypted,
- /// and the data map XOR address will be returned.
- pub async fn put(&mut self, data: Bytes, wallet: &mut HotWallet) -> Result {
- let now = std::time::Instant::now();
- let (map, chunks) = encrypt(data)?;
- tracing::debug!("Encryption took: {:.2?}", now.elapsed());
+ debug!("Encryption took: {:.2?}", now.elapsed());
- let map_xor_name = *map.address().xorname();
+ let map_xor_name = *data_map_chunk.address().xorname();
+ let mut xor_names = vec![map_xor_name];
- let mut xor_names = vec![];
- xor_names.push(map_xor_name);
for chunk in &chunks {
xor_names.push(*chunk.name());
}
- let StoragePaymentResult { skipped_chunks, .. } =
- self.pay(xor_names.into_iter(), wallet).await?;
-
- // TODO: Upload in parallel
- if !skipped_chunks.contains(map.name()) {
- self.upload_chunk(map, wallet).await?;
+ // Pay for all chunks + data map chunk
+ info!("Paying for {} addresses", xor_names.len());
+ let (payment_proofs, _free_chunks) = self
+ .pay(xor_names.into_iter(), wallet)
+ .await
+ .inspect_err(|err| error!("Error paying for data: {err:?}"))?;
+
+ let mut record_count = 0;
+
+ // Upload data map
+ if let Some(proof) = payment_proofs.get(&map_xor_name) {
+ debug!("Uploading data map chunk: {map_xor_name:?}");
+ self.chunk_upload_with_payment(data_map_chunk.clone(), proof.clone())
+ .await
+ .inspect_err(|err| error!("Error uploading data map chunk: {err:?}"))?;
+ record_count += 1;
}
+
+ // Upload the rest of the chunks
+ debug!("Uploading {} chunks", chunks.len());
for chunk in chunks {
- if skipped_chunks.contains(chunk.name()) {
- continue;
+ if let Some(proof) = payment_proofs.get(chunk.name()) {
+ let address = *chunk.address();
+ self.chunk_upload_with_payment(chunk, proof.clone())
+ .await
+ .inspect_err(|err| error!("Error uploading chunk {address:?} :{err:?}"))?;
+ record_count += 1;
}
- self.upload_chunk(chunk, wallet).await?;
}
- Ok(map_xor_name)
- }
-
- // Fetch and decrypt all chunks in the data map.
- async fn fetch_from_data_map(&self, data_map: &DataMap) -> Result {
- let mut encrypted_chunks = vec![];
- for info in data_map.infos() {
- let chunk = self.fetch_chunk(info.dst_hash).await?;
- let chunk = EncryptedChunk {
- index: info.index,
- content: chunk.value,
- };
- encrypted_chunks.push(chunk);
- }
-
- let data = decrypt_full_set(data_map, &encrypted_chunks)
- .map_err(|e| GetError::Decryption(crate::self_encryption::Error::SelfEncryption(e)))?;
-
- Ok(data)
- }
-
- // Unpack a wrapped data map and fetch all bytes using self-encryption.
- async fn fetch_from_data_map_chunk(&self, data_map_bytes: &Bytes) -> Result {
- let mut data_map_level: DataMapLevel =
- rmp_serde::from_slice(data_map_bytes).map_err(GetError::InvalidDataMap)?;
-
- loop {
- let data_map = match &data_map_level {
- DataMapLevel::First(map) => map,
- DataMapLevel::Additional(map) => map,
- };
-
- let data = self.fetch_from_data_map(data_map).await?;
+ if let Some(channel) = self.client_event_sender.as_ref() {
+ let tokens_spent = payment_proofs
+ .values()
+ .map(|proof| proof.quote.cost.as_atto())
+ .sum::();
- match &data_map_level {
- DataMapLevel::First(_) => break Ok(data),
- DataMapLevel::Additional(_) => {
- data_map_level =
- rmp_serde::from_slice(&data).map_err(GetError::InvalidDataMap)?;
- continue;
- }
+ let summary = UploadSummary {
+ record_count,
+ tokens_spent,
};
- }
- }
-
- pub(crate) async fn pay(
- &mut self,
- content_addrs: impl Iterator- ,
- wallet: &mut HotWallet,
- ) -> Result {
- let mut tasks = JoinSet::new();
- for content_addr in content_addrs {
- let network = self.network.clone();
- tasks.spawn(async move {
- // TODO: retry, but where?
- let cost = network
- .get_store_costs_from_network(
- NetworkAddress::from_chunk_address(ChunkAddress::new(content_addr)),
- vec![],
- )
- .await
- .map_err(PayError::CouldNotGetStoreCosts);
-
- tracing::debug!("Storecosts retrieved for {content_addr:?} {cost:?}");
- (content_addr, cost)
- });
- }
- tracing::debug!("Pending store cost tasks: {:?}", tasks.len());
-
- // collect store costs
- let mut cost_map = BTreeMap::default();
- let mut skipped_chunks = vec![];
- while let Some(res) = tasks.join_next().await {
- match res {
- Ok((content_addr, Ok(cost))) => {
- if cost.2.cost == NanoTokens::zero() {
- skipped_chunks.push(content_addr);
- tracing::debug!("Skipped existing chunk {content_addr:?}");
- } else {
- tracing::debug!("Storecost inserted into payment map for {content_addr:?}");
- let _ = cost_map.insert(content_addr, (cost.1, cost.2, cost.0.to_bytes()));
- }
- }
- Ok((content_addr, Err(err))) => {
- tracing::warn!("Cannot get store cost for {content_addr:?} with error {err:?}");
- return Err(err);
- }
- Err(e) => {
- return Err(PayError::JoinError(e));
- }
+ if let Err(err) = channel.send(ClientEvent::UploadComplete(summary)).await {
+ error!("Failed to send client event: {err:?}");
}
}
- let (storage_cost, royalty_fees) = if cost_map.is_empty() {
- (NanoTokens::zero(), NanoTokens::zero())
- } else {
- self.pay_for_records(&cost_map, wallet).await?
- };
- let res = StoragePaymentResult {
- storage_cost,
- royalty_fees,
- skipped_chunks,
- };
- Ok(res)
+ Ok(map_xor_name)
}
- async fn pay_for_records(
- &mut self,
- cost_map: &BTreeMap)>,
- wallet: &mut HotWallet,
- ) -> Result<(NanoTokens, NanoTokens), PayError> {
- // Before wallet progress, there shall be no `unconfirmed_spend_requests`
- self.resend_pending_transactions(wallet).await;
-
- let total_cost = wallet.local_send_storage_payment(cost_map)?;
-
- // send to network
- tracing::trace!("Sending storage payment transfer to the network");
- let spend_attempt_result = self
- .send_spends(wallet.unconfirmed_spend_requests().iter())
- .await;
-
- tracing::trace!("send_spends of {} chunks completed", cost_map.len(),);
+ /// Get a raw chunk from the network.
+ pub async fn chunk_get(&self, addr: ChunkAddr) -> Result {
+ info!("Getting chunk: {addr:?}");
- // Here is bit risky that for the whole bunch of spends to the chunks' store_costs and royalty_fee
- // they will get re-paid again for ALL, if any one of the payment failed to be put.
- if let Err(error) = spend_attempt_result {
- tracing::warn!("The storage payment transfer was not successfully registered in the network: {error:?}. It will be retried later.");
+ let key = NetworkAddress::from_chunk_address(ChunkAddress::new(addr)).to_record_key();
- // if we have a DoubleSpend error, lets remove the CashNote from the wallet
- if let SendSpendsError::DoubleSpendAttemptedForCashNotes(spent_cash_notes) = &error {
- for cash_note_key in spent_cash_notes {
- tracing::warn!(
- "Removing double spends CashNote from wallet: {cash_note_key:?}"
- );
- wallet.mark_notes_as_spent([cash_note_key]);
- wallet.clear_specific_spend_request(*cash_note_key);
- }
- }
+ let get_cfg = GetRecordCfg {
+ get_quorum: Quorum::One,
+ retry_strategy: None,
+ target_record: None,
+ expected_holders: HashSet::new(),
+ is_register: false,
+ };
- wallet.store_unconfirmed_spend_requests()?;
+ let record = self
+ .network
+ .get_record_from_network(key, &get_cfg)
+ .await
+ .inspect_err(|err| error!("Error fetching chunk: {err:?}"))?;
+ let header = RecordHeader::from_record(&record)?;
- return Err(PayError::SendSpendsError(error));
+ if let RecordKind::Chunk = header.kind {
+ let chunk: Chunk = try_deserialize_record(&record)?;
+ Ok(chunk)
} else {
- tracing::info!("Spend has completed: {:?}", spend_attempt_result);
- wallet.clear_confirmed_spend_requests();
+ Err(NetworkError::RecordKindMismatch(RecordKind::Chunk).into())
}
- tracing::trace!("clear up spends of {} chunks completed", cost_map.len(),);
-
- Ok(total_cost)
}
- /// Directly writes Chunks to the network in the form of immutable self encrypted chunks.
- async fn upload_chunk(&self, chunk: Chunk, wallet: &mut HotWallet) -> Result<(), PutError> {
- let xor_name = *chunk.name();
- let (payment, payee) = self.get_recent_payment_for_addr(&xor_name, wallet)?;
+ /// Get the estimated cost of storing a piece of data.
+ pub async fn data_cost(&self, data: Bytes) -> Result {
+ let now = sn_networking::target_arch::Instant::now();
+ let (data_map_chunk, chunks) = encrypt(data)?;
- self.store_chunk(chunk, payee, payment).await?;
+ debug!("Encryption took: {:.2?}", now.elapsed());
- wallet.api().remove_payment_transaction(&xor_name);
+ let map_xor_name = *data_map_chunk.address().xorname();
+ let mut content_addrs = vec![map_xor_name];
- Ok(())
- }
-
- /// Actually store a chunk to a peer.
- async fn store_chunk(
- &self,
- chunk: Chunk,
- payee: PeerId,
- payment: Payment,
- ) -> Result<(), PutError> {
- tracing::debug!("Storing chunk: {chunk:?} to {payee:?}");
-
- let key = chunk.network_address().to_record_key();
-
- let record_kind = RecordKind::ChunkWithPayment;
- let record = Record {
- key: key.clone(),
- value: try_serialize_record(&(payment, chunk.clone()), record_kind)
- .map_err(|_| PutError::Serialization)?
- .to_vec(),
- publisher: None,
- expires: None,
- };
+ for chunk in &chunks {
+ content_addrs.push(*chunk.name());
+ }
- let put_cfg = PutRecordCfg {
- put_quorum: Quorum::One,
- retry_strategy: None,
- use_put_record_to: Some(vec![payee]),
- verification: None,
- };
- Ok(self.network.put_record(record, &put_cfg).await?)
+ info!(
+ "Calculating cost of storing {} chunks. Data map chunk at: {map_xor_name:?}",
+ content_addrs.len()
+ );
+
+ let cost_map = self
+ .get_store_quotes(content_addrs.into_iter())
+ .await
+ .inspect_err(|err| error!("Error getting store quotes: {err:?}"))?;
+ let total_cost = AttoTokens::from_atto(
+ cost_map
+ .values()
+ .map(|quote| quote.2.cost.as_atto())
+ .sum::(),
+ );
+ Ok(total_cost)
}
}
diff --git a/autonomi/src/client/files.rs b/autonomi/src/client/files.rs
deleted file mode 100644
index 21fd2d8d65..0000000000
--- a/autonomi/src/client/files.rs
+++ /dev/null
@@ -1,105 +0,0 @@
-use std::{collections::HashMap, path::PathBuf};
-
-use bytes::Bytes;
-use serde::{Deserialize, Serialize};
-use sn_transfers::HotWallet;
-use walkdir::WalkDir;
-use xor_name::XorName;
-
-use crate::Client;
-
-use super::data::{GetError, PutError};
-
-/// Directory-like structure that containing file paths and their metadata.
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct Root {
- pub map: HashMap,
-}
-
-/// Structure that describes a file on the network. The actual data is stored in
-/// chunks, to be constructed with the address pointing to the data map.
-///
-/// This is similar to ['inodes'](https://en.wikipedia.org/wiki/Inode) in Unix-like filesystems.
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
-pub struct FilePointer {
- data_map: XorName,
- created_at: u64,
- modified_at: u64,
-}
-
-#[derive(Debug, thiserror::Error)]
-pub enum UploadError {
- #[error("Failed to recursively traverse directory")]
- WalkDir(#[from] walkdir::Error),
- #[error("Input/output failure")]
- IoError(#[from] std::io::Error),
- #[error("Failed to upload file")]
- PutError(#[from] PutError),
- #[error("Failed to fetch file")]
- GetError(#[from] GetError),
- #[error("Failed to serialize")]
- Serialization(#[from] rmp_serde::encode::Error),
- #[error("Failed to deserialize")]
- Deserialization(#[from] rmp_serde::decode::Error),
-}
-
-impl Client {
- /// Upload a directory to the network. The directory is recursively walked.
- #[cfg(feature = "fs")]
- pub async fn upload_from_dir(
- &mut self,
- path: PathBuf,
- wallet: &mut HotWallet,
- ) -> Result<(Root, XorName), UploadError> {
- let mut map = HashMap::new();
- for entry in WalkDir::new(path) {
- let entry = entry?;
- if !entry.file_type().is_file() {
- continue;
- }
- let path = entry.path().to_path_buf();
- tracing::info!("Uploading file: {path:?}");
- let file = upload_from_file(self, path.clone(), wallet).await?;
- map.insert(path, file);
- }
-
- let root = Root { map };
- let root_serialized = rmp_serde::to_vec(&root)?;
-
- let xor_name = self.put(Bytes::from(root_serialized), wallet).await?;
-
- Ok((root, xor_name))
- }
-
- /// Fetch a directory from the network.
- pub async fn fetch_root(&mut self, address: XorName) -> Result {
- let data = self.get(address).await?;
- let root: Root = rmp_serde::from_slice(&data[..])?;
-
- Ok(root)
- }
-
- /// Fetch the file pointed to by the given pointer.
- pub async fn fetch_file(&mut self, file: &FilePointer) -> Result {
- let data = self.get(file.data_map).await?;
- Ok(data)
- }
-}
-
-async fn upload_from_file(
- client: &mut Client,
- path: PathBuf,
- wallet: &mut HotWallet,
-) -> Result {
- let data = tokio::fs::read(path).await?;
- let data = Bytes::from(data);
-
- let addr = client.put(data, wallet).await?;
-
- // TODO: Set created_at and modified_at
- Ok(FilePointer {
- data_map: addr,
- created_at: 0,
- modified_at: 0,
- })
-}
diff --git a/autonomi/src/client/fs.rs b/autonomi/src/client/fs.rs
new file mode 100644
index 0000000000..674e03fc2b
--- /dev/null
+++ b/autonomi/src/client/fs.rs
@@ -0,0 +1,195 @@
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
+
+use crate::client::Client;
+use bytes::Bytes;
+use sn_evm::EvmWallet;
+use std::collections::HashMap;
+use std::path::Path;
+
+use super::archive::{Archive, ArchiveAddr};
+use super::data::{DataAddr, GetError, PutError};
+
+/// Errors that can occur during the file upload operation.
+#[cfg(feature = "fs")]
+#[derive(Debug, thiserror::Error)]
+pub enum UploadError {
+ #[error("Failed to recursively traverse directory")]
+ WalkDir(#[from] walkdir::Error),
+ #[error("Input/output failure")]
+ IoError(#[from] std::io::Error),
+ #[error("Failed to upload file")]
+ PutError(#[from] PutError),
+ #[error("Failed to fetch file")]
+ GetError(#[from] GetError),
+ #[error("Failed to serialize")]
+ Serialization(#[from] rmp_serde::encode::Error),
+ #[error("Failed to deserialize")]
+ Deserialization(#[from] rmp_serde::decode::Error),
+}
+
+#[cfg(feature = "fs")]
+/// Errors that can occur during the download operation.
+#[derive(Debug, thiserror::Error)]
+pub enum DownloadError {
+ #[error("Failed to download file")]
+ GetError(#[from] GetError),
+ #[error("IO failure")]
+ IoError(#[from] std::io::Error),
+}
+
+impl Client {
+ /// Download file from network to local file system
+ pub async fn file_download(
+ &self,
+ data_addr: DataAddr,
+ to_dest: &Path,
+ ) -> Result<(), DownloadError> {
+ let data = self.data_get(data_addr).await?;
+ if let Some(parent) = to_dest.parent() {
+ tokio::fs::create_dir_all(parent).await?;
+ }
+ tokio::fs::write(to_dest, data).await?;
+ Ok(())
+ }
+
+ /// Download directory from network to local file system
+ pub async fn dir_download(
+ &self,
+ archive_addr: ArchiveAddr,
+ to_dest: &Path,
+ ) -> Result<(), DownloadError> {
+ let archive = self.archive_get(archive_addr).await?;
+ for (path, addr) in archive.map {
+ self.file_download(addr, &to_dest.join(path)).await?;
+ }
+ Ok(())
+ }
+
+ /// Download either a file or a directory depending on the data present at the provided address.
+ pub async fn download_file_or_dir(
+ &self,
+ address: DataAddr,
+ to_dest: &Path,
+ ) -> Result<(), DownloadError> {
+ let data = self.data_get(address).await?;
+
+ if let Ok(archive) = Archive::from_bytes(&data) {
+ info!("Got an Archive from bytes, unpacking directory to {to_dest:?}");
+ for (path, addr) in archive.map {
+ let dest = to_dest.join(path);
+
+ #[cfg(feature = "loud")]
+ println!("Downloading file: {addr:?} to {dest:?}");
+
+ debug!("Downloading archived file: {addr:?} to {dest:?}");
+ self.file_download(addr, &dest).await?;
+ }
+ } else {
+ info!("The downloaded data is not an Archive, saving it as a file.");
+ #[cfg(feature = "loud")]
+ println!("Downloading file: {address:?} to {to_dest:?}");
+ if let Some(parent) = to_dest.parent() {
+ tokio::fs::create_dir_all(parent).await?;
+ }
+ tokio::fs::write(to_dest, data).await?;
+ }
+
+ Ok(())
+ }
+
+ /// Upload a directory to the network. The directory is recursively walked.
+ /// Reads all files, splits into chunks, uploads chunks, uploads datamaps, uploads archive, returns ArchiveAddr (pointing to the archive)
+ pub async fn dir_upload(
+ &self,
+ dir_path: &Path,
+ wallet: &EvmWallet,
+ ) -> Result {
+ let mut map = HashMap::new();
+
+ for entry in walkdir::WalkDir::new(dir_path) {
+ let entry = entry?;
+
+ if !entry.file_type().is_file() {
+ continue;
+ }
+
+ let path = entry.path();
+ tracing::info!("Uploading file: {path:?}");
+ #[cfg(feature = "loud")]
+ println!("Uploading file: {path:?}");
+ let file = self.file_upload(path, wallet).await?;
+
+ map.insert(path.to_path_buf(), file);
+ }
+
+ let archive = Archive { map };
+ let archive_serialized = archive.into_bytes()?;
+
+ let arch_addr = self.data_put(archive_serialized, wallet).await?;
+
+ Ok(arch_addr)
+ }
+
+ /// Upload a file to the network.
+ /// Reads file, splits into chunks, uploads chunks, uploads datamap, returns DataAddr (pointing to the datamap)
+ pub async fn file_upload(
+ &self,
+ path: &Path,
+ wallet: &EvmWallet,
+ ) -> Result {
+ let data = tokio::fs::read(path).await?;
+ let data = Bytes::from(data);
+ let addr = self.data_put(data, wallet).await?;
+ Ok(addr)
+ }
+
+ /// Get the cost to upload a file/dir to the network.
+ /// quick and dirty implementation, please refactor once files are cleanly implemented
+ pub async fn file_cost(&self, path: &Path) -> Result {
+ let mut map = HashMap::new();
+ let mut total_cost = sn_evm::Amount::ZERO;
+
+ for entry in walkdir::WalkDir::new(path) {
+ let entry = entry?;
+
+ if !entry.file_type().is_file() {
+ continue;
+ }
+
+ let path = entry.path().to_path_buf();
+ tracing::info!("Cost for file: {path:?}");
+
+ let data = tokio::fs::read(&path).await?;
+ let file_bytes = Bytes::from(data);
+ let file_cost = self.data_cost(file_bytes.clone()).await.expect("TODO");
+
+ total_cost += file_cost.as_atto();
+
+ // re-do encryption to get the correct map xorname here
+ // this code needs refactor
+ let now = sn_networking::target_arch::Instant::now();
+ let (data_map_chunk, _) = crate::self_encryption::encrypt(file_bytes).expect("TODO");
+ tracing::debug!("Encryption took: {:.2?}", now.elapsed());
+ let map_xor_name = *data_map_chunk.address().xorname();
+
+ map.insert(path, map_xor_name);
+ }
+
+ let root = Archive { map };
+ let root_serialized = rmp_serde::to_vec(&root).expect("TODO");
+
+ let archive_cost = self
+ .data_cost(Bytes::from(root_serialized))
+ .await
+ .expect("TODO");
+
+ total_cost += archive_cost.as_atto();
+ Ok(total_cost.into())
+ }
+}
diff --git a/autonomi/src/client/mod.rs b/autonomi/src/client/mod.rs
index d8e80620b0..68dfe0d50a 100644
--- a/autonomi/src/client/mod.rs
+++ b/autonomi/src/client/mod.rs
@@ -1,26 +1,43 @@
-use std::{collections::HashSet, time::Duration};
+// Copyright 2024 MaidSafe.net limited.
+//
+// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
+// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
+// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. Please review the Licences for the specific language governing
+// permissions and limitations relating to use of the SAFE Network Software.
-use libp2p::{identity::Keypair, Multiaddr};
-use sn_client::networking::{multiaddr_is_global, Network, NetworkBuilder, NetworkEvent};
-use sn_protocol::{version::IDENTIFY_PROTOCOL_STR, CLOSE_GROUP_SIZE};
-use tokio::{sync::mpsc::Receiver, time::interval};
+pub mod address;
#[cfg(feature = "data")]
-#[cfg_attr(docsrs, doc(cfg(feature = "data")))]
-mod data;
-#[cfg(feature = "files")]
-#[cfg_attr(docsrs, doc(cfg(feature = "files")))]
-mod files;
+pub mod archive;
+#[cfg(feature = "data")]
+pub mod data;
+#[cfg(feature = "fs")]
+pub mod fs;
#[cfg(feature = "registers")]
-#[cfg_attr(docsrs, doc(cfg(feature = "registers")))]
-mod registers;
-#[cfg(feature = "transfers")]
-#[cfg_attr(docsrs, doc(cfg(feature = "transfers")))]
-mod transfers;
+pub mod registers;
+#[cfg(feature = "vault")]
+pub mod vault;
+
+#[cfg(target_arch = "wasm32")]
+pub mod wasm;
+
+// private module with utility functions
+mod utils;
+
+pub use sn_evm::Amount;
+
+use libp2p::{identity::Keypair, Multiaddr};
+use sn_networking::{interval, multiaddr_is_global, Network, NetworkBuilder, NetworkEvent};
+use sn_protocol::{version::IDENTIFY_PROTOCOL_STR, CLOSE_GROUP_SIZE};
+use std::{collections::HashSet, sync::Arc, time::Duration};
+use tokio::sync::mpsc;
/// Time before considering the connection timed out.
pub const CONNECT_TIMEOUT_SECS: u64 = 20;
+const CLIENT_EVENT_CHANNEL_SIZE: usize = 100;
+
/// Represents a connection to the Autonomi network.
///
/// # Example
@@ -28,7 +45,7 @@ pub const CONNECT_TIMEOUT_SECS: u64 = 20;
/// To connect to the network, use [`Client::connect`].
///
/// ```no_run
-/// # use autonomi::Client;
+/// # use autonomi::client::Client;
/// # #[tokio::main]
/// # async fn main() -> Result<(), Box> {
/// let peers = ["/ip4/127.0.0.1/udp/1234/quic-v1".parse()?];
@@ -39,6 +56,7 @@ pub const CONNECT_TIMEOUT_SECS: u64 = 20;
#[derive(Clone)]
pub struct Client {
pub(crate) network: Network,
+ pub(crate) client_event_sender: Arc