diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 3ff9018372c9..6e27a89c3044 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -4,21 +4,41 @@ on: push: branches: - stable + workflow_dispatch: + inputs: + ref: + description: 'Ref to deploy, defaults to `unstable`' + required: false + default: 'unstable' + type: string jobs: docs: runs-on: buildjet-4vcpu-ubuntu-2204 + env: + DEPLOY_REF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || 'stable' }} steps: - # - Uses YAML anchors in the future + # Log out the ref being deployed + - name: Log Deployment Ref + if: github.event_name == 'workflow_dispatch' + run: | + echo "Deploying ref: $DEPLOY_REF" + + # Checkout the correct ref being deployed - uses: actions/checkout@v3 + with: + ref: ${{ env.DEPLOY_REF }} + - uses: actions/setup-node@v3 with: node-version: 20 check-latest: true cache: yarn + - name: Node.js version id: node run: echo "v8CppApiVersion=$(node --print "process.versions.modules")" >> $GITHUB_OUTPUT + - name: Restore dependencies uses: actions/cache@master id: cache-deps @@ -27,13 +47,14 @@ jobs: node_modules packages/*/node_modules key: ${{ runner.os }}-${{ steps.node.outputs.v8CppApiVersion }}-${{ hashFiles('**/yarn.lock', '**/package.json') }} + - name: Install & build if: steps.cache-deps.outputs.cache-hit != 'true' run: yarn install --frozen-lockfile && yarn build + - name: Build run: yarn build if: steps.cache-deps.outputs.cache-hit == 'true' - # - name: Build and collect docs run: yarn build:docs diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 76d62ae576be..deaf19329872 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -160,8 +160,8 @@ jobs: check-latest: true cache: yarn - # Remove when finished debugging core dumps - - uses: './.github/actions/setup-debug-node' + # # Remove when finished debugging core dumps + # - uses: './.github/actions/setup-debug-node' - name: Restore 
build cache id: cache-primes-restore @@ -184,13 +184,14 @@ key: spec-test-data-${{ hashFiles('packages/validator/test/spec/params.ts') }} - name: Unit tests - id: unit_tests + # id: unit_tests # Revert to "yarn test:unit" when finished debugging core dumps - run: sudo sh -c "ulimit -c unlimited && /usr/bin/node-with-debug $(which yarn) test:unit" + # run: sudo sh -c "ulimit -c unlimited && /usr/bin/node-with-debug $(which yarn) test:unit" + run: yarn test:unit - # Remove when finished debugging core dumps - - uses: './.github/actions/core-dump' - if: ${{ failure() && steps.unit_tests.conclusion == 'failure' }} + # # Remove when finished debugging core dumps + # - uses: './.github/actions/core-dump' + # if: ${{ failure() && steps.unit_tests.conclusion == 'failure' }} - name: Upload coverage data run: yarn coverage @@ -271,9 +272,9 @@ key: ${{ runner.os }}-node-${{ matrix.node }}-${{ github.sha }} fail-on-cache-miss: true - name: Install Chrome browser - run: npx @puppeteer/browsers install chrome + run: npx @puppeteer/browsers install chrome@latest --path /tmp - name: Install Firefox browser - run: npx @puppeteer/browsers install firefox + run: npx @puppeteer/browsers install firefox@latest --path /tmp - name: Browser tests run: | export DISPLAY=':99.0' diff --git a/.gitignore b/.gitignore index a85d4af7794e..a0deed473c4a 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ packages/**/typedocs docs/pages/**/*-cli.md docs/pages/assets docs/pages/images +docs/pages/security.md docs/pages/lightclient-prover/lightclient.md docs/pages/lightclient-prover/prover.md docs/pages/api/api-reference.md diff --git a/RELEASE.md b/RELEASE.md index 440b1a13fe82..b38a4f8562f6 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -30,7 +30,7 @@ To start a new release, one of the Lodestar developers will communicate this via - This script may alternatively be run on the checked out `HEAD`: - `git checkout 9fceb02` - `yarn release:create-rc 1.1.0` -- Open draft PR from 
`rc/v1.1.0` to `stable` with title `v1.1.0 release`. +- Open draft PR from `rc/v1.1.0` to `stable` with title `chore: v1.1.0 release`. #### Manual steps (for example version `v1.1.0`, commit `9fceb02`): @@ -42,7 +42,7 @@ To start a new release, one of the Lodestar developers will communicate this via - Commit changes - `git commit -am "v1.1.0"` - `git push origin rc/v1.1.0` -- Open draft PR from `rc/v1.1.0` to `stable` with title `v1.1.0 release`. +- Open draft PR from `rc/v1.1.0` to `stable` with title `chore: v1.1.0 release`. ### 2. Tag release candidate @@ -76,13 +76,13 @@ For example: After 3-5 days of testing, is performance equal to or better than l - **Yes**: Continue to the next release step - **No**: If it a small issue fixable quickly (hot-fix)? - **Yes**: Merge fix(es) to `unstable`, push the fix(es) to `rc/v1.1.0` branch, go to step 2, incrementing the rc version - - **No**: abort the release. Close the `v1.1.0 release` PR, delete the branch, and start the whole release process over. + - **No**: abort the release. Close the `chore: v1.1.0 release` PR, delete the branch, and start the whole release process over. ### 4. Merge release candidate - Ensure step 2 testing is successful and there is sufficient consensus to release `v1.1.0`. -- Approving the `v1.1.0 release` PR means a team member marks the release as safe, after personally reviewing and / or testing it. -- Merge `v1.1.0 release` PR to stable **with "merge commit"** strategy to preserve all history. +- Approving the `chore: v1.1.0 release` PR means a team member marks the release as safe, after personally reviewing and / or testing it. +- Merge `chore: v1.1.0 release` PR to stable **with "merge commit"** strategy to preserve all history. - Merge stable `stable` into `unstable` **with merge commit** strategy. Due to branch protections in `unstable` must open a PR. If there are conflicts, those must be resolved manually. 
Gitflow may cause changes that conflict between stable and unstable, for example due to a hotfix that is backported. If that happens, disable branch protections in unstable, merge locally fixing conflicts, run lint + tests, push, and re-enable branch protections. ### 5. Tag stable release @@ -130,7 +130,7 @@ A similar process for a stable release is used, with the three differences. - Switch to the hotfix release branch and cherrypick the inclusion(s) from the `unstable` branch to the hotfix release. - `git checkout rc/v1.1.1` - `git cherry-pick {commit}` -- Open draft PR from `rc/v1.1.1` to `stable` with the title `v1.1.1 release`. +- Open draft PR from `rc/v1.1.1` to `stable` with the title `chore: v1.1.1 release`. #### Manual steps (for example version `v1.1.1`, commit `8eb8dce`): @@ -144,7 +144,7 @@ A similar process for a stable release is used, with the three differences. - Commit changes - `git commit -am "v1.1.1"` - `git push origin rc/v1.1.1` - Open draft PR from `rc/v1.1.1` to `stable` with the title `v1.1.1 release`. + Open draft PR from `rc/v1.1.1` to `stable` with the title `chore: v1.1.1 release`. ### 2. 
Tag release candidate diff --git a/dashboards/lodestar_block_processor.json b/dashboards/lodestar_block_processor.json index d1a856f2f71d..8e68d611cc0d 100644 --- a/dashboards/lodestar_block_processor.json +++ b/dashboards/lodestar_block_processor.json @@ -110,6 +110,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -192,6 +193,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -276,6 +278,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -359,6 +362,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -442,6 +446,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -525,6 +530,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -607,6 +613,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -758,7 +765,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -862,7 +869,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -942,7 +949,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -987,6 +994,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -1072,6 +1080,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1148,22 +1157,22 @@ "axisPlacement": "auto", "barAlignment": 0, "drawStyle": 
"line", - "fillOpacity": 22, - "gradientMode": "opacity", + "fillOpacity": 0, + "gradientMode": "none", "hideFrom": { - "graph": false, "legend": false, "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, - "pointSize": 4, + "pointSize": 5, "scaleDistribution": { "type": "linear" }, - "showPoints": "never", - "spanNulls": true, + "showPoints": "auto", + "spanNulls": false, "stacking": { "group": "A", "mode": "none" @@ -1183,21 +1192,19 @@ "x": 0, "y": 50 }, - "id": 524, + "id": 534, "options": { - "graph": {}, "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", - "showLegend": false + "showLegend": true }, "tooltip": { "mode": "multi", "sort": "none" } }, - "pluginVersion": "7.4.5", "targets": [ { "datasource": { @@ -1205,15 +1212,14 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "exemplar": false, - "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])", - "interval": "", - "legendFormat": "epoch transition", + "expr": "rate(lodestar_stfn_epoch_transition_step_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_step_seconds_count[$rate_interval])", + "instant": false, + "legendFormat": "{{step}}", "range": true, "refId": "A" } ], - "title": "Epoch transition commit step avg time", + "title": "Epoch Transition By Steps", "type": "timeseries" }, { @@ -1241,6 +1247,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1325,9 +1332,10 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, - "pointSize": 5, + "pointSize": 4, "scaleDistribution": { "type": "linear" }, @@ -1342,25 +1350,9 @@ } }, "mappings": [], - "unit": "percentunit" + "unit": "s" }, - "overrides": [ - { - "matcher": { - "id": "byName", - "options": "process block time" - }, - 
"properties": [ - { - "id": "color", - "value": { - "fixedColor": "orange", - "mode": "fixed" - } - } - ] - } - ] + "overrides": [] }, "gridPos": { "h": 8, @@ -1368,7 +1360,7 @@ "x": 0, "y": 58 }, - "id": 122, + "id": 524, "options": { "graph": {}, "legend": { @@ -1389,14 +1381,16 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "editorMode": "code", "exemplar": false, - "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])", + "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])", "interval": "", - "legendFormat": "process block time", + "legendFormat": "epoch transition", + "range": true, "refId": "A" } ], - "title": "Epoch transition utilization rate", + "title": "Epoch transition commit step avg time", "type": "timeseries" }, { @@ -1424,6 +1418,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1523,6 +1518,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1540,20 +1536,19 @@ } }, "mappings": [], - "min": 0, - "unit": "none" + "unit": "percentunit" }, "overrides": [ { "matcher": { "id": "byName", - "options": "number of epoch transition" + "options": "process block time" }, "properties": [ { "id": "color", "value": { - "fixedColor": "yellow", + "fixedColor": "orange", "mode": "fixed" } } @@ -1567,7 +1562,7 @@ "x": 0, "y": 66 }, - "id": 124, + "id": 122, "options": { "graph": {}, "legend": { @@ -1589,13 +1584,13 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": false, - "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])", + "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])", "interval": "", - "legendFormat": "number of epoch transition", + "legendFormat": "process block time", "refId": "A" } ], - "title": "Epoch transitions / epoch", + "title": "Epoch 
transition utilization rate", "type": "timeseries" }, { @@ -1623,6 +1618,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1722,6 +1718,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1739,9 +1736,26 @@ } }, "mappings": [], - "unit": "s" + "min": 0, + "unit": "none" }, - "overrides": [] + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "number of epoch transition" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "yellow", + "mode": "fixed" + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ -1749,7 +1763,7 @@ "x": 0, "y": 74 }, - "id": 526, + "id": 124, "options": { "graph": {}, "legend": { @@ -1770,15 +1784,14 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "editorMode": "code", - "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])", + "exemplar": false, + "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])", "interval": "", - "legendFormat": "__auto", - "range": true, + "legendFormat": "number of epoch transition", "refId": "A" } ], - "title": "State hash_tree_root avg time", + "title": "Epoch transitions / epoch", "type": "timeseries" }, { @@ -1806,6 +1819,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1901,6 +1915,91 @@ "title": "State SSZ cache miss rate on preState", "type": "timeseries" }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 22, + "gradientMode": "opacity", + 
"hideFrom": { + "graph": false, + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 82 + }, + "id": 526, + "options": { + "graph": {}, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "7.4.5", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])", + "interval": "", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "State hash_tree_root avg time", + "type": "timeseries" + }, { "collapsed": false, "datasource": { @@ -1911,7 +2010,7 @@ "h": 1, "w": 24, "x": 0, - "y": 82 + "y": 90 }, "id": 92, "panels": [], @@ -1936,7 +2035,7 @@ "h": 3, "w": 24, "x": 0, - "y": 83 + "y": 91 }, "id": 154, "options": { @@ -1948,7 +2047,7 @@ "content": "Verifies signature sets in a thread pool of workers. 
Must ensure that signatures are verified fast and efficiently.", "mode": "markdown" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -1989,6 +2088,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2014,7 +2114,7 @@ "h": 8, "w": 12, "x": 0, - "y": 86 + "y": 94 }, "id": 94, "options": { @@ -2069,6 +2169,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2093,7 +2194,7 @@ "h": 8, "w": 12, "x": 12, - "y": 86 + "y": 94 }, "id": 519, "options": { @@ -2150,6 +2251,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2175,7 +2277,7 @@ "h": 8, "w": 12, "x": 0, - "y": 94 + "y": 102 }, "id": 151, "options": { @@ -2236,6 +2338,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2261,7 +2364,7 @@ "h": 8, "w": 12, "x": 12, - "y": 94 + "y": 102 }, "id": 96, "options": { @@ -2322,6 +2425,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2347,7 +2451,7 @@ "h": 5, "w": 12, "x": 0, - "y": 102 + "y": 110 }, "id": 150, "options": { @@ -2408,6 +2512,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2433,7 +2538,7 @@ "h": 8, "w": 12, "x": 12, - "y": 102 + "y": 110 }, "id": 95, "options": { @@ -2494,6 +2599,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2520,7 +2626,7 @@ "h": 6, "w": 12, "x": 0, - "y": 107 + "y": 115 }, "id": 148, "options": { @@ -2591,6 +2697,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2616,7 
+2723,7 @@ "h": 7, "w": 12, "x": 12, - "y": 110 + "y": 118 }, "id": 147, "options": { @@ -2677,6 +2784,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2702,7 +2810,7 @@ "h": 5, "w": 12, "x": 0, - "y": 113 + "y": 121 }, "id": 98, "options": { @@ -2759,6 +2867,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2800,7 +2909,7 @@ "h": 7, "w": 12, "x": 12, - "y": 117 + "y": 125 }, "id": 153, "options": { @@ -2870,6 +2979,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2895,7 +3005,7 @@ "h": 6, "w": 12, "x": 0, - "y": 118 + "y": 126 }, "id": 97, "options": { @@ -2937,7 +3047,7 @@ "h": 1, "w": 24, "x": 0, - "y": 124 + "y": 132 }, "id": 309, "panels": [], @@ -2977,6 +3087,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3032,7 +3143,7 @@ "h": 8, "w": 12, "x": 0, - "y": 125 + "y": 133 }, "id": 305, "options": { @@ -3088,6 +3199,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3128,7 +3240,7 @@ "h": 8, "w": 12, "x": 12, - "y": 125 + "y": 133 }, "id": 307, "options": { @@ -3195,6 +3307,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3219,7 +3332,7 @@ "h": 8, "w": 12, "x": 0, - "y": 133 + "y": 141 }, "id": 335, "options": { @@ -3286,6 +3399,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3310,7 +3424,7 @@ "h": 8, "w": 12, "x": 12, - "y": 133 + "y": 141 }, "id": 334, "options": { @@ -3351,7 +3465,7 @@ "h": 1, "w": 24, "x": 0, - "y": 141 + "y": 149 }, "id": 136, "panels": [], @@ -3393,6 +3507,7 @@ "tooltip": false, 
"viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3418,7 +3533,7 @@ "h": 8, "w": 12, "x": 0, - "y": 142 + "y": 150 }, "id": 130, "options": { @@ -3477,6 +3592,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3517,7 +3633,7 @@ "h": 8, "w": 12, "x": 12, - "y": 142 + "y": 150 }, "id": 140, "options": { @@ -3577,6 +3693,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3618,7 +3735,7 @@ "h": 8, "w": 12, "x": 0, - "y": 150 + "y": 158 }, "id": 132, "options": { @@ -3701,6 +3818,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineStyle": { "fill": "solid" @@ -3745,7 +3863,7 @@ "h": 8, "w": 12, "x": 12, - "y": 150 + "y": 158 }, "id": 138, "options": { @@ -3817,6 +3935,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3866,7 +3985,7 @@ "h": 8, "w": 12, "x": 0, - "y": 158 + "y": 166 }, "id": 531, "options": { @@ -3957,6 +4076,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3981,7 +4101,7 @@ "h": 8, "w": 12, "x": 12, - "y": 158 + "y": 166 }, "id": 533, "options": { @@ -4026,7 +4146,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_block_production.json b/dashboards/lodestar_block_production.json index b999e47a33d4..96ab44c6a550 100644 --- a/dashboards/lodestar_block_production.json +++ b/dashboards/lodestar_block_production.json @@ -54,180 +54,206 @@ "liveNow": false, "panels": [ { - "type": "timeseries", - "title": "Full block production avg time with steps", + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, 
"gridPos": { + "h": 1, + "w": 24, "x": 0, - "y": 1, - "w": 12, - "h": 8 + "y": 0 }, + "id": 166, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], + "title": "Block Production", + "type": "row" + }, + { "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "opacity", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, "id": 546, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, "targets": [ { "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "refId": "proposerSlashing", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])", - "range": true, - "instant": false, - "hide": false, "editorMode": "code", + "exemplar": false, + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"proposerSlashing\"}[$rate_interval])", + "hide": false, + "instant": 
false, "legendFormat": "{{step}}", - "exemplar": false + "range": true, + "refId": "proposerSlashing" }, { - "refId": "attesterSlashings", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"attesterSlashings\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "attesterSlashings" }, { - "refId": "voluntaryExits", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"voluntaryExits\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "voluntaryExits" }, { - "refId": "blsToExecutionChanges", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])", - "range": true, - 
"instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"blsToExecutionChanges\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "blsToExecutionChanges" }, { - "refId": "attestations", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"attestations\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"attestations\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "attestations" }, { - "refId": "eth1DataAndDeposits", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": 
"rate(beacon_block_production_execution_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "eth1DataAndDeposits" }, { - "refId": "syncAggregate", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"syncAggregate\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"syncAggregate\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "syncAggregate" }, { - "refId": "executionPayload", - "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"executionPayload\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"executionPayload\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "executionPayload" } ], - "options": { - "tooltip": { - "mode": "multi", - "sort": 
"none" - }, - "legend": { - "showLegend": true, - "displayMode": "list", - "placement": "bottom", - "calcs": [] - } - }, - "fieldConfig": { - "defaults": { - "custom": { - "drawStyle": "line", - "lineInterpolation": "linear", - "barAlignment": 0, - "lineWidth": 1, - "fillOpacity": 30, - "gradientMode": "opacity", - "spanNulls": false, - "insertNulls": false, - "showPoints": "auto", - "pointSize": 5, - "stacking": { - "mode": "normal", - "group": "A" - }, - "axisPlacement": "auto", - "axisLabel": "", - "axisColorMode": "text", - "scaleDistribution": { - "type": "linear" - }, - "axisCenteredZero": false, - "hideFrom": { - "tooltip": false, - "viz": false, - "legend": false - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "color": { - "mode": "palette-classic" - }, - "mappings": [], - "unit": "s" - }, - "overrides": [] - }, - "transformations": [] + "title": "Full block production avg time with steps", + "transformations": [], + "type": "timeseries" }, { "datasource": { @@ -236,62 +262,62 @@ }, "fieldConfig": { "defaults": { + "color": { + "mode": "palette-classic" + }, "custom": { - "drawStyle": "line", - "lineInterpolation": "linear", + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", "barAlignment": 0, - "lineWidth": 1, + "drawStyle": "line", "fillOpacity": 30, "gradientMode": "opacity", - "spanNulls": false, + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, "insertNulls": false, - "showPoints": "auto", + "lineInterpolation": "linear", + "lineWidth": 1, "pointSize": 5, - "stacking": { - "mode": "normal", - "group": "A" - }, - "axisPlacement": "auto", - "axisLabel": "", - "axisColorMode": "text", "scaleDistribution": { "type": "linear" }, - "axisCenteredZero": false, - "hideFrom": { - "tooltip": false, - "viz": false, - "legend": false + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" }, "thresholdsStyle": { "mode": "off" } }, - "color": { 
- "mode": "palette-classic" - }, "mappings": [], "unit": "s" }, "overrides": [] }, "gridPos": { - "x": 12, - "y": 1, + "h": 8, "w": 12, - "h": 8 + "x": 12, + "y": 1 }, "id": 547, "options": { - "tooltip": { - "mode": "multi", - "sort": "none" - }, "legend": { - "showLegend": true, + "calcs": [], "displayMode": "list", "placement": "bottom", - "calcs": [] + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" } }, "targets": [ @@ -300,136 +326,110 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "refId": "proposerSlashing", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])", - "range": true, - "instant": false, - "hide": false, "editorMode": "code", + "exemplar": false, + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"proposerSlashing\"}[$rate_interval])", + "hide": false, + "instant": false, "legendFormat": "{{step}}", - "exemplar": false - }, - { - "refId": "attesterSlashings", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])", "range": true, - "instant": false, - "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" - }, - "hide": false, - "editorMode": "code", - "legendFormat": "{{step}}" + "refId": "proposerSlashing" }, { - "refId": "voluntaryExits", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": 
"${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"attesterSlashings\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "attesterSlashings" }, { - "refId": "blsToExecutionChanges", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"voluntaryExits\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "voluntaryExits" }, { - "refId": "attestations", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"attestations\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"blsToExecutionChanges\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": 
true, + "refId": "blsToExecutionChanges" }, { - "refId": "eth1DataAndDeposits", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"attestations\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "attestations" }, { - "refId": "syncAggregate", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"syncAggregate\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "eth1DataAndDeposits" }, { - "refId": "executionPayload", - "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"executionPayload\"}[$rate_interval])", - "range": true, - "instant": false, "datasource": { - "uid": "${DS_PROMETHEUS}", - "type": "prometheus" + "type": 
"prometheus", + "uid": "${DS_PROMETHEUS}" }, - "hide": false, "editorMode": "code", - "legendFormat": "{{step}}" - } - ], - "title": "Blinded block production avg time with steps", - "type": "timeseries", - "transformations": [] - }, - { - "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 166, - "panels": [], - "targets": [ + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"syncAggregate\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "syncAggregate" + }, { "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "refId": "A" + "editorMode": "code", + "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"executionPayload\"}[$rate_interval])", + "hide": false, + "instant": false, + "legendFormat": "{{step}}", + "range": true, + "refId": "executionPayload" } ], - "title": "Block Production", - "type": "row" + "title": "Blinded block production avg time with steps", + "transformations": [], + "type": "timeseries" }, { "datasource": { @@ -455,6 +455,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -540,7 +541,7 @@ "h": 8, "w": 12, "x": 0, - "y": 1 + "y": 9 }, "id": 168, "options": { @@ -611,6 +612,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -636,7 +638,7 @@ "h": 8, "w": 12, "x": 12, - "y": 1 + "y": 9 }, "id": 170, "options": { @@ -657,11 +659,13 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "editorMode": "code", "exemplar": false, "expr": 
"rate(beacon_block_production_seconds_sum[$rate_interval])\n/\nrate(beacon_block_production_seconds_count[$rate_interval])", "format": "heatmap", "interval": "", - "legendFormat": "{{instance}} - {{source}}", + "legendFormat": "{{source}}", + "range": true, "refId": "A" } ], @@ -692,6 +696,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -716,7 +721,7 @@ "h": 8, "w": 12, "x": 0, - "y": 9 + "y": 17 }, "id": 528, "options": { @@ -780,7 +785,7 @@ "h": 8, "w": 12, "x": 12, - "y": 9 + "y": 17 }, "heatmap": {}, "hideZeroBuckets": false, @@ -826,7 +831,7 @@ "unit": "s" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -882,6 +887,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -906,7 +912,7 @@ "h": 8, "w": 12, "x": 0, - "y": 17 + "y": 25 }, "id": 511, "options": { @@ -1036,7 +1042,7 @@ "h": 8, "w": 12, "x": 12, - "y": 17 + "y": 25 }, "hiddenSeries": false, "id": 378, @@ -1056,7 +1062,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "pointradius": 0.5, "points": true, "renderer": "flot", @@ -1131,7 +1137,7 @@ "h": 8, "w": 12, "x": 0, - "y": 25 + "y": 33 }, "hiddenSeries": false, "id": 376, @@ -1153,7 +1159,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "pointradius": 0.5, "points": true, "renderer": "flot", @@ -1233,6 +1239,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1257,7 +1264,7 @@ "h": 8, "w": 12, "x": 12, - "y": 25 + "y": 33 }, "id": 532, "options": { @@ -1334,6 +1341,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1358,7 +1366,7 @@ "h": 7, "w": 12, "x": 0, - "y": 33 + 
"y": 41 }, "id": 531, "options": { @@ -1441,6 +1449,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1465,7 +1474,7 @@ "h": 7, "w": 12, "x": 12, - "y": 33 + "y": 41 }, "id": 534, "options": { @@ -1516,7 +1525,7 @@ "h": 6, "w": 12, "x": 0, - "y": 40 + "y": 48 }, "id": 535, "options": { @@ -1620,7 +1629,7 @@ "h": 8, "w": 12, "x": 12, - "y": 40 + "y": 48 }, "id": 537, "options": { @@ -1669,7 +1678,7 @@ "h": 1, "w": 24, "x": 0, - "y": 48 + "y": 56 }, "id": 541, "panels": [], @@ -1700,7 +1709,7 @@ "h": 8, "w": 12, "x": 0, - "y": 49 + "y": 57 }, "id": 543, "options": { @@ -1778,7 +1787,7 @@ "h": 8, "w": 12, "x": 12, - "y": 49 + "y": 57 }, "id": 545, "options": { @@ -1880,7 +1889,7 @@ "h": 8, "w": 12, "x": 0, - "y": 57 + "y": 65 }, "id": 539, "options": { @@ -1925,7 +1934,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_bls_thread_pool.json b/dashboards/lodestar_bls_thread_pool.json index a8021ace1102..160312a92d57 100644 --- a/dashboards/lodestar_bls_thread_pool.json +++ b/dashboards/lodestar_bls_thread_pool.json @@ -13,7 +13,10 @@ "list": [ { "builtIn": 1, - "datasource": "-- Grafana --", + "datasource": { + "type": "datasource", + "uid": "grafana" + }, "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", @@ -32,7 +35,6 @@ "fiscalYearStartMonth": 0, "graphTooltip": 1, "id": null, - "iteration": 1661342107287, "links": [ { "asDropdown": true, @@ -53,6 +55,10 @@ "panels": [ { "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 1, "w": 24, @@ -61,10 +67,23 @@ }, "id": 92, "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], "title": "BLS worker pool", "type": "row" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, 
"gridPos": { "h": 3, "w": 24, @@ -73,12 +92,21 @@ }, "id": 154, "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, "content": "Verifies signature sets in a thread pool of workers. Must ensure that signatures are verified fast and efficiently.", "mode": "markdown" }, - "pluginVersion": "8.4.2", + "pluginVersion": "10.1.1", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])", "interval": "", "legendFormat": "{{workerId}}", @@ -89,6 +117,10 @@ "type": "text" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Utilization rate = total CPU time per worker per second. Graph is stacked. This ratios should be high since BLS verification is the limiting factor in the node's throughput.", "fieldConfig": { "defaults": { @@ -96,6 +128,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -107,6 +141,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -139,7 +174,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -149,6 +185,10 @@ "pluginVersion": "8.4.0-beta1", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])", "interval": "", "legendFormat": "{{workerId}}", @@ -159,12 +199,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", 
"barAlignment": 0, @@ -176,6 +222,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -207,7 +254,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -231,6 +279,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average sync time to validate a single signature set. Note that the set may have been verified in batch. In most normal hardware this value should be ~1-2ms", "fieldConfig": { "defaults": { @@ -238,6 +290,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -250,6 +304,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -282,8 +337,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -296,6 +352,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "sum(rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval]))/sum(rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval]))", "interval": "", "legendFormat": "pool", @@ -306,6 +366,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Raw throughput of the thread pool. 
How many individual signature sets are successfully validated per second", "fieldConfig": { "defaults": { @@ -313,6 +377,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -325,6 +391,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -357,8 +424,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -371,6 +439,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -381,6 +453,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Total length of the job queue. 
Note: this queue is not bounded", "fieldConfig": { "defaults": { @@ -388,6 +464,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -400,6 +478,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -432,8 +511,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -446,6 +526,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "lodestar_bls_thread_pool_queue_length", "interval": "", "legendFormat": "pool", @@ -456,6 +540,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "How much async time job spent waiting in the job queue before being picked up. 
This number should be really low <100ms to ensure signatures are validated fast.", "fieldConfig": { "defaults": { @@ -463,6 +551,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -475,6 +565,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -507,8 +598,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -521,6 +613,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_sum[$rate_interval])/rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -531,6 +627,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Async time from sending a message to the worker and the worker receiving it.", "fieldConfig": { "defaults": { @@ -538,6 +638,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -550,6 +652,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -584,7 +687,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -621,6 +725,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "What percentage of total signature sets were verified in batch, which is an optimization to reduce verification costs by x2. 
For a synced node this should be ~100%", "fieldConfig": { "defaults": { @@ -628,6 +736,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -640,6 +750,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -672,8 +783,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -686,6 +798,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_batch_sigs_success_total[$rate_interval])/rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -696,6 +812,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average signatures per set. 
This number is decided by the time of object submitted to the pool:\n- Sync blocks: 128\n- Aggregates: 3\n- Attestations: 1", "fieldConfig": { "defaults": { @@ -703,6 +823,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -715,6 +837,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -747,8 +870,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -758,6 +882,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_sig_sets_started_total[$rate_interval])/(rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])>0)", "interval": "", "legendFormat": "pool", @@ -768,6 +896,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "How many individual signature sets are invalid vs (valid + invalid). We don't control this number since peers may send us invalid signatures. This number should be very low since we should ban bad peers. 
If it's too high the batch optimization may not be worth it.", "fieldConfig": { "defaults": { @@ -775,6 +907,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -786,6 +920,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -834,7 +969,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -871,6 +1007,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average sets per job. A set may contain +1 signatures. This number should be higher than 1 to reduce communication costs", "fieldConfig": { "defaults": { @@ -878,6 +1018,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -890,6 +1032,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -922,8 +1065,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -933,6 +1077,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])/rate(lodestar_bls_thread_pool_job_groups_started_total[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -941,10 +1089,105 @@ ], "title": "BLS worker pool - sets per job", "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + 
"custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 42 + }, + "id": 520, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds_sum[$rate_interval]) * 384", + "instant": false, + "legendFormat": "signature_deserialization", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds_sum[$rate_interval]) * 384", + "hide": false, + "instant": false, + "legendFormat": "pubkey_aggregation", + "range": true, + "refId": "B" + } + ], + "title": "BLS jobItemWorkReq cpu time per epoch", + "type": "timeseries" } ], "refresh": "10s", - "schemaVersion": 35, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_debug_gossipsub.json b/dashboards/lodestar_debug_gossipsub.json index eaed0c9842f6..d83c075de7be 100644 --- a/dashboards/lodestar_debug_gossipsub.json +++ b/dashboards/lodestar_debug_gossipsub.json @@ -103,6 +103,7 @@ 
"tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -220,7 +221,7 @@ "text": {}, "textMode": "auto" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -289,7 +290,7 @@ "text": {}, "textMode": "value" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -341,7 +342,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -396,7 +397,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -451,7 +452,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -493,6 +494,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -703,6 +705,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -786,6 +789,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -957,6 +961,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1037,6 +1042,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -8729,7 +8735,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar", diff --git a/dashboards/lodestar_discv5.json b/dashboards/lodestar_discv5.json index 02c0a3b38956..31f115936df2 100644 --- a/dashboards/lodestar_discv5.json +++ b/dashboards/lodestar_discv5.json @@ -91,6 +91,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 
5, @@ -170,6 +171,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -251,6 +253,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -333,6 +336,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -415,6 +419,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -496,6 +501,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2051,7 +2057,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_execution_engine.json b/dashboards/lodestar_execution_engine.json index cd72b712f9a8..2c4cadc131f1 100644 --- a/dashboards/lodestar_execution_engine.json +++ b/dashboards/lodestar_execution_engine.json @@ -103,6 +103,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -186,6 +187,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -270,6 +272,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -354,6 +357,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -438,6 +442,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -522,6 +527,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3322,7 +3328,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + 
"schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_libp2p.json b/dashboards/lodestar_libp2p.json index 08f500cf5a40..7c6a76ec7175 100644 --- a/dashboards/lodestar_libp2p.json +++ b/dashboards/lodestar_libp2p.json @@ -103,6 +103,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -183,6 +184,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -289,6 +291,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -368,6 +371,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -447,6 +451,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -544,6 +549,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -945,7 +951,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_multinode.json b/dashboards/lodestar_multinode.json index ac710364985b..9a8fecaf0128 100644 --- a/dashboards/lodestar_multinode.json +++ b/dashboards/lodestar_multinode.json @@ -13,7 +13,10 @@ "list": [ { "builtIn": 1, - "datasource": "-- Grafana --", + "datasource": { + "type": "datasource", + "uid": "grafana" + }, "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", @@ -51,6 +54,10 @@ "liveNow": false, "panels": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -100,7 +107,7 @@ }, "textMode": "auto" }, - "pluginVersion": "8.3.1", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -119,12 +126,18 @@ "type": "stat" }, { + 
"datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -136,6 +149,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -167,10 +181,12 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "targets": [ @@ -190,12 +206,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -207,6 +229,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -238,10 +261,12 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "targets": [ @@ -261,12 +286,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -278,6 +309,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -310,10 +342,12 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { - "mode": "single" + "mode": "single", + 
"sort": "none" } }, "targets": [ @@ -334,12 +368,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -351,6 +391,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -383,10 +424,12 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "targets": [ @@ -407,6 +450,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -441,7 +488,7 @@ }, "textMode": "auto" }, - "pluginVersion": "8.3.1", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -473,6 +520,10 @@ "type": "stat" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -526,6 +577,10 @@ "type": "stat" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -574,7 +629,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single" @@ -598,6 +654,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -647,7 +707,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single" @@ -672,7 +733,7 @@ } ], "refresh": "10s", - "schemaVersion": 33, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git 
a/dashboards/lodestar_networking.json b/dashboards/lodestar_networking.json index 77e4be04048f..e17cabf32048 100644 --- a/dashboards/lodestar_networking.json +++ b/dashboards/lodestar_networking.json @@ -104,6 +104,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -189,6 +190,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -270,6 +272,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -351,6 +354,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -534,9 +538,10 @@ "values": false }, "showUnfilled": true, - "text": {} + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -588,9 +593,10 @@ "values": false }, "showUnfilled": true, - "text": {} + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -631,6 +637,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -711,6 +718,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -829,7 +837,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -886,6 +894,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1000,6 +1009,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1138,6 +1148,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, 
"pointSize": 4, @@ -1223,6 +1234,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -1309,6 +1321,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineStyle": { "fill": "solid" @@ -1450,7 +1463,7 @@ "unit": "short" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -1508,6 +1521,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -1648,7 +1662,7 @@ "unit": "short" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -1760,7 +1774,7 @@ "unit": "short" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -1818,6 +1832,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -1931,6 +1946,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2015,6 +2031,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2141,7 +2158,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -2251,7 +2268,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -2347,7 +2364,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -2426,6 +2443,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2505,6 
+2523,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2635,6 +2654,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2718,6 +2738,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2801,6 +2822,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2886,6 +2908,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2967,6 +2990,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3050,6 +3074,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3135,6 +3160,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3230,6 +3256,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3348,6 +3375,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3476,6 +3504,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3556,6 +3585,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3674,7 +3704,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -3717,6 +3747,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3799,86 +3830,7 
@@ "tooltip": false, "viz": false }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "unit": "percentunit" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 154 - }, - "id": 540, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "multi", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "editorMode": "code", - "expr": "lodestar_gossip_validation_queue_current_drop_ratio", - "legendFormat": "{{topic}}", - "range": true, - "refId": "A" - } - ], - "title": "Drop Ratio", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3971,6 +3923,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4050,6 +4003,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4159,6 +4113,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4263,6 +4218,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": 
"linear", "lineWidth": 1, "pointSize": 5, @@ -4342,6 +4298,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4450,6 +4407,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4558,6 +4516,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4574,7 +4533,8 @@ "mode": "off" } }, - "mappings": [] + "mappings": [], + "unit": "percentunit" }, "overrides": [] }, @@ -4584,7 +4544,7 @@ "x": 0, "y": 187 }, - "id": 615, + "id": 624, "options": { "legend": { "calcs": [], @@ -4604,25 +4564,14 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": "rate(lodestar_gossip_attestation_use_head_block_state_count{caller=\"validateGossipAttestation\"}[$rate_interval])", - "legendFormat": "head_state", + "expr": "rate(lodestar_gossip_attestation_shuffling_cache_hit_count[$rate_interval])\n/\n(\n rate(lodestar_gossip_attestation_shuffling_cache_hit_count[$rate_interval])\n +\n (\n rate(lodestar_gossip_attestation_shuffling_cache_miss_count[$rate_interval])\n or\n vector(0)\n )\n)\nor\nvector(1)\n", + "instant": false, + "legendFormat": "hit_percentage", "range": true, "refId": "A" - }, - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "editorMode": "code", - "expr": "rate(lodestar_gossip_attestation_use_head_block_state_dialed_to_target_epoch_count{caller=\"validateGossipAttestation\"}[$rate_interval])", - "hide": false, - "legendFormat": "head_state_dialed_to_target_epoch", - "range": true, - "refId": "B" } ], - "title": "Used States", + "title": "Shuffling Cache Hit", "type": "timeseries" }, { @@ -4649,6 +4598,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4770,6 +4720,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, 
"lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4886,7 +4837,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -4943,6 +4894,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5071,6 +5023,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5199,6 +5152,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5303,6 +5257,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5409,6 +5364,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5489,6 +5445,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5570,6 +5527,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5651,6 +5609,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5732,6 +5691,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5815,6 +5775,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5898,6 +5859,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5979,6 +5941,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -6060,6 +6023,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, 
"lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -6141,6 +6105,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -6199,7 +6164,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_state_cache_regen.json b/dashboards/lodestar_state_cache_regen.json index 0d1360837d7f..62b4f6fc479f 100644 --- a/dashboards/lodestar_state_cache_regen.json +++ b/dashboards/lodestar_state_cache_regen.json @@ -13,7 +13,10 @@ "list": [ { "builtIn": 1, - "datasource": "-- Grafana --", + "datasource": { + "type": "datasource", + "uid": "grafana" + }, "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", @@ -33,7 +36,6 @@ "fiscalYearStartMonth": 0, "graphTooltip": 1, "id": null, - "iteration": 1661328981106, "links": [ { "asDropdown": true, @@ -54,6 +56,10 @@ "panels": [ { "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 1, "w": 24, @@ -62,16 +68,31 @@ }, "id": 22, "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], "title": "stateCache and stateCheckpointCache Stats", "type": "row" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -83,6 +104,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -131,7 +153,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -167,12 +190,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + 
"uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -184,6 +213,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -232,7 +262,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -268,12 +299,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -285,6 +322,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -316,7 +354,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -325,6 +364,10 @@ }, "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "exemplar": false, "expr": "lodestar_state_cache_size{}", "interval": "", @@ -336,12 +379,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -353,6 +402,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -384,7 +434,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": 
"single", @@ -393,6 +444,10 @@ }, "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "exemplar": false, "expr": "lodestar_cp_state_cache_size{}", "interval": "", @@ -400,6 +455,10 @@ "refId": "A" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "exemplar": false, "expr": "lodestar_cp_state_epoch_size", "hide": false, @@ -412,12 +471,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -429,6 +494,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -460,7 +526,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -484,12 +551,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -501,6 +574,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -532,7 +606,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -557,6 +632,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -651,7 +730,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ 
-700,6 +780,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -794,7 +878,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -843,6 +928,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -938,7 +1027,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -987,6 +1077,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1082,7 +1176,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1132,6 +1227,10 @@ }, { "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 1, "w": 24, @@ -1140,10 +1239,23 @@ }, "id": 40, "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], "title": "Regen call stats", "type": "row" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "", "fieldConfig": { "defaults": { @@ -1193,7 +1305,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1217,6 +1330,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1265,7 +1382,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { 
"mode": "single", @@ -1289,6 +1407,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1338,7 +1460,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1362,6 +1485,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1411,7 +1538,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1435,6 +1563,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1486,7 +1618,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1510,6 +1643,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1561,7 +1698,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1585,6 +1723,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1636,7 +1778,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1660,6 +1803,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1711,7 +1858,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": 
"bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -1736,6 +1884,10 @@ }, { "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 1, "w": 24, @@ -1744,10 +1896,23 @@ }, "id": 54, "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], "title": "Regen queue", "type": "row" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1798,8 +1963,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -1824,6 +1990,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1874,8 +2044,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -1885,6 +2056,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "12*rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])", "interval": "", "legendFormat": "regen_queue", @@ -1895,6 +2070,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -1945,8 +2124,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -1956,6 +2136,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": 
"rate(lodestar_regen_queue_dropped_jobs_total[$rate_interval])/(rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])+rate(lodestar_regen_queue_dropped_jobs_total[$rate_interval]))", "interval": "", "legendFormat": "regen_queue", @@ -1966,6 +2150,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -2016,8 +2204,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -2027,6 +2216,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_regen_queue_job_time_seconds_sum[$rate_interval])/rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])", "interval": "", "legendFormat": "regen_queue", @@ -2037,6 +2230,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -2087,8 +2284,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -2098,6 +2296,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_regen_queue_job_wait_time_seconds_sum[$rate_interval])/rate(lodestar_regen_queue_job_wait_time_seconds_count[$rate_interval])", "interval": "", "legendFormat": "regen_queue", @@ -2108,6 +2310,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { @@ -2158,8 +2364,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + 
"showLegend": false }, "tooltip": { "mode": "multi", @@ -2169,6 +2376,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "lodestar_regen_queue_length", "interval": "", "legendFormat": "regen_queue", @@ -2180,7 +2391,7 @@ } ], "refresh": "10s", - "schemaVersion": 35, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_sync.json b/dashboards/lodestar_sync.json index 1a17db6da5ff..6cc82bedde47 100644 --- a/dashboards/lodestar_sync.json +++ b/dashboards/lodestar_sync.json @@ -103,6 +103,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -197,6 +198,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineStyle": { "fill": "solid" @@ -305,6 +307,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -384,6 +387,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -463,6 +467,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -542,6 +547,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -621,6 +627,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 1, @@ -1658,7 +1665,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_validator_client.json b/dashboards/lodestar_validator_client.json index 35f7f3ccc458..7cc41cffdb34 100644 --- a/dashboards/lodestar_validator_client.json +++ b/dashboards/lodestar_validator_client.json @@ -84,6 +84,7 @@ "tooltip": false, "viz": false }, + 
"insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -213,7 +214,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -269,7 +270,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -323,7 +324,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -377,7 +378,7 @@ }, "textMode": "auto" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -430,7 +431,7 @@ }, "textMode": "auto" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -483,7 +484,7 @@ }, "textMode": "auto" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -521,7 +522,7 @@ "content": "_Validator metrics =D_", "mode": "markdown" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -557,6 +558,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -721,6 +723,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -869,7 +872,7 @@ "unit": "s" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -983,7 +986,7 @@ "unit": "s" } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "reverseYBuckets": false, "targets": [ { @@ -2014,7 +2017,7 @@ ], "refresh": "10s", "revision": 1, - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_vm_host.json b/dashboards/lodestar_vm_host.json index 1185f7409319..7471defd0e8e 100644 --- a/dashboards/lodestar_vm_host.json +++ b/dashboards/lodestar_vm_host.json @@ -98,6 +98,7 @@ "tooltip": 
false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -240,6 +241,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -352,6 +354,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -449,6 +452,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -566,6 +570,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -663,6 +668,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -5585,7 +5591,7 @@ ], "refresh": "10s", "revision": 1, - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 056325e19104..270a01b311de 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -73,6 +73,7 @@ extra: nav: - Home: index.md - Introduction: introduction.md + - Security: security.md - Getting Started: - Quick Start: getting-started/quick-start.md - Installation: getting-started/installation.md @@ -121,7 +122,7 @@ nav: - Dependency Graph: contribution/depgraph.md # - Repo: contribution/repo.md - Testing: - - Overview: contribution/testing/overview.md + - Overview: contribution/testing/index.md # - Unit Tests: contribution/testing/unit-tests.md # - Integration Tests: contribution/testing/integration-tests.md # - E2E Tests: contribution/testing/e2e-tests.md diff --git a/docs/pages/beacon-management/networking.md b/docs/pages/beacon-management/networking.md index 9305b683ae47..993b1cdfda26 100644 --- a/docs/pages/beacon-management/networking.md +++ b/docs/pages/beacon-management/networking.md @@ -1,38 +1,38 @@ # Networking -Starting up Lodestar will automatically connect it to 
peers on the network. Peers are found through the discv5 protocol and one peers are established communications happen via gossipsub over libp2p. While not necessary, having a basic understanding of how the various protocols and transport work will help with debugging and troubleshooting as some of the more common challenges come up with [firewalls](#firewall-management) and [NAT traversal](#nat-traversal). +Starting up Lodestar will automatically connect it to peers on the network. Peers are found through the discv5 protocol and once peers are established communications happen via gossipsub over libp2p. While not necessary, having a basic understanding of how the various protocols and transport work will help with debugging and troubleshooting as some of the more common challenges come up with [firewalls](#firewall-management) and [NAT traversal](#nat-traversal). ## Networking Flags Some of the important Lodestar flags related to networking are: -- [`--discv5`](./configuration.md#--discv5) -- [`--listenAddress`](./configuration.md#--listenAddress) -- [`--port`](./configuration.md#--port) -- [`--discoveryPort`](./configuration.md#--discoveryPort) -- [`--listenAddress6`](./configuration.md#--listenAddress6) -- [`--port6`](./configuration.md#--port6) -- [`--discoveryPort6`](./configuration.md#--discoveryPort6) -- [`--bootnodes`](./configuration.md#--bootnodes) -- [`--deterministicLongLivedAttnets`](./configuration.md#--deterministicLongLivedAttnets) -- [`--subscribeAllSubnets`](./configuration.md#--subscribeAllSubnets) -- [`--disablePeerScoring`](./configuration.md#--disablePeerScoring) -- [`--enr.ip`](./configuration.md#--enr.ip) -- [`--enr.tcp`](./configuration.md#--enr.tcp) -- [`--enr.udp`](./configuration.md#--enr.udp) -- [`--enr.ip6`](./configuration.md#--enr.ip6) -- [`--enr.tcp6`](./configuration.md#--enr.tcp6) -- [`--enr.udp6`](./configuration.md#--enr.udp6) -- [`--nat`](./configuration.md#--nat) -- [`--private`](./configuration.md#`--private`) +- 
[`--discv5`](./beacon-cli.md#-discv5) +- [`--listenAddress`](./beacon-cli.md#-listenaddress) +- [`--port`](./beacon-cli.md#-port) +- [`--discoveryPort`](./beacon-cli.md#-discoveryport) +- [`--listenAddress6`](./beacon-cli.md#-listenaddress6) +- [`--port6`](./beacon-cli.md#-port6) +- [`--discoveryPort6`](./beacon-cli.md#-discoveryport6) +- [`--bootnodes`](./beacon-cli.md#-bootnodes) +- [`--deterministicLongLivedAttnets`](./beacon-cli.md#-deterministiclonglivedattnets) +- [`--subscribeAllSubnets`](./beacon-cli.md#-subscribeallsubnets) +- [`--disablePeerScoring`](./beacon-cli.md#-disablepeerscoring) +- [`--enr.ip`](./beacon-cli.md#-enrip) +- [`--enr.tcp`](./beacon-cli.md#-enrtcp) +- [`--enr.udp`](./beacon-cli.md#-enrudp) +- [`--enr.ip6`](./beacon-cli.md#-enrip6) +- [`--enr.tcp6`](./beacon-cli.md#-enrtcp6) +- [`--enr.udp6`](./beacon-cli.md#-enrudp6) +- [`--nat`](./beacon-cli.md#-nat) +- [`--private`](./beacon-cli.md#`-private`) ## Peer Discovery (Discv5) -In Ethereum, discv5 plays a pivotal role in the peer discovery process, facilitating nodes to find and locate each other in order to form the peer-to-peer network​. The process begins with an interaction between new nodes and bootnodes at start-up. Bootnodes are nodes with hard-coded addresses, or can be overridden via the cli flag `--bootnodes`, to bootstrap the discovery process​. Through a method called FINDNODE-NODES, a new node establishes a bond with each bootnode, and it returns a list of peers for the new node to connect to. Following this trail, the new node engages through FINDNODE-NODES with the provided peers to further establish a web of connections​. +In Ethereum, discv5 plays a pivotal role in the peer discovery process, facilitating nodes to find and locate each other in order to form the peer-to-peer network​. The process begins with an interaction between new nodes and bootnodes at start-up. 
Bootnodes are nodes with hard-coded addresses, or can be overridden via the cli flag [`--bootnodes`](./beacon-cli.md#-bootnodes), to bootstrap the discovery process​. Through a method called FINDNODE-NODES, a new node establishes a bond with each bootnode, and it returns a list of peers for the new node to connect to. Following this trail, the new node engages through FINDNODE-NODES with the provided peers to further establish a web of connections​. Discv5 operates as a peer advertisement medium in this network, where nodes can act as both providers and consumers of data. Every participating node in the Discv5 protocol discovers peer data from other nodes and later relays it, making the discovery process dynamic and efficient​. -Discv5 is designed to be a standalone protocol running via UDP on a dedicated port solely for peer discovery. Peer data is exchanged via self-certified, flexible peer records (ENRs). These key features cater to the Ethereum network​ and being a good peer often means running a discv5 worker​. Lodestar offers simple configuration to setup and run a bootnode independently of a beacon node. See [bootnode](./bootnode.md) for more information and configuration options. +Discv5 is designed to be a standalone protocol running via UDP on a dedicated port solely for peer discovery. Peer data is exchanged via self-certified, flexible peer records (ENRs). These key features cater to the Ethereum network​ and being a good peer often means running a discv5 worker​. Lodestar offers simple configuration to setup and run a bootnode independently of a beacon node. See the [bootnode cli](../bootnode/bootnode-cli.md) page for more information and configuration options. 
## ENR diff --git a/docs/pages/beacon-management/syncing.md b/docs/pages/beacon-management/syncing.md index 21cd05d8a8a2..40b5b4ba96b5 100644 --- a/docs/pages/beacon-management/syncing.md +++ b/docs/pages/beacon-management/syncing.md @@ -2,7 +2,7 @@ Syncing an Ethereum node involves obtaining a copy of the blockchain data from other peers in the network to reach a consistent state. This process is crucial for new nodes or nodes that have been offline and need to catch up with the network's current state. Syncing can be performed for both the execution layer and the beacon chain, although the focus here will be primarily on the beacon chain. -Lodestar allows for several methods of syncing however the recommended method is `checkpoint sync` as it is the fastest and least resource intensive. It is generally a good idea to sync via a [`--checkpointSyncUrl`](./configuration.md#--checkpointSyncUrl). If starting at a specific point is necessary specify the [`--checkpointState`](./configuration.md#--checkpointState) that should be where the sync begins. +Lodestar allows for several methods of syncing however the recommended method is `checkpoint sync` as it is the fastest and least resource intensive. It is generally a good idea to sync via a [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl). If starting at a specific point is necessary specify the [`--checkpointState`](./beacon-cli.md#-checkpointstate) that should be where the sync begins. ## Weak Subjectivity @@ -36,7 +36,7 @@ The implementation of the different syncing styles in Lodestar are actually one There are several flags that can be used to configure the sync process. 
-- [`--checkpointSyncUrl`](./configuration.md#--checkpointSyncUrl) -- [`--checkpointState`](./configuration.md#--checkpointState) -- [`--wssCheckpoint`](./configuration.md#--wssCheckpoint) -- [`--forceCheckpointSync`](./configuration.md#--forceCheckpointSync) +- [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl) +- [`--checkpointState`](./beacon-cli.md#-checkpointstate) +- [`--wssCheckpoint`](./beacon-cli.md#-wsscheckpoint) +- [`--forceCheckpointSync`](./beacon-cli.md#-forcecheckpointsync) diff --git a/docs/pages/contribution/testing/end-to-end-tests.md b/docs/pages/contribution/testing/end-to-end-tests.md new file mode 100644 index 000000000000..3f405128c7cb --- /dev/null +++ b/docs/pages/contribution/testing/end-to-end-tests.md @@ -0,0 +1,3 @@ +# End-To-End Tests + +Check back soon for more information!! We are in the process of updating our docs. diff --git a/docs/pages/contribution/testing/performance-tests.md b/docs/pages/contribution/testing/performance-tests.md index e69de29bb2d1..6e2d9c86319b 100644 --- a/docs/pages/contribution/testing/performance-tests.md +++ b/docs/pages/contribution/testing/performance-tests.md @@ -0,0 +1,3 @@ +# Performance Tests + +Check back soon for more information!! We are in the process of updating our docs. diff --git a/docs/pages/contribution/testing/simulation-tests.md b/docs/pages/contribution/testing/simulation-tests.md index ed36d1351307..c1059e5c4177 100644 --- a/docs/pages/contribution/testing/simulation-tests.md +++ b/docs/pages/contribution/testing/simulation-tests.md @@ -1,4 +1,4 @@ -# Simulation Testing +# Simulation Tests "Sim" testing for Lodestar is the most comprehensive, and complex, testing that is run. The goal is to fully simulate a testnet and to actuate the code in a way that closely mimics what will happen when turning on Lodestar in the wild. This is a very complex task and requires a lot of moving parts to work together. 
The following sections will describe the various components and how they work together. @@ -54,7 +54,7 @@ GETH_DOCKER_IMAGE=ethereum/client-go:v1.11.6 \ ## Sim Test Infrastructure -When setting up and running the simulations, interactions with the nodes is through the published node API's. All functionality is actuated via http request and by "plugging in" this way it is possible to run the nodes in a stand-alone fashion, as they would be run in production, but to still achieve a tightly monitored and controlled environment. If code needs to be executed on a "class by class" basis or with mocking involved then the test is not a simulation test and would fall into one of the other testing categories. See the [Testing](../testing.md) page for more information on the other types of tests available for Lodestar. +When setting up and running the simulations, interactions with the nodes is through the published node API's. All functionality is actuated via http request and by "plugging in" this way it is possible to run the nodes in a stand-alone fashion, as they would be run in production, but to still achieve a tightly monitored and controlled environment. If code needs to be executed on a "class by class" basis or with mocking involved then the test is not a simulation test and would fall into one of the other testing categories. See the [Testing Overview](./index.md) page for more information on the other types of tests available for Lodestar. ### Simulation Environment diff --git a/docs/pages/contribution/testing/spec-tests.md b/docs/pages/contribution/testing/spec-tests.md index e69de29bb2d1..b7a65dafd072 100644 --- a/docs/pages/contribution/testing/spec-tests.md +++ b/docs/pages/contribution/testing/spec-tests.md @@ -0,0 +1,3 @@ +# Specification Tests + +Check back soon for more information!! We are in the process of updating our docs. 
diff --git a/docs/pages/contribution/testing/unit-tests.md b/docs/pages/contribution/testing/unit-tests.md index e69de29bb2d1..cbf4b4ae2264 100644 --- a/docs/pages/contribution/testing/unit-tests.md +++ b/docs/pages/contribution/testing/unit-tests.md @@ -0,0 +1,3 @@ +# Unit Tests + +Check back soon for more information!! We are in the process of updating our docs. diff --git a/docs/pages/data-retention.md b/docs/pages/data-retention.md index c8512858441f..41daa8dc458d 100644 --- a/docs/pages/data-retention.md +++ b/docs/pages/data-retention.md @@ -6,7 +6,7 @@ There are several processes that need to store data for Lodestar. These data set ```bash $executionDir # this changes depending on the execution client - └── execution-db + └── execution-db $dataDir # specified by --dataDir on the beacon command ├── .log_rotate_audit.json @@ -49,6 +49,6 @@ Configuring your node to store and prune data is key to success. On average you `keystores`, `keystore-cache` and `peerstore` are not usually very large and are not expected to grow much during normal operation. -Logs can also become quite large so please check out the section on [log management](../logging-and-metrics/log-management.md) for more information. +Logs can also become quite large so please check out the section on [log management](./logging-and-metrics/log-management.md) for more information. -There is really only one flag that is needed to manage the data for Lodestar, [`--dataDir`](./configuration.md#--dataDir). Other than that handling log management is really the heart of the data management story. Beacon node data is what it is. Depending on the execution client that is chosen, there may be flags to help with data storage growth but that is outside the scope of this document. +There is really only one flag that is needed to manage the data for Lodestar, [`--dataDir`](./beacon-management/beacon-cli.md#-datadir). Other than that handling log management is really the heart of the data management story. 
Beacon node data is what it is. Depending on the execution client that is chosen, there may be flags to help with data storage growth but that is outside the scope of this document. diff --git a/docs/pages/getting-started/installation.md b/docs/pages/getting-started/installation.md index 61ecb5b128ef..4fdfc3e82367 100644 --- a/docs/pages/getting-started/installation.md +++ b/docs/pages/getting-started/installation.md @@ -90,4 +90,4 @@ See [Command Line Reference](./../reference/cli.md) for further information. !!! danger For mainnet (production) usage, we only recommend installing with docker due to [NPM supply chain attacks](https://hackaday.com/2021/10/22/supply-chain-attack-npm-library-used-by-facebook-and-others-was-compromised/). Until a [safer installation method has been found](https://github.com/ChainSafe/lodestar/issues/3596), do not use this install method except for experimental purposes only. - \ No newline at end of file + diff --git a/docs/pages/getting-started/starting-a-node.md b/docs/pages/getting-started/starting-a-node.md index 46b6f2e456c8..dd11381bde10 100644 --- a/docs/pages/getting-started/starting-a-node.md +++ b/docs/pages/getting-started/starting-a-node.md @@ -14,7 +14,7 @@ Make sure Lodestar is installed in your local environment, following the chosen ./lodestar --help ``` -For a complete list of beacon node CLI commands and options, see the [Command Line Reference](../../reference/cli/) +For a complete list of beacon node CLI commands and options, see the [`beacon` CLI Command](../beacon-management/beacon-cli.md) section. To select a known testnet or mainnet, use the `--network` flag. `mainnet` is selected by default, and a list of available networks is listed with the `--help` flag. Setting the `--network` flag will conveniently configure the beacon node or validator client for the selected network. For power users, any configuration option should be able to be overridden. 
@@ -181,4 +181,4 @@ Apr-20 15:16:17.017[] info: Synced - slot: 6264979 - head: 0xde9 6. Peer info: Current total number of outbound or inbound peers, for e.g.: `peers: 27` -For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](./prometheus-grafana.md) for more details. +For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](../logging-and-metrics/prometheus-grafana.md) for more details. diff --git a/docs/pages/google0c42298b7ec08b7e.html b/docs/pages/google0c42298b7ec08b7e.html new file mode 100644 index 000000000000..7edebde149af --- /dev/null +++ b/docs/pages/google0c42298b7ec08b7e.html @@ -0,0 +1 @@ +google-site-verification: google0c42298b7ec08b7e.html \ No newline at end of file diff --git a/docs/pages/index.md b/docs/pages/index.md index 82674eb89fe8..4af149a7a0ef 100644 --- a/docs/pages/index.md +++ b/docs/pages/index.md @@ -1,19 +1,19 @@ ![lodestar logo](assets/lodestar_icon_text_black_stroke.png) -## Welcome to the Lodestar documentation! +## Welcome to the Lodestar documentation > **Lodestar is an open-source Ethereum Consensus client and Typescript ecosystem, maintained by ChainSafe Systems** ### Getting started -- Follow the installation method for [source install](install/source.md), [NPM install](install/npm.md), or [Docker install](install/docker.md) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart). -- Use [Lodestar libraries](libraries) in your next Ethereum Typescript project. -- Run a beacon node on [mainnet or a public testnet](usage/beacon-management.md). -- Utilize the whole stack by [starting a local testnet](usage/local). 
-- View the Lodestar [CLI commands and options](https://chainsafe.github.io/lodestar/reference/cli/) -- Prospective contributors can read the [contributing section](https://chainsafe.github.io/lodestar/contributing/) to understand how we develop and test on Lodestar. +- Follow the installation method for [source install](./getting-started/installation.md/#build-from-source) or [Docker install](./getting-started/installation.md/#docker-installation) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart). +- Use [Lodestar libraries](./supporting-libraries/index.md) in your next Ethereum Typescript project. +- Run a beacon node on [mainnet or a public testnet](./getting-started/starting-a-node.md). +- Utilize the whole stack by [starting a local testnet](./advanced-topics/setting-up-a-testnet.md). +- View the Lodestar [CLI commands and options](./beacon-management/beacon-cli.md) +- Prospective contributors can read the [contributing section](./contribution/getting-started.md) to understand how we develop and test on Lodestar. - If you have questions [submit an issue](https://github.com/ChainSafe/lodestar/issues/new) or join us on [Discord](https://discord.gg/yjyvFRP)! -- Please note our [security policy](https://github.com/ChainSafe/lodestar/blob/unstable/SECURITY.md). +- Please note our [security policy](./security.md). - Sign up to our [mailing list](https://chainsafe.typeform.com/lodestar) for announcements and any critical information about Lodestar. ## Specifications diff --git a/docs/pages/introduction.md b/docs/pages/introduction.md index f8fe03386c0a..776b018641b8 100644 --- a/docs/pages/introduction.md +++ b/docs/pages/introduction.md @@ -10,11 +10,11 @@ In Ethereum's Proof of Stake (PoS) model, validators replace miners from the Pro In an effort to promote client diversity there are several beacon-nodes being developed. Each is programmed in a different language and by a different team. 
The following is a list of the current beacon-node clients: -[Lodestar](https://chainsafe.io/lodestar.html) -[Prysm](https://prysmaticlabs.com/) -[Lighthouse](https://lighthouse.sigmaprime.io/) -[Teku](https://consensys.net/knowledge-base/ethereum-2/teku/) -[Nimbus](https://nimbus.team/) +- [Lodestar](https://chainsafe.io/lodestar.html) +- [Prysm](https://prysmaticlabs.com/) +- [Lighthouse](https://lighthouse.sigmaprime.io/) +- [Teku](https://consensys.net/knowledge-base/ethereum-2/teku/) +- [Nimbus](https://nimbus.team/) ## Why Client Diversity? diff --git a/docs/pages/logging-and-metrics/log-management.md b/docs/pages/logging-and-metrics/log-management.md index e69de29bb2d1..a0ee1d5fec07 100644 --- a/docs/pages/logging-and-metrics/log-management.md +++ b/docs/pages/logging-and-metrics/log-management.md @@ -0,0 +1,3 @@ +# Log Management + +Check back soon for more information!! diff --git a/docs/pages/reference/cli.md b/docs/pages/reference/cli.md new file mode 100644 index 000000000000..1b57913b99fc --- /dev/null +++ b/docs/pages/reference/cli.md @@ -0,0 +1,8 @@ +# Page relocated + +_**Welcome! This page has been moved. Please checkout our new docs layout from the Table of Contents! 
Below are some helpful links to the CLI pages that were split out from this original document**_ + +- [Beacon Node CLI](../beacon-management/beacon-cli.md) +- [Validator CLI](../validator-management/validator-cli.md) +- [Bootnode CLI](../bootnode/bootnode-cli.md) +- [Light Client CLI](../lightclient-prover/lightclient-cli.md) diff --git a/docs/pages/supporting-libraries/index.md b/docs/pages/supporting-libraries/index.md index eb1e7821db18..555294393ec1 100644 --- a/docs/pages/supporting-libraries/index.md +++ b/docs/pages/supporting-libraries/index.md @@ -6,7 +6,7 @@ - [`@chainsafe/js-libp2p-noise`](https://github.com/NodeFactoryIo/js-libp2p-noise) - [Noise](https://noiseprotocol.org/noise.html) handshake for `js-libp2p` - [`@chainsafe/js-libp2p-gossipsub`](https://github.com/ChainSafe/js-libp2p-gossipsub) - [Gossipsub](https://github.com/libp2p/specs/tree/master/pubsub/gossipsub) protocol for `js-libp2p` -- [@chainsafe/libp2p-yamux](https://github.com/ChainSafe/libp2p-yamux) +- [`@chainsafe/libp2p-yamux`](https://github.com/ChainSafe/js-libp2p-yamux) ### Discv5 @@ -14,14 +14,14 @@ ## Serialization and Hashing -- [`ssz`](https://github.com/ChainSafe/ssz) - Simple Serialize (SSZ) -- [`persistent-merkle-tree`](https://github.com/ChainSafe/persistent-merkle-tree) - binary merkle tree implemented as a [persistent data structure](https://en.wikipedia.org/wiki/Persistent_data_structure) -- [`as-sha256`](https://github.com/ChainSafe/as-sha256) - Small AssemblyScript implementation of SHA256 +- [`@chainsafe/ssz`](https://github.com/ChainSafe/ssz) - Simple Serialize (SSZ) +- [`@chainsafe/persistent-merkle-tree`](https://github.com/ChainSafe/persistent-merkle-tree) - binary merkle tree implemented as a [persistent data structure](https://en.wikipedia.org/wiki/Persistent_data_structure) +- [`@chainsafe/as-sha256`](https://github.com/ChainSafe/as-sha256) - Small AssemblyScript implementation of SHA256 ## BLS -- [`bls`](https://github.com/ChainSafe/bls) - Isomorphic Ethereum 
Consensus BLS sign / verify / aggregate -- [`blst-ts`](https://github.com/ChainSafe/blst) - Node specific Ethereum Consensus BLS sign / verify / aggregate -- [`bls-keystore`](https://github.com/ChainSafe/bls-keystore) - store / retrieve a BLS secret key from an [EIP-2335](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2335.md) JSON keystore -- [`bls-keygen`](https://github.com/ChainSafe/bls-keygen) - utility functions to generate BLS secret keys, following [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) -- [`bls-hd-key`](https://github.com/ChainSafe/bls-hd-key) - low level [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) functionality +- [`@chainsafe/bls`](https://github.com/ChainSafe/bls) - Isomorphic Ethereum Consensus BLS sign / verify / aggregate +- [`@chainsafe/blst-ts`](https://github.com/ChainSafe/blst-ts) - Node specific Ethereum Consensus BLS sign / verify / aggregate +- [`@chainsafe/bls-keystore`](https://github.com/ChainSafe/bls-keystore) - store / retrieve a BLS secret key from an [EIP-2335](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2335.md) JSON keystore +- [`@chainsafe/bls-keygen`](https://github.com/ChainSafe/bls-keygen) - utility functions to generate BLS secret keys, following [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) +- [`@chainsafe/bls-hd-key`](https://github.com/ChainSafe/bls-hd-key) - low level [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) functionality diff --git a/lerna.json b/lerna.json index bb0c43fed5fe..487caa95b0a2 100644 --- a/lerna.json +++ b/lerna.json @@ -4,7 +4,7 @@ ], "npmClient": "yarn", "useNx": 
true, - "version": "1.13.0", + "version": "1.14.0", "stream": true, "command": { "version": { diff --git a/package.json b/package.json index 158ac2affe68..8e6dad1fdea2 100644 --- a/package.json +++ b/package.json @@ -53,8 +53,8 @@ "@types/sinon-chai": "^3.2.9", "@typescript-eslint/eslint-plugin": "6.7.2", "@typescript-eslint/parser": "6.7.2", - "@vitest/coverage-v8": "^1.0.1", - "@vitest/browser": "^1.0.1", + "@vitest/coverage-v8": "^1.1.0", + "@vitest/browser": "^1.1.0", "c8": "^8.0.1", "chai": "^4.3.8", "chai-as-promised": "^7.1.1", @@ -97,17 +97,18 @@ "ts-node": "^10.9.1", "typescript": "^5.2.2", "typescript-docs-verifier": "^2.5.0", - "vite-plugin-node-polyfills": "^0.17.0", - "vite-plugin-top-level-await": "^1.3.1", - "vitest": "^1.0.2", + "vite-plugin-node-polyfills": "^0.18.0", + "vite-plugin-top-level-await": "^1.4.1", + "vitest": "^1.1.0", "vitest-when": "^0.3.0", "wait-port": "^1.1.0", - "webdriverio": "^8.24.12", + "webdriverio": "^8.27.0", "webpack": "^5.88.2" }, "resolutions": { "dns-over-http-resolver": "^2.1.1", "chai": "^4.3.10", - "loupe": "^2.3.6" + "loupe": "^2.3.6", + "vite": "^5.0.0" } } diff --git a/packages/api/package.json b/packages/api/package.json index 2dfcbc73b65c..b7708ac5aa26 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -71,10 +71,10 @@ "dependencies": { "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/config": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "eventsource": "^2.0.2", "qs": "^6.11.1" }, diff --git a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts index 
53ebb93692dc..b56006fe4191 100644 --- a/packages/api/src/beacon/routes/beacon/block.ts +++ b/packages/api/src/beacon/routes/beacon/block.ts @@ -1,17 +1,7 @@ import {ContainerType} from "@chainsafe/ssz"; import {ForkName} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; -import { - phase0, - allForks, - Slot, - Root, - ssz, - RootHex, - deneb, - isSignedBlockContents, - isSignedBlindedBlockContents, -} from "@lodestar/types"; +import {phase0, allForks, Slot, Root, ssz, RootHex, deneb, isSignedBlockContents} from "@lodestar/types"; import { RoutesData, @@ -30,10 +20,7 @@ import { import {HttpStatusCode} from "../../../utils/client/httpStatusCode.js"; import {parseAcceptHeader, writeAcceptHeader} from "../../../utils/acceptHeader.js"; import {ApiClientResponse, ResponseFormat} from "../../../interfaces.js"; -import { - allForksSignedBlockContentsReqSerializer, - allForksSignedBlindedBlockContentsReqSerializer, -} from "../../../utils/routes.js"; +import {allForksSignedBlockContentsReqSerializer} from "../../../utils/routes.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes @@ -207,7 +194,7 @@ export type Api = { * Publish a signed blinded block by submitting it to the mev relay and patching in the block * transactions beacon node gets in response. 
*/ - publishBlindedBlock(blindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents): Promise< + publishBlindedBlock(blindedBlock: allForks.SignedBlindedBeaconBlock): Promise< ApiClientResponse< { [HttpStatusCode.OK]: void; @@ -218,7 +205,7 @@ export type Api = { >; publishBlindedBlockV2( - blindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents, + blindedBlockOrContents: allForks.SignedBlindedBeaconBlock, opts: {broadcastValidation?: BroadcastValidation} ): Promise< ApiClientResponse< @@ -315,16 +302,9 @@ export function getReqSerializers(config: ChainForkConfig): ReqSerializers config.getBlindedForkTypes(data.message.slot).SignedBeaconBlock; - const AllForksSignedBlindedBlockOrContents: TypeJson = { - toJson: (data) => - isSignedBlindedBlockContents(data) - ? allForksSignedBlindedBlockContentsReqSerializer(getSignedBlindedBeaconBlockType).toJson(data) - : getSignedBlindedBeaconBlockType(data).toJson(data), - - fromJson: (data) => - (data as {signed_blinded_block: unknown}).signed_blinded_block !== undefined - ? 
allForksSignedBlindedBlockContentsReqSerializer(getSignedBlindedBeaconBlockType).fromJson(data) - : getSignedBlindedBeaconBlockType(data as allForks.SignedBlindedBeaconBlock).fromJson(data), + const AllForksSignedBlindedBlock: TypeJson = { + toJson: (data) => getSignedBlindedBeaconBlockType(data).toJson(data), + fromJson: (data) => getSignedBlindedBeaconBlockType(data as allForks.SignedBlindedBeaconBlock).fromJson(data), }; return { @@ -353,14 +333,14 @@ export function getReqSerializers(config: ChainForkConfig): ReqSerializers ({ - body: AllForksSignedBlindedBlockOrContents.toJson(item), + body: AllForksSignedBlindedBlock.toJson(item), query: {broadcast_validation: broadcastValidation}, }), parseReq: ({body, query}) => [ - AllForksSignedBlindedBlockOrContents.fromJson(body), + AllForksSignedBlindedBlock.fromJson(body), {broadcastValidation: query.broadcast_validation as BroadcastValidation}, ], schema: { diff --git a/packages/api/src/beacon/routes/validator.ts b/packages/api/src/beacon/routes/validator.ts index f5ae20937a0e..0746797cbf0e 100644 --- a/packages/api/src/beacon/routes/validator.ts +++ b/packages/api/src/beacon/routes/validator.ts @@ -1,5 +1,5 @@ import {ContainerType, fromHexString, toHexString, Type} from "@chainsafe/ssz"; -import {ForkName, ForkBlobs, isForkBlobs, isForkExecution, ForkPreBlobs} from "@lodestar/params"; +import {ForkName, ForkBlobs, isForkBlobs, isForkExecution, ForkPreBlobs, ForkExecution} from "@lodestar/params"; import { allForks, altair, @@ -13,12 +13,14 @@ import { Slot, ssz, UintNum64, + UintBn64, ValidatorIndex, RootHex, StringType, SubcommitteeIndex, Wei, Gwei, + ProducedBlockSource, } from "@lodestar/types"; import {ApiClientResponse} from "../../interfaces.js"; import {HttpStatusCode} from "../../utils/client/httpStatusCode.js"; @@ -37,7 +39,7 @@ import { TypeJson, } from "../../utils/index.js"; import {fromU64Str, fromGraffitiHex, toU64Str, U64Str, toGraffitiHex} from "../../utils/serdes.js"; -import 
{allForksBlockContentsResSerializer, allForksBlindedBlockContentsResSerializer} from "../../utils/routes.js"; +import {allForksBlockContentsResSerializer} from "../../utils/routes.js"; import {ExecutionOptimistic} from "./beacon/block.js"; export enum BuilderSelection { @@ -52,21 +54,24 @@ export enum BuilderSelection { export type ExtraProduceBlockOps = { feeRecipient?: string; builderSelection?: BuilderSelection; + builderBoostFactor?: UintBn64; strictFeeRecipientCheck?: boolean; + blindedLocal?: boolean; }; export type ProduceBlockOrContentsRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & ( | {data: allForks.BeaconBlock; version: ForkPreBlobs} | {data: allForks.BlockContents; version: ForkBlobs} ); -export type ProduceBlindedBlockOrContentsRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & ( - | {data: allForks.BlindedBeaconBlock; version: ForkPreBlobs} - | {data: allForks.BlindedBlockContents; version: ForkBlobs} -); +export type ProduceBlindedBlockRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & { + data: allForks.BlindedBeaconBlock; + version: ForkExecution; +}; -export type ProduceFullOrBlindedBlockOrContentsRes = +export type ProduceFullOrBlindedBlockOrContentsRes = {executionPayloadSource: ProducedBlockSource} & ( | (ProduceBlockOrContentsRes & {executionPayloadBlinded: false}) - | (ProduceBlindedBlockOrContentsRes & {executionPayloadBlinded: true}); + | (ProduceBlindedBlockRes & {executionPayloadBlinded: true}) +); // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes @@ -287,7 +292,7 @@ export type Api = { ): Promise< ApiClientResponse< { - [HttpStatusCode.OK]: ProduceBlindedBlockOrContentsRes; + [HttpStatusCode.OK]: ProduceBlindedBlockRes; }, HttpStatusCode.BAD_REQUEST | HttpStatusCode.SERVICE_UNAVAILABLE > @@ -484,7 +489,9 @@ export type ReqTypes = { skip_randao_verification?: boolean; fee_recipient?: string; builder_selection?: string; + builder_boost_factor?: string; 
strict_fee_recipient_check?: boolean; + blinded_local?: boolean; }; }; produceBlindedBlock: {params: {slot: number}; query: {randao_reveal: string; graffiti: string}}; @@ -551,7 +558,9 @@ export function getReqSerializers(): ReqSerializers { fee_recipient: opts?.feeRecipient, skip_randao_verification: skipRandaoVerification, builder_selection: opts?.builderSelection, + builder_boost_factor: opts?.builderBoostFactor?.toString(), strict_fee_recipient_check: opts?.strictFeeRecipientCheck, + blinded_local: opts?.blindedLocal, }, }), parseReq: ({params, query}) => [ @@ -562,7 +571,9 @@ export function getReqSerializers(): ReqSerializers { { feeRecipient: query.fee_recipient, builderSelection: query.builder_selection as BuilderSelection, + builderBoostFactor: parseBuilderBoostFactor(query.builder_boost_factor), strictFeeRecipientCheck: query.strict_fee_recipient_check, + blindedLocal: query.blinded_local, }, ], schema: { @@ -573,7 +584,9 @@ export function getReqSerializers(): ReqSerializers { fee_recipient: Schema.String, skip_randao_verification: Schema.Boolean, builder_selection: Schema.String, + builder_boost_factor: Schema.String, strict_fee_recipient_check: Schema.Boolean, + blinded_local: Schema.Boolean, }, }, }; @@ -721,13 +734,11 @@ export function getReturnTypes(): ReturnTypes { isForkBlobs(fork) ? allForksBlockContentsResSerializer(fork) : ssz[fork].BeaconBlock ) ) as TypeJson; - const produceBlindedBlockOrContents = WithBlockValues( - WithVersion((fork: ForkName) => - isForkBlobs(fork) - ? allForksBlindedBlockContentsResSerializer(fork) - : ssz.allForksBlinded[isForkExecution(fork) ? fork : ForkName.bellatrix].BeaconBlock + const produceBlindedBlock = WithBlockValues( + WithVersion( + (fork: ForkName) => ssz.allForksBlinded[isForkExecution(fork) ? 
fork : ForkName.bellatrix].BeaconBlock ) - ) as TypeJson; + ) as TypeJson; return { getAttesterDuties: WithDependentRootExecutionOptimistic(ArrayOf(AttesterDuty)), @@ -741,24 +752,36 @@ export function getReturnTypes(): ReturnTypes { if (data.executionPayloadBlinded) { return { execution_payload_blinded: true, - ...(produceBlindedBlockOrContents.toJson(data) as Record), + execution_payload_source: data.executionPayloadSource, + ...(produceBlindedBlock.toJson(data) as Record), }; } else { return { execution_payload_blinded: false, + execution_payload_source: data.executionPayloadSource, ...(produceBlockOrContents.toJson(data) as Record), }; } }, fromJson: (data) => { - if ((data as {execution_payload_blinded: true}).execution_payload_blinded) { - return {executionPayloadBlinded: true, ...produceBlindedBlockOrContents.fromJson(data)}; + const executionPayloadBlinded = (data as {execution_payload_blinded: boolean}).execution_payload_blinded; + if (executionPayloadBlinded === undefined) { + throw Error(`Invalid executionPayloadBlinded=${executionPayloadBlinded} for fromJson deserialization`); + } + + // extract source from the data and assign defaults in the spec complaint manner if not present in response + const executionPayloadSource = + (data as {execution_payload_source: ProducedBlockSource}).execution_payload_source ?? + (executionPayloadBlinded ? 
ProducedBlockSource.builder : ProducedBlockSource.engine); + + if (executionPayloadBlinded) { + return {executionPayloadBlinded, executionPayloadSource, ...produceBlindedBlock.fromJson(data)}; } else { - return {executionPayloadBlinded: false, ...produceBlockOrContents.fromJson(data)}; + return {executionPayloadBlinded, executionPayloadSource, ...produceBlockOrContents.fromJson(data)}; } }, }, - produceBlindedBlock: produceBlindedBlockOrContents, + produceBlindedBlock, produceAttestationData: ContainerData(ssz.phase0.AttestationData), produceSyncCommitteeContribution: ContainerData(ssz.altair.SyncCommitteeContribution), @@ -768,3 +791,7 @@ export function getReturnTypes(): ReturnTypes { getLiveness: jsonType("snake"), }; } + +function parseBuilderBoostFactor(builderBoostFactorInput?: string | number | bigint): bigint | undefined { + return builderBoostFactorInput !== undefined ? BigInt(builderBoostFactorInput) : undefined; +} diff --git a/packages/api/src/beacon/server/validator.ts b/packages/api/src/beacon/server/validator.ts index 6bf446e05a16..5d6c22557060 100644 --- a/packages/api/src/beacon/server/validator.ts +++ b/packages/api/src/beacon/server/validator.ts @@ -4,6 +4,28 @@ import {ServerRoutes, getGenericJsonServer} from "../../utils/server/index.js"; import {ServerApi} from "../../interfaces.js"; export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerRoutes { - // All routes return JSON, use a server auto-generator - return getGenericJsonServer, ReqTypes>({routesData, getReturnTypes, getReqSerializers}, config, api); + const reqSerializers = getReqSerializers(); + const returnTypes = getReturnTypes(); + + // Most of routes return JSON, use a server auto-generator + const serverRoutes = getGenericJsonServer, ReqTypes>( + {routesData, getReturnTypes, getReqSerializers}, + config, + api + ); + return { + ...serverRoutes, + produceBlockV3: { + ...serverRoutes.produceBlockV3, + handler: async (req, res) => { + const response = await 
api.produceBlockV3(...reqSerializers.produceBlockV3.parseReq(req)); + void res.header("Eth-Consensus-Version", response.version); + void res.header("Eth-Execution-Payload-Blinded", response.executionPayloadBlinded); + void res.header("Eth-Execution-Payload-Value", response.executionPayloadValue); + void res.header("Eth-Consensus-Block-Value", response.consensusBlockValue); + + return returnTypes.produceBlockV3.toJson(response); + }, + }, + }; } diff --git a/packages/api/src/builder/routes.ts b/packages/api/src/builder/routes.ts index 0136f1deeac4..6f5a55f0dcff 100644 --- a/packages/api/src/builder/routes.ts +++ b/packages/api/src/builder/routes.ts @@ -34,7 +34,7 @@ export type Api = { HttpStatusCode.NOT_FOUND | HttpStatusCode.BAD_REQUEST > >; - submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlockOrContents): Promise< + submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise< ApiClientResponse< { [HttpStatusCode.OK]: { diff --git a/packages/api/src/keymanager/routes.ts b/packages/api/src/keymanager/routes.ts index 09f5e7610604..48f928e86100 100644 --- a/packages/api/src/keymanager/routes.ts +++ b/packages/api/src/keymanager/routes.ts @@ -72,6 +72,10 @@ export type GasLimitData = { pubkey: string; gasLimit: number; }; +export type BuilderBoostFactorData = { + pubkey: string; + builderBoostFactor: bigint; +}; export type SignerDefinition = { pubkey: PubkeyHex; @@ -247,6 +251,27 @@ export type Api = { > >; + getBuilderBoostFactor( + pubkey: string + ): Promise>; + setBuilderBoostFactor( + pubkey: string, + builderBoostFactor: bigint + ): Promise< + ApiClientResponse< + {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void}, + HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | HttpStatusCode.NOT_FOUND + > + >; + deleteBuilderBoostFactor( + pubkey: string + ): Promise< + ApiClientResponse< + {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void}, + HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | 
HttpStatusCode.NOT_FOUND + > + >; + /** * Create a signed voluntary exit message for an active validator, identified by a public key known to the validator * client. This endpoint returns a `SignedVoluntaryExit` object, which can be used to initiate voluntary exit via the @@ -290,6 +315,10 @@ export const routesData: RoutesData = { setGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "POST", statusOk: 202}, deleteGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "DELETE", statusOk: 204}, + getBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "GET"}, + setBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "POST", statusOk: 202}, + deleteBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "DELETE", statusOk: 204}, + signVoluntaryExit: {url: "/eth/v1/validator/{pubkey}/voluntary_exit", method: "POST"}, }; @@ -326,6 +355,10 @@ export type ReqTypes = { setGasLimit: {params: {pubkey: string}; body: {gas_limit: string}}; deleteGasLimit: {params: {pubkey: string}}; + getBuilderBoostFactor: {params: {pubkey: string}}; + setBuilderBoostFactor: {params: {pubkey: string}; body: {builder_boost_factor: string}}; + deleteBuilderBoostFactor: {params: {pubkey: string}}; + signVoluntaryExit: {params: {pubkey: string}; query: {epoch?: number}}; }; @@ -423,6 +456,33 @@ export function getReqSerializers(): ReqSerializers { params: {pubkey: Schema.StringRequired}, }, }, + + getBuilderBoostFactor: { + writeReq: (pubkey) => ({params: {pubkey}}), + parseReq: ({params: {pubkey}}) => [pubkey], + schema: { + params: {pubkey: Schema.StringRequired}, + }, + }, + setBuilderBoostFactor: { + writeReq: (pubkey, builderBoostFactor) => ({ + params: {pubkey}, + body: {builder_boost_factor: builderBoostFactor.toString(10)}, + }), + parseReq: ({params: {pubkey}, body: {builder_boost_factor}}) => [pubkey, BigInt(builder_boost_factor)], + schema: { + params: {pubkey: 
Schema.StringRequired}, + body: Schema.Object, + }, + }, + deleteBuilderBoostFactor: { + writeReq: (pubkey) => ({params: {pubkey}}), + parseReq: ({params: {pubkey}}) => [pubkey], + schema: { + params: {pubkey: Schema.StringRequired}, + }, + }, + signVoluntaryExit: { writeReq: (pubkey, epoch) => ({params: {pubkey}, query: epoch !== undefined ? {epoch} : {}}), parseReq: ({params: {pubkey}, query: {epoch}}) => [pubkey, epoch], @@ -455,6 +515,15 @@ export function getReturnTypes(): ReturnTypes { {jsonCase: "eth2"} ) ), + getBuilderBoostFactor: ContainerData( + new ContainerType( + { + pubkey: stringType, + builderBoostFactor: ssz.UintBn64, + }, + {jsonCase: "eth2"} + ) + ), signVoluntaryExit: ContainerData(ssz.phase0.SignedVoluntaryExit), }; } diff --git a/packages/api/src/utils/client/metrics.ts b/packages/api/src/utils/client/metrics.ts index c8bc3c0637a4..65089e92e7ec 100644 --- a/packages/api/src/utils/client/metrics.ts +++ b/packages/api/src/utils/client/metrics.ts @@ -1,49 +1,9 @@ +import {Gauge, GaugeExtra, Histogram} from "@lodestar/utils"; + export type Metrics = { - requestTime: Histogram<"routeId">; - streamTime: Histogram<"routeId">; - requestErrors: Gauge<"routeId">; - requestToFallbacks: Gauge<"routeId">; - urlsScore: Gauge<"urlIndex">; + requestTime: Histogram<{routeId: string}>; + streamTime: Histogram<{routeId: string}>; + requestErrors: Gauge<{routeId: string}>; + requestToFallbacks: Gauge<{routeId: string}>; + urlsScore: GaugeExtra<{urlIndex: number}>; }; - -type LabelValues = Partial>; -type CollectFn = (metric: Gauge) => void; - -export interface Gauge { - /** - * Increment gauge for given labels - * @param labels Object with label keys and values - * @param value The value to increment with - */ - inc(labels: LabelValues, value?: number): void; - - /** - * Increment gauge - * @param value The value to increment with - */ - inc(value?: number): void; - - /** - * Set gauge value for labels - * @param labels Object with label keys and values - * 
@param value The value to set - */ - set(labels: LabelValues, value: number): void; - - /** - * Set gauge value - * @param value The value to set - */ - set(value: number): void; - - addCollect(collectFn: CollectFn): void; -} - -export interface Histogram { - /** - * Start a timer where the value in seconds will observed - * @param labels Object with label keys and values - * @return Function to invoke when timer should be stopped - */ - startTimer(labels?: LabelValues): (labels?: LabelValues) => number; -} diff --git a/packages/api/src/utils/routes.ts b/packages/api/src/utils/routes.ts index 213a561efd58..77d177f7b24c 100644 --- a/packages/api/src/utils/routes.ts +++ b/packages/api/src/utils/routes.ts @@ -11,12 +11,14 @@ export function allForksSignedBlockContentsReqSerializer( return { toJson: (data) => ({ signed_block: blockSerializer(data.signedBlock).toJson(data.signedBlock), - signed_blob_sidecars: ssz.deneb.SignedBlobSidecars.toJson(data.signedBlobSidecars), + kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs), + blobs: ssz.deneb.Blobs.toJson(data.blobs), }), - fromJson: (data: {signed_block: unknown; signed_blob_sidecars: unknown}) => ({ + fromJson: (data: {signed_block: unknown; kzg_proofs: unknown; blobs: unknown}) => ({ signedBlock: blockSerializer(data.signed_block as allForks.SignedBeaconBlock).fromJson(data.signed_block), - signedBlobSidecars: ssz.deneb.SignedBlobSidecars.fromJson(data.signed_blob_sidecars), + kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs), + blobs: ssz.deneb.Blobs.fromJson(data.blobs), }), }; } @@ -25,44 +27,13 @@ export function allForksBlockContentsResSerializer(fork: ForkBlobs): TypeJson ({ block: (ssz.allForks[fork].BeaconBlock as allForks.AllForksSSZTypes["BeaconBlock"]).toJson(data.block), - blob_sidecars: ssz.deneb.BlobSidecars.toJson(data.blobSidecars), + kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs), + blobs: ssz.deneb.Blobs.toJson(data.blobs), }), - fromJson: (data: {block: unknown; blob_sidecars: 
unknown}) => ({ + fromJson: (data: {block: unknown; blob_sidecars: unknown; kzg_proofs: unknown; blobs: unknown}) => ({ block: ssz.allForks[fork].BeaconBlock.fromJson(data.block), - blobSidecars: ssz.deneb.BlobSidecars.fromJson(data.blob_sidecars), - }), - }; -} - -export function allForksSignedBlindedBlockContentsReqSerializer( - blockSerializer: (data: allForks.SignedBlindedBeaconBlock) => TypeJson -): TypeJson { - return { - toJson: (data) => ({ - signed_blinded_block: blockSerializer(data.signedBlindedBlock).toJson(data.signedBlindedBlock), - signed_blinded_blob_sidecars: ssz.deneb.SignedBlindedBlobSidecars.toJson(data.signedBlindedBlobSidecars), - }), - - fromJson: (data: {signed_blinded_block: unknown; signed_blinded_blob_sidecars: unknown}) => ({ - signedBlindedBlock: blockSerializer(data.signed_blinded_block as allForks.SignedBlindedBeaconBlock).fromJson( - data.signed_blinded_block - ), - signedBlindedBlobSidecars: ssz.deneb.SignedBlindedBlobSidecars.fromJson(data.signed_blinded_blob_sidecars), - }), - }; -} - -export function allForksBlindedBlockContentsResSerializer(fork: ForkBlobs): TypeJson { - return { - toJson: (data) => ({ - blinded_block: (ssz.allForksBlinded[fork].BeaconBlock as allForks.AllForksBlindedSSZTypes["BeaconBlock"]).toJson( - data.blindedBlock - ), - blinded_blob_sidecars: ssz.deneb.BlindedBlobSidecars.toJson(data.blindedBlobSidecars), - }), - fromJson: (data: {blinded_block: unknown; blinded_blob_sidecars: unknown}) => ({ - blindedBlock: ssz.allForksBlinded[fork].BeaconBlock.fromJson(data.blinded_block), - blindedBlobSidecars: ssz.deneb.BlindedBlobSidecars.fromJson(data.blinded_blob_sidecars), + kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs), + blobs: ssz.deneb.Blobs.fromJson(data.blobs), }), }; } diff --git a/packages/api/test/unit/beacon/oapiSpec.test.ts b/packages/api/test/unit/beacon/oapiSpec.test.ts index c1abd32cb591..1a300eba6f36 100644 --- a/packages/api/test/unit/beacon/oapiSpec.test.ts +++ 
b/packages/api/test/unit/beacon/oapiSpec.test.ts @@ -6,7 +6,7 @@ import {OpenApiFile} from "../../utils/parseOpenApiSpec.js"; import {routes} from "../../../src/beacon/index.js"; import {ReqSerializers} from "../../../src/utils/types.js"; import {Schema} from "../../../src/utils/schema.js"; -import {runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js"; +import {IgnoredProperty, runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js"; import {fetchOpenApiSpec} from "../../utils/fetchOpenApiSpec.js"; // Import all testData and merge below import {testData as beaconTestData} from "./testData/beacon.js"; @@ -23,7 +23,7 @@ import {testData as validatorTestData} from "./testData/validator.js"; // eslint-disable-next-line @typescript-eslint/naming-convention const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const version = "v2.3.0"; +const version = "v2.4.2"; const openApiFile: OpenApiFile = { url: `https://github.com/ethereum/beacon-APIs/releases/download/${version}/beacon-node-oapi.json`, filepath: path.join(__dirname, "../../../oapi-schemas/beacon-node-oapi.json"), @@ -84,11 +84,105 @@ const testDatas = { ...validatorTestData, }; +const ignoredOperations = [ + /* missing route */ + /* https://github.com/ChainSafe/lodestar/issues/5694 */ + "getSyncCommitteeRewards", + "getBlockRewards", + "getAttestationsRewards", + "getDepositSnapshot", // Won't fix for now, see https://github.com/ChainSafe/lodestar/issues/5697 + "getBlindedBlock", // https://github.com/ChainSafe/lodestar/issues/5699 + "getNextWithdrawals", // https://github.com/ChainSafe/lodestar/issues/5696 + "getDebugForkChoice", // https://github.com/ChainSafe/lodestar/issues/5700 + /* https://github.com/ChainSafe/lodestar/issues/6080 */ + "getLightClientBootstrap", + "getLightClientUpdatesByRange", + "getLightClientFinalityUpdate", + "getLightClientOptimisticUpdate", + "getPoolBLSToExecutionChanges", + "submitPoolBLSToExecutionChange", +]; + +const ignoredProperties: Record = { + 
/* + https://github.com/ChainSafe/lodestar/issues/5693 + missing finalized + */ + getStateRoot: {response: ["finalized"]}, + getStateFork: {response: ["finalized"]}, + getStateFinalityCheckpoints: {response: ["finalized"]}, + getStateValidators: {response: ["finalized"]}, + getStateValidator: {response: ["finalized"]}, + getStateValidatorBalances: {response: ["finalized"]}, + getEpochCommittees: {response: ["finalized"]}, + getEpochSyncCommittees: {response: ["finalized"]}, + getStateRandao: {response: ["finalized"]}, + getBlockHeaders: {response: ["finalized"]}, + getBlockHeader: {response: ["finalized"]}, + getBlockV2: {response: ["finalized"]}, + getBlockRoot: {response: ["finalized"]}, + getBlockAttestations: {response: ["finalized"]}, + getStateV2: {response: ["finalized"]}, + + /* + https://github.com/ChainSafe/lodestar/issues/6168 + /query/syncing_status - must be integer + */ + getHealth: {request: ["query.syncing_status"]}, + + /** + * https://github.com/ChainSafe/lodestar/issues/6185 + * - must have required property 'query' + */ + getBlobSidecars: {request: ["query"]}, + + /* + https://github.com/ChainSafe/lodestar/issues/4638 + /query - must have required property 'skip_randao_verification' + */ + produceBlockV2: {request: ["query.skip_randao_verification"]}, + produceBlindedBlock: {request: ["query.skip_randao_verification"]}, +}; + const openApiJson = await fetchOpenApiSpec(openApiFile); -runTestCheckAgainstSpec(openApiJson, routesData, reqSerializers, returnTypes, testDatas, { - // TODO: Investigate why schema validation fails otherwise - routesDropOneOf: ["produceBlockV2", "produceBlindedBlock", "publishBlindedBlock"], -}); +runTestCheckAgainstSpec( + openApiJson, + routesData, + reqSerializers, + returnTypes, + testDatas, + { + // TODO: Investigate why schema validation fails otherwise (see https://github.com/ChainSafe/lodestar/issues/6187) + routesDropOneOf: [ + "produceBlockV2", + "produceBlockV3", + "produceBlindedBlock", + 
"publishBlindedBlock", + "publishBlindedBlockV2", + ], + }, + ignoredOperations, + ignoredProperties +); + +const ignoredTopics = [ + /* + https://github.com/ChainSafe/lodestar/issues/6167 + eventTestData[bls_to_execution_change] does not match spec's example + */ + "bls_to_execution_change", + /* + https://github.com/ChainSafe/lodestar/issues/6170 + Error: Invalid slot=0 fork=phase0 for lightclient fork types + */ + "light_client_finality_update", + "light_client_optimistic_update", + /* + https://github.com/ethereum/beacon-APIs/pull/379 + SyntaxError: Unexpected non-whitespace character after JSON at position 629 (line 1 column 630) + */ + "payload_attributes", +]; // eventstream types are defined as comments in the description of "examples". // The function runTestCheckAgainstSpec() can't handle those, so the custom code before: @@ -113,7 +207,9 @@ describe("eventstream event data", () => { const eventSerdes = routes.events.getEventSerdes(config); const knownTopics = new Set(Object.values(routes.events.eventTypes)); - for (const [topic, {value}] of Object.entries(eventstreamExamples ?? {})) { + for (const [topic, {value}] of Object.entries(eventstreamExamples ?? 
{}).filter( + ([topic]) => !ignoredTopics.includes(topic) + )) { it(topic, () => { if (!knownTopics.has(topic)) { throw Error(`topic ${topic} not implemented`); @@ -130,7 +226,6 @@ describe("eventstream event data", () => { if (testEvent == null) { throw Error(`No eventTestData for ${topic}`); } - const testEventJson = eventSerdes.toJson({ type: topic as routes.events.EventType, message: testEvent, diff --git a/packages/api/test/unit/beacon/testData/validator.ts b/packages/api/test/unit/beacon/testData/validator.ts index b827bad0be90..2688f2080eba 100644 --- a/packages/api/test/unit/beacon/testData/validator.ts +++ b/packages/api/test/unit/beacon/testData/validator.ts @@ -1,5 +1,5 @@ import {ForkName} from "@lodestar/params"; -import {ssz} from "@lodestar/types"; +import {ssz, ProducedBlockSource} from "@lodestar/types"; import {Api} from "../../../../src/beacon/routes/validator.js"; import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; @@ -50,7 +50,13 @@ export const testData: GenericServerTestCases = { randaoReveal, graffiti, undefined, - {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + { + feeRecipient, + builderSelection: undefined, + strictFeeRecipientCheck: undefined, + blindedLocal: undefined, + builderBoostFactor: 100n, + }, ] as unknown as GenericServerTestCases["produceBlock"]["args"], res: {data: ssz.phase0.BeaconBlock.defaultValue()}, }, @@ -60,7 +66,13 @@ export const testData: GenericServerTestCases = { randaoReveal, graffiti, undefined, - {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + { + feeRecipient, + builderSelection: undefined, + strictFeeRecipientCheck: undefined, + blindedLocal: undefined, + builderBoostFactor: 100n, + }, ] as unknown as GenericServerTestCases["produceBlockV2"]["args"], res: { data: ssz.altair.BeaconBlock.defaultValue(), @@ -75,7 +87,13 @@ export const testData: GenericServerTestCases = { randaoReveal, graffiti, 
true, - {feeRecipient, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + { + feeRecipient, + builderSelection: undefined, + strictFeeRecipientCheck: undefined, + blindedLocal: undefined, + builderBoostFactor: 100n, + }, ], res: { data: ssz.altair.BeaconBlock.defaultValue(), @@ -83,6 +101,7 @@ export const testData: GenericServerTestCases = { executionPayloadValue: ssz.Wei.defaultValue(), consensusBlockValue: ssz.Gwei.defaultValue(), executionPayloadBlinded: false, + executionPayloadSource: ProducedBlockSource.engine, }, }, produceBlindedBlock: { @@ -91,7 +110,13 @@ export const testData: GenericServerTestCases = { randaoReveal, graffiti, undefined, - {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + { + feeRecipient, + builderSelection: undefined, + strictFeeRecipientCheck: undefined, + blindedLocal: undefined, + builderBoostFactor: 100n, + }, ] as unknown as GenericServerTestCases["produceBlindedBlock"]["args"], res: { data: ssz.bellatrix.BlindedBeaconBlock.defaultValue(), diff --git a/packages/api/test/unit/keymanager/testData.ts b/packages/api/test/unit/keymanager/testData.ts index a4fc72fc8e2d..2c66610c8733 100644 --- a/packages/api/test/unit/keymanager/testData.ts +++ b/packages/api/test/unit/keymanager/testData.ts @@ -13,6 +13,7 @@ const pubkeyRand = "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff const ethaddressRand = "0xabcf8e0d4e9587369b2301d0790347320302cc09"; const graffitiRandUtf8 = "636861696e736166652f6c6f64657374"; const gasLimitRand = 30_000_000; +const builderBoostFactorRand = BigInt(100); export const testData: GenericServerTestCases = { listKeys: { @@ -99,4 +100,16 @@ export const testData: GenericServerTestCases = { args: [pubkeyRand, 1], res: {data: ssz.phase0.SignedVoluntaryExit.defaultValue()}, }, + getBuilderBoostFactor: { + args: [pubkeyRand], + res: {data: {pubkey: pubkeyRand, builderBoostFactor: builderBoostFactorRand}}, + }, + setBuilderBoostFactor: { + args: 
[pubkeyRand, builderBoostFactorRand], + res: undefined, + }, + deleteBuilderBoostFactor: { + args: [pubkeyRand], + res: undefined, + }, }; diff --git a/packages/api/test/utils/checkAgainstSpec.ts b/packages/api/test/utils/checkAgainstSpec.ts index 01e7df255db2..ed65279bca22 100644 --- a/packages/api/test/utils/checkAgainstSpec.ts +++ b/packages/api/test/utils/checkAgainstSpec.ts @@ -1,16 +1,16 @@ import Ajv, {ErrorObject} from "ajv"; import {expect, describe, beforeAll, it} from "vitest"; import {ReqGeneric, ReqSerializer, ReturnTypes, RouteDef} from "../../src/utils/types.js"; -import {applyRecursively, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from "./parseOpenApiSpec.js"; +import {applyRecursively, JsonSchema, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from "./parseOpenApiSpec.js"; import {GenericServerTestCases} from "./genericServerTest.js"; const ajv = new Ajv({ - // strict: true, - // strictSchema: true, + strict: true, + strictTypes: false, // TODO Enable once beacon-APIs is fixed. See https://github.com/ChainSafe/lodestar/issues/6206 allErrors: true, }); -// TODO: Still necessary? +// Ensure embedded schema 'example' do not fail validation ajv.addKeyword({ keyword: "example", validate: () => true, @@ -19,17 +19,69 @@ ajv.addKeyword({ ajv.addFormat("hex", /^0x[a-fA-F0-9]+$/); +/** + * A set of properties that will be ignored during tests execution. + * This allows for a black-list mechanism to have a test pass while some part of the spec is not yet implemented. + * + * Properties can be nested using dot notation, following JSONPath semantic. 
+ * + * Example: + * - query + * - query.skip_randao_verification + */ +export type IgnoredProperty = { + /** + * Properties to ignore in the request schema + */ + request?: string[]; + /** + * Properties to ignore in the response schema + */ + response?: string[]; +}; + +/** + * Recursively remove a property from a schema + * + * @param schema Schema to remove a property from + * @param property JSONPath like property to remove from the schema + */ +function deleteNested(schema: JsonSchema | undefined, property: string): void { + const properties = schema?.properties; + if (property.includes(".")) { + // Extract first segment, keep the rest as dotted + const [key, ...rest] = property.split("."); + deleteNested(properties?.[key], rest.join(".")); + } else { + // Remove property from 'required' + if (schema?.required) { + schema.required = schema.required?.filter((e) => property !== e); + } + // Remove property from 'properties' + delete properties?.[property]; + } +} + export function runTestCheckAgainstSpec( openApiJson: OpenApiJson, routesData: Record, reqSerializers: Record>, returnTypes: Record[string]>, testDatas: Record[string]>, - opts?: ParseOpenApiSpecOpts + opts?: ParseOpenApiSpecOpts, + ignoredOperations: string[] = [], + ignoredProperties: Record = {} ): void { const openApiSpec = parseOpenApiSpec(openApiJson, opts); for (const [operationId, routeSpec] of openApiSpec.entries()) { + const isIgnored = ignoredOperations.some((id) => id === operationId); + if (isIgnored) { + continue; + } + + const ignoredProperty = ignoredProperties[operationId]; + describe(operationId, () => { const {requestSchema, responseOkSchema} = routeSpec; const routeId = operationId; @@ -68,7 +120,15 @@ export function runTestCheckAgainstSpec( stringifyProperties((reqJson as ReqGeneric).params ?? {}); stringifyProperties((reqJson as ReqGeneric).query ?? 
{}); - // Validate response + const ignoredProperties = ignoredProperty?.request; + if (ignoredProperties) { + // Remove ignored properties from schema validation + for (const property of ignoredProperties) { + deleteNested(routeSpec.requestSchema, property); + } + } + + // Validate request validateSchema(routeSpec.requestSchema, reqJson, "request"); }); } @@ -87,6 +147,13 @@ export function runTestCheckAgainstSpec( } } + const ignoredProperties = ignoredProperty?.response; + if (ignoredProperties) { + // Remove ignored properties from schema validation + for (const property of ignoredProperties) { + deleteNested(routeSpec.responseOkSchema, property); + } + } // Validate response validateSchema(responseOkSchema, resJson, "response"); }); diff --git a/packages/api/test/utils/parseOpenApiSpec.ts b/packages/api/test/utils/parseOpenApiSpec.ts index 5faf0082012d..84b024e5950e 100644 --- a/packages/api/test/utils/parseOpenApiSpec.ts +++ b/packages/api/test/utils/parseOpenApiSpec.ts @@ -11,7 +11,7 @@ type RouteUrl = string; /** "get" | "post" */ type HttpMethod = string; -type JsonSchema = { +export type JsonSchema = { type: "object"; properties?: Record; required?: string[]; diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index b29169777549..68ff28c7d9e2 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -79,8 +79,8 @@ "test": "yarn test:unit && yarn test:e2e", "test:unit:minimal": "vitest --run --segfaultRetry 3 --dir test/unit/ --coverage", "test:unit:mainnet": "LODESTAR_PRESET=mainnet nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit-mainnet/**/*.test.ts'", - "test:unit": "yarn test:unit:minimal && yarn test:unit:mainnet", - "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 
--poolOptions.threads.singleThread --dir test/e2e", + "test:unit": "wrapper() { yarn test:unit:minimal $@ && yarn test:unit:mainnet $@; }; wrapper", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 --poolOptions.threads.singleThread true --dir test/e2e", "test:sim": "mocha 'test/sim/**/*.test.ts'", "test:sim:merge-interop": "mocha 'test/sim/merge-interop.test.ts'", "test:sim:mergemock": "mocha 'test/sim/mergemock.test.ts'", @@ -119,18 +119,18 @@ "@libp2p/peer-id-factory": "^3.0.4", "@libp2p/prometheus-metrics": "^2.0.7", "@libp2p/tcp": "8.0.8", - "@lodestar/api": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/db": "^1.13.0", - "@lodestar/fork-choice": "^1.13.0", - "@lodestar/light-client": "^1.13.0", - "@lodestar/logger": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/reqresp": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", - "@lodestar/validator": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/db": "^1.14.0", + "@lodestar/fork-choice": "^1.14.0", + "@lodestar/light-client": "^1.14.0", + "@lodestar/logger": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/reqresp": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", + "@lodestar/validator": "^1.14.0", "@multiformats/multiaddr": "^12.1.3", "@types/datastore-level": "^3.0.0", "buffer-xor": "^2.0.2", @@ -145,7 +145,7 @@ "jwt-simple": "0.5.6", "libp2p": "0.46.12", "multiformats": "^11.0.1", - "prom-client": "^14.2.0", + "prom-client": "^15.1.0", "qs": "^6.11.1", "snappyjs": "^0.7.0", "strict-event-emitter-types": "^2.0.0", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index c54e040ceb06..89565426426e 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ 
b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,17 +1,13 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {routes, ServerApi, ResponseFormat} from "@lodestar/api"; -import { - computeTimeAtSlot, - parseSignedBlindedBlockOrContents, - reconstructFullBlockOrContents, - DataAvailableStatus, -} from "@lodestar/state-transition"; +import {computeTimeAtSlot, reconstructFullBlockOrContents} from "@lodestar/state-transition"; import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {sleep, toHex} from "@lodestar/utils"; import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; import {BlockSource, getBlockInput, ImportBlockOpts, BlockInput} from "../../../../chain/blocks/types.js"; import {promiseAllMaybeAsync} from "../../../../util/promises.js"; import {isOptimisticBlock} from "../../../../util/forkChoice.js"; +import {computeBlobSidecars} from "../../../../util/blobs.js"; import {BlockError, BlockErrorCode} from "../../../../chain/errors/index.js"; import {OpSource} from "../../../../metrics/validatorMonitor.js"; import {NetworkEvent} from "../../../../network/index.js"; @@ -46,22 +42,23 @@ export function getBeaconBlockApi({ opts: PublishBlockOpts = {} ) => { const seenTimestampSec = Date.now() / 1000; - let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, signedBlobs: deneb.SignedBlobSidecars; + let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, blobSidecars: deneb.BlobSidecars; if (isSignedBlockContents(signedBlockOrContents)) { - ({signedBlock, signedBlobSidecars: signedBlobs} = signedBlockOrContents); + ({signedBlock} = signedBlockOrContents); + blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); blockForImport = getBlockInput.postDeneb( config, signedBlock, BlockSource.api, - signedBlobs.map((sblob) => sblob.message), + blobSidecars, // don't bundle any bytes for block and blobs null, - signedBlobs.map(() => null) 
+ blobSidecars.map(() => null) ); } else { signedBlock = signedBlockOrContents; - signedBlobs = []; + blobSidecars = []; // TODO: Once API supports submitting data as SSZ, replace null with blockBytes blockForImport = getBlockInput.preDeneb(config, signedBlock, BlockSource.api, null); } @@ -121,19 +118,13 @@ export function getBeaconBlockApi({ } try { - await verifyBlocksInEpoch.call( - chain as BeaconChain, - parentBlock, - [blockForImport], - [DataAvailableStatus.available], - { - ...opts, - verifyOnly: true, - skipVerifyBlockSignatures: true, - skipVerifyExecutionPayload: true, - seenTimestampSec, - } - ); + await verifyBlocksInEpoch.call(chain as BeaconChain, parentBlock, [blockForImport], { + ...opts, + verifyOnly: true, + skipVerifyBlockSignatures: true, + skipVerifyExecutionPayload: true, + seenTimestampSec, + }); } catch (error) { chain.logger.error("Consensus checks failed while publishing the block", valLogMeta, error as Error); chain.persistInvalidSszValue( @@ -195,18 +186,15 @@ export function getBeaconBlockApi({ } throw e; }), - ...signedBlobs.map((signedBlob) => () => network.publishBlobSidecar(signedBlob)), + ...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)), ]; await promiseAllMaybeAsync(publishPromises); }; const publishBlindedBlock: ServerApi["publishBlindedBlock"] = async ( - signedBlindedBlockOrContents, + signedBlindedBlock, opts: PublishBlockOpts = {} ) => { - const {signedBlindedBlock, signedBlindedBlobSidecars} = - parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents); - const slot = signedBlindedBlock.message.slot; const blockRoot = toHex( chain.config @@ -217,27 +205,31 @@ export function getBeaconBlockApi({ // Either the payload/blobs are cached from i) engine locally or ii) they are from the builder // // executionPayload can be null or a real payload in locally produced so check for presence of root - const source = chain.producedBlockRoot.has(blockRoot) ? 
ProducedBlockSource.engine : ProducedBlockSource.builder; - - const executionPayload = chain.producedBlockRoot.get(blockRoot) ?? null; - const blobSidecars = executionPayload - ? chain.producedBlobSidecarsCache.get(toHex(executionPayload.blockHash)) - : undefined; - const blobs = blobSidecars ? blobSidecars.map((blobSidecar) => blobSidecar.blob) : null; - - chain.logger.debug("Assembling blinded block for publishing", {source, blockRoot, slot}); + const executionPayload = chain.producedBlockRoot.get(blockRoot); + if (executionPayload !== undefined) { + const source = ProducedBlockSource.engine; + chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source}); + + const contents = executionPayload + ? chain.producedContentsCache.get(toHex(executionPayload.blockHash)) ?? null + : null; + const signedBlockOrContents = reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents}); + + chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); + return publishBlock(signedBlockOrContents, opts); + } else { + const source = ProducedBlockSource.builder; + chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source}); - const signedBlockOrContents = - source === ProducedBlockSource.engine - ? 
reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}) - : await reconstructBuilderBlockOrContents(chain, signedBlindedBlockOrContents); + const signedBlockOrContents = await reconstructBuilderBlockOrContents(chain, signedBlindedBlock); - // the full block is published by relay and it's possible that the block is already known to us - // by gossip - // - // see: https://github.com/ChainSafe/lodestar/issues/5404 - chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); - return publishBlock(signedBlockOrContents, {...opts, ignoreIfKnown: true}); + // the full block is published by relay and it's possible that the block is already known to us + // by gossip + // + // see: https://github.com/ChainSafe/lodestar/issues/5404 + chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); + return publishBlock(signedBlockOrContents, {...opts, ignoreIfKnown: true}); + } }; return { @@ -431,13 +423,13 @@ export function getBeaconBlockApi({ async function reconstructBuilderBlockOrContents( chain: ApiModules["chain"], - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents + signedBlindedBlock: allForks.SignedBlindedBeaconBlock ): Promise { const executionBuilder = chain.executionBuilder; if (!executionBuilder) { throw Error("exeutionBuilder required to publish SignedBlindedBeaconBlock"); } - const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlockOrContents); + const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlock); return signedBlockOrContents; } diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index 8f92fa483908..f0f076c6c13a 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -9,7 +9,6 @@ import { computeEpochAtSlot, getCurrentSlot, 
beaconBlockToBlinded, - blobSidecarsToBlinded, } from "@lodestar/state-transition"; import { GENESIS_SLOT, @@ -20,6 +19,7 @@ import { isForkExecution, ForkSeq, } from "@lodestar/params"; +import {MAX_BUILDER_BOOST_FACTOR} from "@lodestar/validator"; import { Root, Slot, @@ -31,11 +31,11 @@ import { allForks, BLSSignature, isBlindedBeaconBlock, - isBlindedBlockContents, + isBlockContents, phase0, } from "@lodestar/types"; import {ExecutionStatus} from "@lodestar/fork-choice"; -import {toHex, racePromisesWithCutoff, RaceEvent} from "@lodestar/utils"; +import {toHex, racePromisesWithCutoff, RaceEvent, gweiToWei} from "@lodestar/utils"; import {AttestationError, AttestationErrorCode, GossipAction, SyncCommitteeError} from "../../../chain/errors/index.js"; import {validateApiAggregateAndProof} from "../../../chain/validation/index.js"; import {ZERO_HASH} from "../../../constants/index.js"; @@ -50,7 +50,7 @@ import {RegenCaller} from "../../../chain/regen/index.js"; import {getValidatorStatus} from "../beacon/state/utils.js"; import {validateGossipFnRetryUnknownRoot} from "../../../network/processor/gossipHandlers.js"; import {SCHEDULER_LOOKAHEAD_FACTOR} from "../../../chain/prepareNextSlot.js"; -import {ChainEvent, CheckpointHex} from "../../../chain/index.js"; +import {ChainEvent, CheckpointHex, CommonBlockBody} from "../../../chain/index.js"; import {computeSubnetForCommitteesAtSlot, getPubkeysForIndices} from "./utils.js"; /** @@ -280,15 +280,24 @@ export function getValidatorApi({ ); } - const produceBlindedBlockOrContents = async function produceBlindedBlockOrContents( + const produceBuilderBlindedBlock = async function produceBuilderBlindedBlock( slot: Slot, randaoReveal: BLSSignature, graffiti: string, // as of now fee recipient checks can not be performed because builder does not return bid recipient { skipHeadChecksAndUpdate, - }: Omit & {skipHeadChecksAndUpdate?: boolean} = {} - ): Promise { + commonBlockBody, + }: Omit & { + skipHeadChecksAndUpdate?: boolean; 
+ commonBlockBody?: CommonBlockBody; + } = {} + ): Promise { + const version = config.getForkName(slot); + if (!isForkExecution(version)) { + throw Error(`Invalid fork=${version} for produceBuilderBlindedBlock`); + } + const source = ProducedBlockSource.builder; metrics?.blockProductionRequests.inc({source}); @@ -318,6 +327,7 @@ export function getValidatorApi({ slot, randaoReveal, graffiti: toGraffitiBuffer(graffiti || ""), + commonBlockBody, }); metrics?.blockProductionSuccess.inc({source}); @@ -329,31 +339,17 @@ export function getValidatorApi({ root: toHexString(config.getBlindedForkTypes(slot).BeaconBlock.hashTreeRoot(block)), }); - const version = config.getForkName(block.slot); if (chain.opts.persistProducedBlocks) { void chain.persistBlock(block, "produced_builder_block"); } - if (isForkBlobs(version)) { - const blockHash = toHex((block as bellatrix.BlindedBeaconBlock).body.executionPayloadHeader.blockHash); - const blindedBlobSidecars = chain.producedBlindedBlobSidecarsCache.get(blockHash); - if (blindedBlobSidecars === undefined) { - throw Error("blobSidecars missing in cache"); - } - return { - data: {blindedBlock: block, blindedBlobSidecars} as allForks.BlindedBlockContents, - version, - executionPayloadValue, - consensusBlockValue, - }; - } else { - return {data: block, version, executionPayloadValue, consensusBlockValue}; - } + + return {data: block, version, executionPayloadValue, consensusBlockValue}; } finally { if (timer) timer({source}); } }; - const produceFullBlockOrContents = async function produceFullBlockOrContents( + const produceEngineFullBlockOrContents = async function produceEngineFullBlockOrContents( slot: Slot, randaoReveal: BLSSignature, graffiti: string, @@ -361,8 +357,12 @@ export function getValidatorApi({ feeRecipient, strictFeeRecipientCheck, skipHeadChecksAndUpdate, - }: Omit & {skipHeadChecksAndUpdate?: boolean} = {} - ): Promise { + commonBlockBody, + }: Omit & { + skipHeadChecksAndUpdate?: boolean; + commonBlockBody?: 
CommonBlockBody; + } = {} + ): Promise { const source = ProducedBlockSource.engine; metrics?.blockProductionRequests.inc({source}); @@ -380,11 +380,12 @@ export function getValidatorApi({ let timer; try { timer = metrics?.blockProductionTime.startTimer(); - const {block, executionPayloadValue, consensusBlockValue} = await chain.produceBlock({ + const {block, executionPayloadValue, consensusBlockValue, shouldOverrideBuilder} = await chain.produceBlock({ slot, randaoReveal, graffiti: toGraffitiBuffer(graffiti || ""), feeRecipient, + commonBlockBody, }); const version = config.getForkName(block.slot); if (strictFeeRecipientCheck && feeRecipient && isForkExecution(version)) { @@ -407,218 +408,280 @@ export function getValidatorApi({ } if (isForkBlobs(version)) { const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash); - const blobSidecars = chain.producedBlobSidecarsCache.get(blockHash); - if (blobSidecars === undefined) { - throw Error("blobSidecars missing in cache"); + const contents = chain.producedContentsCache.get(blockHash); + if (contents === undefined) { + throw Error("contents missing in cache"); } + return { - data: {block, blobSidecars} as allForks.BlockContents, + data: {block, ...contents} as allForks.BlockContents, version, executionPayloadValue, consensusBlockValue, + shouldOverrideBuilder, }; } else { - return {data: block, version, executionPayloadValue, consensusBlockValue}; + return {data: block, version, executionPayloadValue, consensusBlockValue, shouldOverrideBuilder}; } } finally { if (timer) timer({source}); } }; - const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3( - slot, - randaoReveal, - graffiti, - // TODO deneb: skip randao verification - _skipRandaoVerification?: boolean, - {feeRecipient, builderSelection, strictFeeRecipientCheck}: routes.validator.ExtraProduceBlockOps = {} - ) { - notWhileSyncing(); - await waitForSlot(slot); // Must never request for a future slot > 
currentSlot - - // Process the queued attestations in the forkchoice for correct head estimation - // forkChoice.updateTime() might have already been called by the onSlot clock - // handler, in which case this should just return. - chain.forkChoice.updateTime(slot); - chain.recomputeForkChoiceHead(); - - const fork = config.getForkName(slot); - // set some sensible opts - builderSelection = builderSelection ?? routes.validator.BuilderSelection.MaxProfit; - const isBuilderEnabled = - ForkSeq[fork] >= ForkSeq.bellatrix && - chain.executionBuilder !== undefined && - builderSelection !== routes.validator.BuilderSelection.ExecutionOnly; - - logger.verbose("Assembling block with produceBlockV3 ", { - fork, - builderSelection, + const produceEngineOrBuilderBlock: ServerApi["produceBlockV3"] = + async function produceEngineOrBuilderBlock( slot, - isBuilderEnabled, - strictFeeRecipientCheck, - }); - // Start calls for building execution and builder blocks - const blindedBlockPromise = isBuilderEnabled - ? // can't do fee recipient checks as builder bid doesn't return feeRecipient as of now - produceBlindedBlockOrContents(slot, randaoReveal, graffiti, { - feeRecipient, - // skip checking and recomputing head in these individual produce calls - skipHeadChecksAndUpdate: true, - }).catch((e) => { - logger.error("produceBlindedBlockOrContents failed to produce block", {slot}, e); - return null; - }) - : null; + randaoReveal, + graffiti, + // TODO deneb: skip randao verification + _skipRandaoVerification?: boolean, + { + feeRecipient, + builderSelection, + builderBoostFactor, + strictFeeRecipientCheck, + }: routes.validator.ExtraProduceBlockOps = {} + ) { + notWhileSyncing(); + await waitForSlot(slot); // Must never request for a future slot > currentSlot - const fullBlockPromise = - // At any point either the builder or execution or both flows should be active. 
- // - // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager - // configurations could cause a validator pubkey to have builder disabled with builder selection builder only - // (TODO: independently make sure such an options update is not successful for a validator pubkey) - // - // So if builder is disabled ignore builder selection of builderonly if caused by user mistake - !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly - ? // TODO deneb: builderSelection needs to be figured out if to be done beacon side - // || builderSelection !== BuilderSelection.BuilderOnly - produceFullBlockOrContents(slot, randaoReveal, graffiti, { + // Process the queued attestations in the forkchoice for correct head estimation + // forkChoice.updateTime() might have already been called by the onSlot clock + // handler, in which case this should just return. + chain.forkChoice.updateTime(slot); + chain.recomputeForkChoiceHead(); + + const fork = config.getForkName(slot); + // set some sensible opts + // builderSelection will be deprecated and will run in mode MaxProfit if builder is enabled + // and the actual selection will be determined using builderBoostFactor passed by the validator + builderSelection = builderSelection ?? routes.validator.BuilderSelection.MaxProfit; + builderBoostFactor = builderBoostFactor ?? 
BigInt(100); + if (builderBoostFactor > MAX_BUILDER_BOOST_FACTOR) { + throw new ApiError(400, `Invalid builderBoostFactor=${builderBoostFactor} > MAX_BUILDER_BOOST_FACTOR`); + } + + const isBuilderEnabled = + ForkSeq[fork] >= ForkSeq.bellatrix && + chain.executionBuilder !== undefined && + builderSelection !== routes.validator.BuilderSelection.ExecutionOnly; + + const loggerContext = { + fork, + builderSelection, + slot, + isBuilderEnabled, + strictFeeRecipientCheck, + // winston logger doesn't like bigint + builderBoostFactor: `${builderBoostFactor}`, + }; + + logger.verbose("Assembling block with produceEngineOrBuilderBlock", loggerContext); + const commonBlockBody = await chain.produceCommonBlockBody({ + slot, + randaoReveal, + graffiti: toGraffitiBuffer(graffiti || ""), + }); + logger.debug("Produced common block body", loggerContext); + + // Start calls for building execution and builder blocks + const blindedBlockPromise = isBuilderEnabled + ? // can't do fee recipient checks as builder bid doesn't return feeRecipient as of now + produceBuilderBlindedBlock(slot, randaoReveal, graffiti, { feeRecipient, - strictFeeRecipientCheck, // skip checking and recomputing head in these individual produce calls skipHeadChecksAndUpdate: true, + commonBlockBody, }).catch((e) => { - logger.error("produceFullBlockOrContents failed to produce block", {slot}, e); + logger.error("produceBuilderBlindedBlock failed to produce block", {slot}, e); return null; }) : null; - let blindedBlock, fullBlock; - if (blindedBlockPromise !== null && fullBlockPromise !== null) { - // reference index of promises in the race - const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine]; - [blindedBlock, fullBlock] = await racePromisesWithCutoff< - routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockOrContentsRes | null - >( - [blindedBlockPromise, fullBlockPromise], - BLOCK_PRODUCTION_RACE_CUTOFF_MS, - BLOCK_PRODUCTION_RACE_TIMEOUT_MS, - // 
Callback to log the race events for better debugging capability - (event: RaceEvent, delayMs: number, index?: number) => { - const eventRef = index !== undefined ? {source: promisesOrder[index]} : {}; - logger.verbose("Block production race (builder vs execution)", { - event, - ...eventRef, - delayMs, - cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS, - timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS, - slot, - }); + const fullBlockPromise = + // At any point either the builder or execution or both flows should be active. + // + // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager + // configurations could cause a validator pubkey to have builder disabled with builder selection builder only + // (TODO: independently make sure such an options update is not successful for a validator pubkey) + // + // So if builder is disabled ignore builder selection of builderonly if caused by user mistake + !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly + ? 
// TODO deneb: builderSelection needs to be figured out if to be done beacon side + // || builderSelection !== BuilderSelection.BuilderOnly + produceEngineFullBlockOrContents(slot, randaoReveal, graffiti, { + feeRecipient, + strictFeeRecipientCheck, + // skip checking and recomputing head in these individual produce calls + skipHeadChecksAndUpdate: true, + commonBlockBody, + }).catch((e) => { + logger.error("produceEngineFullBlockOrContents failed to produce block", {slot}, e); + return null; + }) + : null; + + let blindedBlock, fullBlock; + if (blindedBlockPromise !== null && fullBlockPromise !== null) { + // reference index of promises in the race + const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine]; + [blindedBlock, fullBlock] = await racePromisesWithCutoff< + | ((routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockRes) & { + shouldOverrideBuilder?: boolean; + }) + | null + >( + [blindedBlockPromise, fullBlockPromise], + BLOCK_PRODUCTION_RACE_CUTOFF_MS, + BLOCK_PRODUCTION_RACE_TIMEOUT_MS, + // Callback to log the race events for better debugging capability + (event: RaceEvent, delayMs: number, index?: number) => { + const eventRef = index !== undefined ? 
{source: promisesOrder[index]} : {}; + logger.verbose("Block production race (builder vs execution)", { + event, + ...eventRef, + delayMs, + cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS, + timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS, + slot, + }); + } + ); + if (blindedBlock instanceof Error) { + // error here means race cutoff exceeded + logger.error("Failed to produce builder block", {slot}, blindedBlock); + blindedBlock = null; } - ); - if (blindedBlock instanceof Error) { - // error here means race cutoff exceeded - logger.error("Failed to produce builder block", {slot}, blindedBlock); - blindedBlock = null; - } - if (fullBlock instanceof Error) { - logger.error("Failed to produce execution block", {slot}, fullBlock); + if (fullBlock instanceof Error) { + logger.error("Failed to produce execution block", {slot}, fullBlock); + fullBlock = null; + } + } else if (blindedBlockPromise !== null && fullBlockPromise === null) { + blindedBlock = await blindedBlockPromise; fullBlock = null; + } else if (blindedBlockPromise === null && fullBlockPromise !== null) { + blindedBlock = null; + fullBlock = await fullBlockPromise; + } else { + throw Error( + `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}` + ); } - } else if (blindedBlockPromise !== null && fullBlockPromise === null) { - blindedBlock = await blindedBlockPromise; - fullBlock = null; - } else if (blindedBlockPromise === null && fullBlockPromise !== null) { - blindedBlock = null; - fullBlock = await fullBlockPromise; - } else { - throw Error( - `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}` - ); - } - - const builderPayloadValue = blindedBlock?.executionPayloadValue ?? BigInt(0); - const enginePayloadValue = fullBlock?.executionPayloadValue ?? 
BigInt(0); - const consensusBlockValueBuilder = blindedBlock?.consensusBlockValue ?? BigInt(0); - const consensusBlockValueEngine = fullBlock?.consensusBlockValue ?? BigInt(0); - const blockValueBuilder = builderPayloadValue + consensusBlockValueBuilder; - const blockValueEngine = enginePayloadValue + consensusBlockValueEngine; - - let selectedSource: ProducedBlockSource | null = null; - - if (fullBlock && blindedBlock) { - switch (builderSelection) { - case routes.validator.BuilderSelection.MaxProfit: { - if (blockValueEngine >= blockValueBuilder) { - selectedSource = ProducedBlockSource.engine; - } else { - selectedSource = ProducedBlockSource.builder; + const builderPayloadValue = blindedBlock?.executionPayloadValue ?? BigInt(0); + const enginePayloadValue = fullBlock?.executionPayloadValue ?? BigInt(0); + const consensusBlockValueBuilder = blindedBlock?.consensusBlockValue ?? BigInt(0); + const consensusBlockValueEngine = fullBlock?.consensusBlockValue ?? BigInt(0); + + const blockValueBuilder = builderPayloadValue + gweiToWei(consensusBlockValueBuilder); // Total block value is in wei + const blockValueEngine = enginePayloadValue + gweiToWei(consensusBlockValueEngine); // Total block value is in wei + + let executionPayloadSource: ProducedBlockSource | null = null; + const shouldOverrideBuilder = fullBlock?.shouldOverrideBuilder ?? 
false; + + // handle the builder override case separately + if (shouldOverrideBuilder === true) { + executionPayloadSource = ProducedBlockSource.engine; + logger.info("Selected engine block as censorship suspected in builder blocks", { + // winston logger doesn't like bigint + enginePayloadValue: `${enginePayloadValue}`, + consensusBlockValueEngine: `${consensusBlockValueEngine}`, + blockValueEngine: `${blockValueEngine}`, + shouldOverrideBuilder, + slot, + }); + } else if (fullBlock && blindedBlock) { + switch (builderSelection) { + case routes.validator.BuilderSelection.MaxProfit: { + if ( + // explicitly handle the two special values mentioned in spec for builder preferred / engine preferred + builderBoostFactor !== MAX_BUILDER_BOOST_FACTOR && + (builderBoostFactor === BigInt(0) || + blockValueEngine >= (blockValueBuilder * builderBoostFactor) / BigInt(100)) + ) { + executionPayloadSource = ProducedBlockSource.engine; + } else { + executionPayloadSource = ProducedBlockSource.builder; + } + break; } - break; - } - case routes.validator.BuilderSelection.ExecutionOnly: { - selectedSource = ProducedBlockSource.engine; - break; - } + case routes.validator.BuilderSelection.ExecutionOnly: { + executionPayloadSource = ProducedBlockSource.engine; + break; + } - // For everything else just select the builder - default: { - selectedSource = ProducedBlockSource.builder; + // For everything else just select the builder + default: { + executionPayloadSource = ProducedBlockSource.builder; + } } + logger.info(`Selected executionPayloadSource=${executionPayloadSource} block`, { + builderSelection, + // winston logger doesn't like bigint + builderBoostFactor: `${builderBoostFactor}`, + enginePayloadValue: `${enginePayloadValue}`, + builderPayloadValue: `${builderPayloadValue}`, + consensusBlockValueEngine: `${consensusBlockValueEngine}`, + consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, + blockValueEngine: `${blockValueEngine}`, + blockValueBuilder: 
`${blockValueBuilder}`, + shouldOverrideBuilder, + slot, + }); + } else if (fullBlock && !blindedBlock) { + executionPayloadSource = ProducedBlockSource.engine; + logger.info("Selected engine block: no builder block produced", { + // winston logger doesn't like bigint + enginePayloadValue: `${enginePayloadValue}`, + consensusBlockValueEngine: `${consensusBlockValueEngine}`, + blockValueEngine: `${blockValueEngine}`, + shouldOverrideBuilder, + slot, + }); + } else if (blindedBlock && !fullBlock) { + executionPayloadSource = ProducedBlockSource.builder; + logger.info("Selected builder block: no engine block produced", { + // winston logger doesn't like bigint + builderPayloadValue: `${builderPayloadValue}`, + consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, + blockValueBuilder: `${blockValueBuilder}`, + shouldOverrideBuilder, + slot, + }); } - logger.verbose(`Selected ${selectedSource} block`, { - builderSelection, - // winston logger doesn't like bigint - enginePayloadValue: `${enginePayloadValue}`, - builderPayloadValue: `${builderPayloadValue}`, - consensusBlockValueEngine: `${consensusBlockValueEngine}`, - consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, - blockValueEngine: `${blockValueEngine}`, - blockValueBuilder: `${blockValueBuilder}`, - slot, - }); - } else if (fullBlock && !blindedBlock) { - selectedSource = ProducedBlockSource.engine; - logger.verbose("Selected engine block: no builder block produced", { - // winston logger doesn't like bigint - enginePayloadValue: `${enginePayloadValue}`, - consensusBlockValueEngine: `${consensusBlockValueEngine}`, - blockValueEngine: `${blockValueEngine}`, - slot, - }); - } else if (blindedBlock && !fullBlock) { - selectedSource = ProducedBlockSource.builder; - logger.verbose("Selected builder block: no engine block produced", { - // winston logger doesn't like bigint - builderPayloadValue: `${builderPayloadValue}`, - consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, - 
blockValueBuilder: `${blockValueBuilder}`, - slot, - }); - } - if (selectedSource === null) { - throw Error(`Failed to produce engine or builder block for slot=${slot}`); - } + if (executionPayloadSource === null) { + throw Error(`Failed to produce engine or builder block for slot=${slot}`); + } - if (selectedSource === ProducedBlockSource.engine) { - return {...fullBlock, executionPayloadBlinded: false} as routes.validator.ProduceBlockOrContentsRes & { - executionPayloadBlinded: false; - }; - } else { - return {...blindedBlock, executionPayloadBlinded: true} as routes.validator.ProduceBlindedBlockOrContentsRes & { - executionPayloadBlinded: true; - }; - } - }; + if (executionPayloadSource === ProducedBlockSource.engine) { + return { + ...fullBlock, + executionPayloadBlinded: false, + executionPayloadSource, + } as routes.validator.ProduceBlockOrContentsRes & { + executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource; + }; + } else { + return { + ...blindedBlock, + executionPayloadBlinded: true, + executionPayloadSource, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; + } + }; const produceBlock: ServerApi["produceBlock"] = async function produceBlock( slot, randaoReveal, graffiti ) { - const producedData = await produceFullBlockOrContents(slot, randaoReveal, graffiti); + const producedData = await produceEngineFullBlockOrContents(slot, randaoReveal, graffiti); if (isForkBlobs(producedData.version)) { throw Error(`Invalid call to produceBlock for deneb+ fork=${producedData.version}`); } else { @@ -628,45 +691,85 @@ export function getValidatorApi({ } }; - const produceBlindedBlock: ServerApi["produceBlindedBlock"] = - async function produceBlindedBlock(slot, randaoReveal, graffiti) { - const producedData = await produceBlockV3(slot, randaoReveal, graffiti); - let blindedProducedData: routes.validator.ProduceBlindedBlockOrContentsRes; - - if 
(isForkBlobs(producedData.version)) { - if (isBlindedBlockContents(producedData.data as allForks.FullOrBlindedBlockContents)) { - blindedProducedData = producedData as routes.validator.ProduceBlindedBlockOrContentsRes; - } else { - // - const {block, blobSidecars} = producedData.data as allForks.BlockContents; - const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); - const blindedBlobSidecars = blobSidecarsToBlinded(blobSidecars); + const produceEngineOrBuilderBlindedBlock: ServerApi["produceBlindedBlock"] = + async function produceEngineOrBuilderBlindedBlock(slot, randaoReveal, graffiti) { + const {data, executionPayloadValue, consensusBlockValue, version} = await produceEngineOrBuilderBlock( + slot, + randaoReveal, + graffiti + ); + if (!isForkExecution(version)) { + throw Error(`Invalid fork=${version} for produceEngineOrBuilderBlindedBlock`); + } + const executionPayloadBlinded = true; + + if (isBlockContents(data)) { + const {block} = data; + const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); + return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version}; + } else if (isBlindedBeaconBlock(data)) { + return {executionPayloadValue, consensusBlockValue, data, executionPayloadBlinded, version}; + } else { + const blindedBlock = beaconBlockToBlinded(config, data as allForks.AllForksExecution["BeaconBlock"]); + return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version}; + } + }; - blindedProducedData = { - ...producedData, - data: {blindedBlock, blindedBlobSidecars}, - } as routes.validator.ProduceBlindedBlockOrContentsRes; - } + const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3( + slot, + randaoReveal, + graffiti, + skipRandaoVerification?: boolean, + opts: routes.validator.ExtraProduceBlockOps = {} + ) { + const produceBlockEngineOrBuilderRes 
= await produceEngineOrBuilderBlock( + slot, + randaoReveal, + graffiti, + skipRandaoVerification, + opts + ); + + if (opts.blindedLocal === true && ForkSeq[produceBlockEngineOrBuilderRes.version] >= ForkSeq.bellatrix) { + if (produceBlockEngineOrBuilderRes.executionPayloadBlinded) { + return produceBlockEngineOrBuilderRes; } else { - if (isBlindedBeaconBlock(producedData.data)) { - blindedProducedData = producedData as routes.validator.ProduceBlindedBlockOrContentsRes; - } else { - const block = producedData.data; + if (isBlockContents(produceBlockEngineOrBuilderRes.data)) { + const {block} = produceBlockEngineOrBuilderRes.data; const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); - blindedProducedData = { - ...producedData, + return { + ...produceBlockEngineOrBuilderRes, + data: blindedBlock, + executionPayloadBlinded: true, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; + } else { + const blindedBlock = beaconBlockToBlinded( + config, + produceBlockEngineOrBuilderRes.data as allForks.AllForksExecution["BeaconBlock"] + ); + return { + ...produceBlockEngineOrBuilderRes, data: blindedBlock, - } as routes.validator.ProduceBlindedBlockOrContentsRes; + executionPayloadBlinded: true, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; } } - return blindedProducedData; - }; + } else { + return produceBlockEngineOrBuilderRes; + } + }; return { produceBlock, - produceBlockV2: produceFullBlockOrContents, + produceBlockV2: produceEngineFullBlockOrContents, produceBlockV3, - produceBlindedBlock, + produceBlindedBlock: produceEngineOrBuilderBlindedBlock, async produceAttestationData(committeeIndex, slot) { notWhileSyncing(); diff --git a/packages/beacon-node/src/api/rest/activeSockets.ts b/packages/beacon-node/src/api/rest/activeSockets.ts index 
ba8a35c80119..9f1b0f1a78a3 100644 --- a/packages/beacon-node/src/api/rest/activeSockets.ts +++ b/packages/beacon-node/src/api/rest/activeSockets.ts @@ -1,12 +1,11 @@ import http, {Server} from "node:http"; import {Socket} from "node:net"; -import {waitFor} from "@lodestar/utils"; -import {IGauge} from "../../metrics/index.js"; +import {Gauge, GaugeExtra, waitFor} from "@lodestar/utils"; export type SocketMetrics = { - activeSockets: IGauge; - socketsBytesRead: IGauge; - socketsBytesWritten: IGauge; + activeSockets: GaugeExtra; + socketsBytesRead: Gauge; + socketsBytesWritten: Gauge; }; // Use relatively short timeout to speed up shutdown diff --git a/packages/beacon-node/src/api/rest/base.ts b/packages/beacon-node/src/api/rest/base.ts index 4503bfe20e47..3ddb5354a897 100644 --- a/packages/beacon-node/src/api/rest/base.ts +++ b/packages/beacon-node/src/api/rest/base.ts @@ -3,9 +3,8 @@ import fastify, {FastifyInstance} from "fastify"; import fastifyCors from "@fastify/cors"; import bearerAuthPlugin from "@fastify/bearer-auth"; import {RouteConfig} from "@lodestar/api/beacon/server"; -import {ErrorAborted, Logger} from "@lodestar/utils"; +import {ErrorAborted, Gauge, Histogram, Logger} from "@lodestar/utils"; import {isLocalhostIP} from "../../util/ip.js"; -import {IGauge, IHistogram} from "../../metrics/index.js"; import {ApiError, NodeIsSyncing} from "../impl/errors.js"; import {HttpActiveSocketsTracker, SocketMetrics} from "./activeSockets.js"; @@ -25,9 +24,9 @@ export type RestApiServerModules = { }; export type RestApiServerMetrics = SocketMetrics & { - requests: IGauge<"operationId">; - responseTime: IHistogram<"operationId">; - errors: IGauge<"operationId">; + requests: Gauge<{operationId: string}>; + responseTime: Histogram<{operationId: string}>; + errors: Gauge<{operationId: string}>; }; /** @@ -90,6 +89,11 @@ export class RestApiServer { metrics?.requests.inc({operationId}); }); + server.addHook("preHandler", async (req, _res) => { + const {operationId} = 
req.routeConfig as RouteConfig; + this.logger.debug(`Exec ${req.id as string} ${req.ip} ${operationId}`); + }); + // Log after response server.addHook("onResponse", async (req, res) => { const {operationId} = req.routeConfig as RouteConfig; diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index feaddfbad39d..12b43359fa4e 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -7,7 +7,6 @@ import { computeStartSlotAtEpoch, isStateValidatorsNodesPopulated, RootCache, - kzgCommitmentToVersionedHash, } from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {ForkChoiceError, ForkChoiceErrorCode, EpochDifference, AncestorStatus} from "@lodestar/fork-choice"; @@ -16,6 +15,7 @@ import {ZERO_HASH_HEX} from "../../constants/index.js"; import {toCheckpointHex} from "../stateCache/index.js"; import {isOptimisticBlock} from "../../util/forkChoice.js"; import {isQueueErrorAborted} from "../../util/queue/index.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {ChainEvent, ReorgEventData} from "../emitter.js"; import {REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC} from "../reprocess.js"; import type {BeaconChain} from "../chain.js"; diff --git a/packages/beacon-node/src/chain/blocks/index.ts b/packages/beacon-node/src/chain/blocks/index.ts index 569fd0771022..8f4c7fa5f0f1 100644 --- a/packages/beacon-node/src/chain/blocks/index.ts +++ b/packages/beacon-node/src/chain/blocks/index.ts @@ -58,11 +58,7 @@ export async function processBlocks( } try { - const {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock} = verifyBlocksSanityChecks( - this, - blocks, - opts - ); + const {relevantBlocks, parentSlots, parentBlock} = verifyBlocksSanityChecks(this, blocks, opts); // No relevant blocks, skip verifyBlocksInEpoch() if (relevantBlocks.length === 0 || parentBlock === null) { @@ -72,13 
+68,8 @@ export async function processBlocks( // Fully verify a block to be imported immediately after. Does not produce any side-effects besides adding intermediate // states in the state cache through regen. - const {postStates, proposerBalanceDeltas, segmentExecStatus} = await verifyBlocksInEpoch.call( - this, - parentBlock, - relevantBlocks, - dataAvailabilityStatuses, - opts - ); + const {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus} = + await verifyBlocksInEpoch.call(this, parentBlock, relevantBlocks, opts); // If segmentExecStatus has lvhForkchoice then, the entire segment should be invalid // and we need to further propagate diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index 5f1ac8833578..aff5a64c9929 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,14 +1,13 @@ -import {toHexString} from "@chainsafe/ssz"; import {CachedBeaconStateAllForks, computeEpochAtSlot, DataAvailableStatus} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus} from "@lodestar/fork-choice"; -import {allForks, deneb, Slot, RootHex} from "@lodestar/types"; +import {allForks, deneb, Slot} from "@lodestar/types"; import {ForkSeq, MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; -import {pruneSetToMax} from "@lodestar/utils"; export enum BlockInputType { preDeneb = "preDeneb", postDeneb = "postDeneb", + blobsPromise = "blobsPromise", } /** Enum to represent where blocks come from */ @@ -19,9 +18,18 @@ export enum BlockSource { byRoot = "req_resp_by_root", } +export enum GossipedInputType { + block = "block", + blob = "blob", +} + +export type BlobsCache = Map; +export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]}; + export type BlockInput = {block: allForks.SignedBeaconBlock; source: BlockSource; 
blockBytes: Uint8Array | null} & ( | {type: BlockInputType.preDeneb} - | {type: BlockInputType.postDeneb; blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]} + | ({type: BlockInputType.postDeneb} & BlockInputBlobs) + | {type: BlockInputType.blobsPromise; blobsCache: BlobsCache; availabilityPromise: Promise} ); export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clockSlot: Slot): boolean { @@ -32,125 +40,7 @@ export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clo ); } -export enum GossipedInputType { - block = "block", - blob = "blob", -} -type GossipedBlockInput = - | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} - | {type: GossipedInputType.blob; signedBlob: deneb.SignedBlobSidecar; blobBytes: Uint8Array | null}; -type BlockInputCacheType = { - block?: allForks.SignedBeaconBlock; - blockBytes?: Uint8Array | null; - blobs: Map; - blobsBytes: Map; -}; - -const MAX_GOSSIPINPUT_CACHE = 5; -// ssz.deneb.BlobSidecars.elementType.fixedSize; -const BLOBSIDECAR_FIXED_SIZE = 131256; - export const getBlockInput = { - blockInputCache: new Map(), - - getGossipBlockInput( - config: ChainForkConfig, - gossipedInput: GossipedBlockInput - ): - | {blockInput: BlockInput; blockInputMeta: {pending: null; haveBlobs: number; expectedBlobs: number}} - | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}} - | {blockInput: null; blockInputMeta: {pending: GossipedInputType.blob; haveBlobs: number; expectedBlobs: number}} { - let blockHex; - let blockCache; - - if (gossipedInput.type === GossipedInputType.block) { - const {signedBlock, blockBytes} = gossipedInput; - - blockHex = toHexString( - config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message) - ); - blockCache = this.blockInputCache.get(blockHex) ?? 
{ - blobs: new Map(), - blobsBytes: new Map(), - }; - - blockCache.block = signedBlock; - blockCache.blockBytes = blockBytes; - } else { - const {signedBlob, blobBytes} = gossipedInput; - blockHex = toHexString(signedBlob.message.blockRoot); - blockCache = this.blockInputCache.get(blockHex); - - // If a new entry is going to be inserted, prune out old ones - if (blockCache === undefined) { - pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); - blockCache = {blobs: new Map(), blobsBytes: new Map()}; - } - - // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions - blockCache.blobs.set(signedBlob.message.index, signedBlob.message); - // easily splice out the unsigned message as blob is a fixed length type - blockCache.blobsBytes.set(signedBlob.message.index, blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null); - } - - this.blockInputCache.set(blockHex, blockCache); - const {block: signedBlock, blockBytes} = blockCache; - - if (signedBlock !== undefined) { - // block is available, check if all blobs have shown up - const {slot, body} = signedBlock.message; - const {blobKzgCommitments} = body as deneb.BeaconBlockBody; - const blockInfo = `blockHex=${blockHex}, slot=${slot}`; - - if (blobKzgCommitments.length < blockCache.blobs.size) { - throw Error( - `Received more blobs=${blockCache.blobs.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` - ); - } - if (blobKzgCommitments.length === blockCache.blobs.size) { - const blobSidecars = []; - const blobsBytes = []; - - for (let index = 0; index < blobKzgCommitments.length; index++) { - const blobSidecar = blockCache.blobs.get(index); - if (blobSidecar === undefined) { - throw Error(`Missing blobSidecar at index=${index} for ${blockInfo}`); - } - blobSidecars.push(blobSidecar); - blobsBytes.push(blockCache.blobsBytes.get(index) ?? 
null); - } - - return { - // TODO freetheblobs: collate and add serialized data for the postDeneb blockinput - blockInput: getBlockInput.postDeneb( - config, - signedBlock, - BlockSource.gossip, - blobSidecars, - blockBytes ?? null, - blobsBytes - ), - blockInputMeta: {pending: null, haveBlobs: blockCache.blobs.size, expectedBlobs: blobKzgCommitments.length}, - }; - } else { - return { - blockInput: null, - blockInputMeta: { - pending: GossipedInputType.blob, - haveBlobs: blockCache.blobs.size, - expectedBlobs: blobKzgCommitments.length, - }, - }; - } - } else { - // will need to wait for the block to showup - return { - blockInput: null, - blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blockCache.blobs.size, expectedBlobs: null}, - }; - } - }, - preDeneb( config: ChainForkConfig, block: allForks.SignedBeaconBlock, @@ -188,6 +78,27 @@ export const getBlockInput = { blobsBytes, }; }, + + blobsPromise( + config: ChainForkConfig, + block: allForks.SignedBeaconBlock, + source: BlockSource, + blobsCache: BlobsCache, + blockBytes: Uint8Array | null, + availabilityPromise: Promise + ): BlockInput { + if (config.getForkSeq(block.message.slot) < ForkSeq.deneb) { + throw Error(`Pre Deneb block slot ${block.message.slot}`); + } + return { + type: BlockInputType.blobsPromise, + block, + source, + blobsCache, + blockBytes, + availabilityPromise, + }; + }, }; export enum AttestationImportOpt { diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index 72db1d801b48..94a42a39a6ae 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -5,7 +5,7 @@ import { isStateValidatorsNodesPopulated, DataAvailableStatus, } from "@lodestar/state-transition"; -import {bellatrix} from "@lodestar/types"; +import {bellatrix, deneb} from "@lodestar/types"; import {ForkName} from "@lodestar/params"; import {ProtoBlock, ExecutionStatus} from 
"@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; @@ -14,13 +14,14 @@ import {BlockError, BlockErrorCode} from "../errors/index.js"; import {BlockProcessOpts} from "../options.js"; import {RegenCaller} from "../regen/index.js"; import type {BeaconChain} from "../chain.js"; -import {BlockInput, ImportBlockOpts} from "./types.js"; +import {BlockInput, ImportBlockOpts, BlockInputType} from "./types.js"; import {POS_PANDA_MERGE_TRANSITION_BANNER} from "./utils/pandaMergeTransitionBanner.js"; import {CAPELLA_OWL_BANNER} from "./utils/ownBanner.js"; import {DENEB_BLOWFISH_BANNER} from "./utils/blowfishBanner.js"; import {verifyBlocksStateTransitionOnly} from "./verifyBlocksStateTransitionOnly.js"; import {verifyBlocksSignatures} from "./verifyBlocksSignatures.js"; import {verifyBlocksExecutionPayload, SegmentExecStatus} from "./verifyBlocksExecutionPayloads.js"; +import {verifyBlocksDataAvailability} from "./verifyBlocksDataAvailability.js"; import {writeBlockInputToDb} from "./writeBlockInputToDb.js"; /** @@ -38,12 +39,12 @@ export async function verifyBlocksInEpoch( this: BeaconChain, parentBlock: ProtoBlock, blocksInput: BlockInput[], - dataAvailabilityStatuses: DataAvailableStatus[], opts: BlockProcessOpts & ImportBlockOpts ): Promise<{ postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]; segmentExecStatus: SegmentExecStatus; + dataAvailabilityStatuses: DataAvailableStatus[]; }> { const blocks = blocksInput.map(({block}) => block); if (blocks.length === 0) { @@ -88,7 +89,12 @@ export async function verifyBlocksInEpoch( try { // batch all I/O operations to reduce overhead - const [segmentExecStatus, {postStates, proposerBalanceDeltas}] = await Promise.all([ + const [ + segmentExecStatus, + {dataAvailabilityStatuses, availableTime}, + {postStates, proposerBalanceDeltas, verifyStateTime}, + {verifySignaturesTime}, + ] = await Promise.all([ // Execution payloads opts.skipVerifyExecutionPayload !== true ? 
verifyBlocksExecutionPayload(this, parentBlock, blocks, preState0, abortController.signal, opts) @@ -98,12 +104,16 @@ export async function verifyBlocksInEpoch( mergeBlockFound: null, } as SegmentExecStatus), + // data availability for the blobs + verifyBlocksDataAvailability(this, blocksInput, opts), + // Run state transition only // TODO: Ensure it yields to allow flushing to workers and engine API verifyBlocksStateTransitionOnly( preState0, blocksInput, - dataAvailabilityStatuses, + // hack availability for state transition eval as availability is separately determined + blocks.map(() => DataAvailableStatus.available), this.logger, this.metrics, abortController.signal, @@ -113,7 +123,7 @@ export async function verifyBlocksInEpoch( // All signatures at once opts.skipVerifyBlockSignatures !== true ? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts) - : Promise.resolve(), + : Promise.resolve({verifySignaturesTime: Date.now()}), // ideally we want to only persist blocks after verifying them however the reality is there are // rarely invalid blocks we'll batch all I/O operation here to reduce the overhead if there's @@ -151,7 +161,35 @@ export async function verifyBlocksInEpoch( } } - return {postStates, proposerBalanceDeltas, segmentExecStatus}; + if (segmentExecStatus.execAborted === null) { + const {executionStatuses, executionTime} = segmentExecStatus; + if ( + blocksInput.length === 1 && + // gossip blocks have seenTimestampSec + opts.seenTimestampSec !== undefined && + blocksInput[0].type !== BlockInputType.preDeneb && + executionStatuses[0] === ExecutionStatus.Valid + ) { + // Find the max time when the block was actually verified + const fullyVerifiedTime = Math.max(executionTime, verifyStateTime, verifySignaturesTime); + const recvTofullyVerifedTime = fullyVerifiedTime / 1000 - opts.seenTimestampSec; + this.metrics?.gossipBlock.receivedToFullyVerifiedTime.observe(recvTofullyVerifedTime); + + const 
verifiedToBlobsAvailabiltyTime = Math.max(availableTime - fullyVerifiedTime, 0) / 1000; + const numBlobs = (blocksInput[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length; + + this.metrics?.gossipBlock.verifiedToBlobsAvailabiltyTime.observe({numBlobs}, verifiedToBlobsAvailabiltyTime); + this.logger.verbose("Verified blockInput fully with blobs availability", { + slot: blocksInput[0].block.message.slot, + recvTofullyVerifedTime, + verifiedToBlobsAvailabiltyTime, + type: blocksInput[0].type, + numBlobs, + }); + } + } + + return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus}; } finally { abortController.abort(); } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts new file mode 100644 index 000000000000..9c45469d56dd --- /dev/null +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts @@ -0,0 +1,126 @@ +import {computeTimeAtSlot, DataAvailableStatus} from "@lodestar/state-transition"; +import {ChainForkConfig} from "@lodestar/config"; +import {deneb, UintNum64} from "@lodestar/types"; +import {Logger} from "@lodestar/utils"; +import {BlockError, BlockErrorCode} from "../errors/index.js"; +import {validateBlobSidecars} from "../validation/blobSidecar.js"; +import {Metrics} from "../../metrics/metrics.js"; +import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js"; + +// proposer boost is not available post 3 sec so try pulling using unknown block hash +// post 3 sec after throwing the availability error +const BLOB_AVAILABILITY_TIMEOUT = 3_000; + +/** + * Verifies some early cheap sanity checks on the block before running the full state transition. 
+ * + * - Parent is known to the fork-choice + * - Check skipped slots limit + * - check_block_relevancy() + * - Block not in the future + * - Not genesis block + * - Block's slot is < Infinity + * - Not finalized slot + * - Not already known + */ +export async function verifyBlocksDataAvailability( + chain: {config: ChainForkConfig; genesisTime: UintNum64; logger: Logger; metrics: Metrics | null}, + blocks: BlockInput[], + opts: ImportBlockOpts +): Promise<{dataAvailabilityStatuses: DataAvailableStatus[]; availableTime: number}> { + if (blocks.length === 0) { + throw Error("Empty partiallyVerifiedBlocks"); + } + + const dataAvailabilityStatuses: DataAvailableStatus[] = []; + const seenTime = opts.seenTimestampSec !== undefined ? opts.seenTimestampSec * 1000 : Date.now(); + + for (const blockInput of blocks) { + // Validate status of only not yet finalized blocks, we don't need yet to propogate the status + // as it is not used upstream anywhere + const dataAvailabilityStatus = await maybeValidateBlobs(chain, blockInput, opts); + dataAvailabilityStatuses.push(dataAvailabilityStatus); + } + + const availableTime = blocks[blocks.length - 1].type === BlockInputType.blobsPromise ? 
Date.now() : seenTime; + if (blocks.length === 1 && opts.seenTimestampSec !== undefined && blocks[0].type !== BlockInputType.preDeneb) { + const recvToAvailableTime = availableTime / 1000 - opts.seenTimestampSec; + const numBlobs = (blocks[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length; + + chain.metrics?.gossipBlock.receivedToBlobsAvailabilityTime.observe({numBlobs}, recvToAvailableTime); + chain.logger.verbose("Verified blobs availability", { + slot: blocks[0].block.message.slot, + recvToAvailableTime, + type: blocks[0].type, + }); + } + + return {dataAvailabilityStatuses, availableTime}; +} + +async function maybeValidateBlobs( + chain: {config: ChainForkConfig; genesisTime: UintNum64}, + blockInput: BlockInput, + opts: ImportBlockOpts +): Promise { + switch (blockInput.type) { + case BlockInputType.preDeneb: + return DataAvailableStatus.preDeneb; + + case BlockInputType.postDeneb: + if (opts.validBlobSidecars === BlobSidecarValidation.Full) { + return DataAvailableStatus.available; + } + + // eslint-disable-next-line no-fallthrough + case BlockInputType.blobsPromise: { + // run full validation + const {block} = blockInput; + const blockSlot = block.message.slot; + + const blobsData = + blockInput.type === BlockInputType.postDeneb + ? 
blockInput + : await raceWithCutoff(chain, blockInput, blockInput.availabilityPromise); + const {blobs} = blobsData; + + const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; + const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); + + // if the blob siddecars have been individually verified then we can skip kzg proof check + // but other checks to match blobs with block data still need to be performed + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + + return DataAvailableStatus.available; + } + } +} + +/** + * Wait for blobs to become available with a cutoff time. If fails then throw DATA_UNAVAILABLE error + * which may try unknownblock/blobs fill (by root). + */ +async function raceWithCutoff( + chain: {config: ChainForkConfig; genesisTime: UintNum64}, + blockInput: BlockInput, + availabilityPromise: Promise +): Promise { + const {block} = blockInput; + const blockSlot = block.message.slot; + + const cutoffTime = Math.max( + computeTimeAtSlot(chain.config, blockSlot, chain.genesisTime) * 1000 + BLOB_AVAILABILITY_TIMEOUT - Date.now(), + 0 + ); + const cutoffTimeout = new Promise((_resolve, reject) => setTimeout(reject, cutoffTime)); + + try { + await Promise.race([availabilityPromise, cutoffTimeout]); + } catch (e) { + // throw unavailable so that the unknownblock/blobs can be triggered to pull the block + throw new BlockError(block, {code: BlockErrorCode.DATA_UNAVAILABLE}); + } + // we can only be here if availabilityPromise has resolved else an error will be thrown + return availabilityPromise; +} diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts index 7f4edd14c618..5dbe104c9541 100644 --- 
a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts @@ -5,7 +5,6 @@ import { isExecutionBlockBodyType, isMergeTransitionBlock as isMergeTransitionBlockFn, isExecutionEnabled, - kzgCommitmentToVersionedHash, } from "@lodestar/state-transition"; import {bellatrix, allForks, Slot, deneb} from "@lodestar/types"; import { @@ -24,6 +23,7 @@ import {ForkSeq, SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; import {IExecutionEngine} from "../../execution/engine/interface.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {IClock} from "../../util/clock.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {BlockProcessOpts} from "../options.js"; import {ExecutionPayloadStatus} from "../../execution/engine/interface.js"; import {IEth1ForBlockProduction} from "../../eth1/index.js"; @@ -45,6 +45,7 @@ export type SegmentExecStatus = | { execAborted: null; executionStatuses: MaybeValidExecutionStatus[]; + executionTime: number; mergeBlockFound: bellatrix.BeaconBlock | null; } | {execAborted: ExecAbortType; invalidSegmentLVH?: LVHInvalidResponse; mergeBlockFound: null}; @@ -243,8 +244,9 @@ export async function verifyBlocksExecutionPayload( } } - if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { - const recvToVerifiedExecPayload = Date.now() / 1000 - opts.seenTimestampSec; + const executionTime = Date.now(); + if (blocks.length === 1 && opts.seenTimestampSec !== undefined && executionStatuses[0] === ExecutionStatus.Valid) { + const recvToVerifiedExecPayload = executionTime / 1000 - opts.seenTimestampSec; chain.metrics?.gossipBlock.receivedToExecutionPayloadVerification.observe(recvToVerifiedExecPayload); chain.logger.verbose("Verified execution payload", { slot: blocks[0].message.slot, @@ -255,6 +257,7 @@ export async function verifyBlocksExecutionPayload( return { execAborted: null, executionStatuses, + 
executionTime, mergeBlockFound, }; } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts index 9fb7d04f1ed8..e62355a4889d 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts @@ -1,12 +1,11 @@ -import {computeStartSlotAtEpoch, DataAvailableStatus} from "@lodestar/state-transition"; +import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; -import {Slot, deneb} from "@lodestar/types"; +import {Slot} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; import {IClock} from "../../util/clock.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; -import {validateBlobSidecars} from "../validation/blobSidecar.js"; -import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js"; +import {BlockInput, ImportBlockOpts} from "./types.js"; /** * Verifies some early cheap sanity checks on the block before running the full state transition. 
@@ -26,7 +25,6 @@ export function verifyBlocksSanityChecks( opts: ImportBlockOpts ): { relevantBlocks: BlockInput[]; - dataAvailabilityStatuses: DataAvailableStatus[]; parentSlots: Slot[]; parentBlock: ProtoBlock | null; } { @@ -35,7 +33,6 @@ export function verifyBlocksSanityChecks( } const relevantBlocks: BlockInput[] = []; - const dataAvailabilityStatuses: DataAvailableStatus[] = []; const parentSlots: Slot[] = []; let parentBlock: ProtoBlock | null = null; @@ -64,10 +61,6 @@ export function verifyBlocksSanityChecks( } } - // Validate status of only not yet finalized blocks, we don't need yet to propogate the status - // as it is not used upstream anywhere - const dataAvailabilityStatus = maybeValidateBlobs(chain.config, blockInput, opts); - let parentBlockSlot: Slot; if (relevantBlocks.length > 0) { @@ -105,7 +98,6 @@ export function verifyBlocksSanityChecks( // Block is relevant relevantBlocks.push(blockInput); - dataAvailabilityStatuses.push(dataAvailabilityStatus); parentSlots.push(parentBlockSlot); } @@ -115,35 +107,5 @@ export function verifyBlocksSanityChecks( throw Error(`Internal error, parentBlock should not be null for relevantBlocks=${relevantBlocks.length}`); } - return {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock}; -} - -function maybeValidateBlobs( - config: ChainForkConfig, - blockInput: BlockInput, - opts: ImportBlockOpts -): DataAvailableStatus { - switch (blockInput.type) { - case BlockInputType.postDeneb: { - if (opts.validBlobSidecars === BlobSidecarValidation.Full) { - return DataAvailableStatus.available; - } - - // run full validation - const {block, blobs} = blockInput; - const blockSlot = block.message.slot; - const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; - const beaconBlockRoot = config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); - - // if the blob siddecars have been individually verified then we can skip kzg proof check - // but other checks to match blobs 
with block data still need to be performed - const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; - validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); - - return DataAvailableStatus.available; - } - - case BlockInputType.preDeneb: - return DataAvailableStatus.preDeneb; - } + return {relevantBlocks, parentSlots, parentBlock}; } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts index fbbef969b696..14ad46a35c1e 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts @@ -20,7 +20,7 @@ export async function verifyBlocksSignatures( preState0: CachedBeaconStateAllForks, blocks: allForks.SignedBeaconBlock[], opts: ImportBlockOpts -): Promise { +): Promise<{verifySignaturesTime: number}> { const isValidPromises: Promise[] = []; // Verifies signatures after running state transition, so all SyncCommittee signed roots are known at this point. 
@@ -46,17 +46,20 @@ export async function verifyBlocksSignatures( } } - if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { - const recvToSigVer = Date.now() / 1000 - opts.seenTimestampSec; - metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer); - logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer}); - } - // `rejectFirstInvalidResolveAllValid()` returns on isValid result with its index const res = await rejectFirstInvalidResolveAllValid(isValidPromises); if (!res.allValid) { throw new BlockError(blocks[res.index], {code: BlockErrorCode.INVALID_SIGNATURE, state: preState0}); } + + const verifySignaturesTime = Date.now(); + if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { + const recvToSigVer = verifySignaturesTime / 1000 - opts.seenTimestampSec; + metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer); + logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer}); + } + + return {verifySignaturesTime}; } type AllValidRes = {allValid: true} | {allValid: false; index: number}; diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts index 709ad0c02b27..7d15d4e4f6ce 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts @@ -3,6 +3,7 @@ import { stateTransition, ExecutionPayloadStatus, DataAvailableStatus, + StateHashTreeRootSource, } from "@lodestar/state-transition"; import {ErrorAborted, Logger, sleep} from "@lodestar/utils"; import {Metrics} from "../../metrics/index.js"; @@ -27,7 +28,7 @@ export async function verifyBlocksStateTransitionOnly( metrics: Metrics | null, signal: AbortSignal, opts: BlockProcessOpts & ImportBlockOpts -): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]}> 
{ +): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]; verifyStateTime: number}> { const postStates: CachedBeaconStateAllForks[] = []; const proposerBalanceDeltas: number[] = []; @@ -57,7 +58,9 @@ export async function verifyBlocksStateTransitionOnly( metrics ); - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({source: "block_transition"}); + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.blockTransition, + }); const stateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); @@ -90,12 +93,13 @@ export async function verifyBlocksStateTransitionOnly( } } + const verifyStateTime = Date.now(); if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { const slot = blocks[0].block.message.slot; - const recvToTransition = Date.now() / 1000 - opts.seenTimestampSec; + const recvToTransition = verifyStateTime / 1000 - opts.seenTimestampSec; metrics?.gossipBlock.receivedToStateTransition.observe(recvToTransition); - logger.verbose("Transitioned gossip block", {slot, recvToTransition}); + logger.verbose("Verified block state transition", {slot, recvToTransition}); } - return {postStates, proposerBalanceDeltas}; + return {postStates, proposerBalanceDeltas, verifyStateTime}; } diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts index 0603ed7e7f7e..0b94d32b84ec 100644 --- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts +++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts @@ -13,7 +13,7 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI const fnPromises: Promise[] = []; for (const blockInput of blocksInput) { - const {block, blockBytes, type} = blockInput; + const {block, blockBytes} = blockInput; const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); const 
blockRootHex = toHex(blockRoot); if (blockBytes) { @@ -29,8 +29,13 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI root: blockRootHex, }); - if (type === BlockInputType.postDeneb) { - const {blobs: blobSidecars} = blockInput; + if (blockInput.type === BlockInputType.postDeneb || blockInput.type === BlockInputType.blobsPromise) { + const blobSidecars = + blockInput.type == BlockInputType.postDeneb + ? blockInput.blobs + : // At this point of import blobs are available and can be safely awaited + (await blockInput.availabilityPromise).blobs; + // NOTE: Old blobs are pruned on archive fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); this.logger.debug("Persisted blobSidecars to hot DB", { diff --git a/packages/beacon-node/src/chain/bls/index.ts b/packages/beacon-node/src/chain/bls/index.ts index 3ee72ac66cbd..f9898b13776b 100644 --- a/packages/beacon-node/src/chain/bls/index.ts +++ b/packages/beacon-node/src/chain/bls/index.ts @@ -1,4 +1,4 @@ export type {IBlsVerifier} from "./interface.js"; -export type {BlsMultiThreadWorkerPoolModules} from "./multithread/index.js"; +export type {BlsMultiThreadWorkerPoolModules, JobQueueItemType} from "./multithread/index.js"; export {BlsMultiThreadWorkerPool} from "./multithread/index.js"; export {BlsSingleThreadVerifier} from "./singleThread.js"; diff --git a/packages/beacon-node/src/chain/bls/multithread/index.ts b/packages/beacon-node/src/chain/bls/multithread/index.ts index 9b0006566253..235ec1536be7 100644 --- a/packages/beacon-node/src/chain/bls/multithread/index.ts +++ b/packages/beacon-node/src/chain/bls/multithread/index.ts @@ -41,6 +41,8 @@ export type BlsMultiThreadWorkerPoolOptions = { blsVerifyAllMultiThread?: boolean; }; +export type {JobQueueItemType}; + // 1 worker for the main thread const blsPoolSize = Math.max(defaultPoolSize - 1, 1); diff --git a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts 
b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts index 4ae05cdab913..8b5c63df2eeb 100644 --- a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts +++ b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts @@ -56,7 +56,7 @@ export function jobItemWorkReq(job: JobQueueItem, format: PointFormat, metrics: opts: job.opts, sets: job.sets.map((set) => ({ // this can throw, handled in the consumer code - publicKey: getAggregatedPubkey(set).toBytes(format), + publicKey: getAggregatedPubkey(set, metrics).toBytes(format), signature: set.signature, message: set.signingRoot, })), diff --git a/packages/beacon-node/src/chain/bls/utils.ts b/packages/beacon-node/src/chain/bls/utils.ts index 0b1010de27f6..4a3a027f31ac 100644 --- a/packages/beacon-node/src/chain/bls/utils.ts +++ b/packages/beacon-node/src/chain/bls/utils.ts @@ -1,14 +1,19 @@ import type {PublicKey} from "@chainsafe/bls/types"; import bls from "@chainsafe/bls"; import {ISignatureSet, SignatureSetType} from "@lodestar/state-transition"; +import {Metrics} from "../../metrics/metrics.js"; -export function getAggregatedPubkey(signatureSet: ISignatureSet): PublicKey { +export function getAggregatedPubkey(signatureSet: ISignatureSet, metrics: Metrics | null = null): PublicKey { switch (signatureSet.type) { case SignatureSetType.single: return signatureSet.pubkey; - case SignatureSetType.aggregate: - return bls.PublicKey.aggregate(signatureSet.pubkeys); + case SignatureSetType.aggregate: { + const timer = metrics?.blsThreadPool.pubkeysAggregationMainThreadDuration.startTimer(); + const pubkeys = bls.PublicKey.aggregate(signatureSet.pubkeys); + timer?.(); + return pubkeys; + } default: throw Error("Unknown signature set type"); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 45cda3d94bc9..520b20b820fc 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -44,7 +44,7 @@ import {ensureDir, writeIfNotExist} 
from "../util/file.js"; import {isOptimisticBlock} from "../util/forkChoice.js"; import {BlockProcessor, ImportBlockOpts} from "./blocks/index.js"; import {ChainEventEmitter, ChainEvent} from "./emitter.js"; -import {IBeaconChain, ProposerPreparationData, BlockHash, StateGetOpts} from "./interface.js"; +import {IBeaconChain, ProposerPreparationData, BlockHash, StateGetOpts, CommonBlockBody} from "./interface.js"; import {IChainOptions} from "./options.js"; import {QueuedStateRegenerator, RegenCaller} from "./regen/index.js"; import {initializeForkChoice} from "./forkChoice/index.js"; @@ -73,12 +73,13 @@ import {SeenBlockAttesters} from "./seenCache/seenBlockAttesters.js"; import {BeaconProposerCache} from "./beaconProposerCache.js"; import {CheckpointBalancesCache} from "./balancesCache.js"; import {AssembledBlockType, BlobsResultType, BlockType} from "./produceBlock/index.js"; -import {BlockAttributes, produceBlockBody} from "./produceBlock/produceBlockBody.js"; +import {BlockAttributes, produceBlockBody, produceCommonBlockBody} from "./produceBlock/produceBlockBody.js"; import {computeNewStateRoot} from "./produceBlock/computeNewStateRoot.js"; import {BlockInput} from "./blocks/types.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; import {ShufflingCache} from "./shufflingCache.js"; import {StateContextCache} from "./stateCache/stateContextCache.js"; +import {SeenGossipBlockInput} from "./seenCache/index.js"; import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js"; /** @@ -87,7 +88,6 @@ import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js * allow some margin if the node overloads. 
*/ const DEFAULT_MAX_CACHED_PRODUCED_ROOTS = 4; -const DEFAULT_MAX_CACHED_BLOB_SIDECARS = 4; export class BeaconChain implements IBeaconChain { readonly genesisTime: UintNum64; @@ -125,6 +125,7 @@ export class BeaconChain implements IBeaconChain { readonly seenSyncCommitteeMessages = new SeenSyncCommitteeMessages(); readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; + readonly seenGossipBlockInput = new SeenGossipBlockInput(); // Seen cache for liveness checks readonly seenBlockAttesters = new SeenBlockAttesters(); @@ -136,8 +137,7 @@ export class BeaconChain implements IBeaconChain { readonly checkpointBalancesCache: CheckpointBalancesCache; readonly shufflingCache: ShufflingCache; /** Map keyed by executionPayload.blockHash of the block for those blobs */ - readonly producedBlobSidecarsCache = new Map(); - readonly producedBlindedBlobSidecarsCache = new Map(); + readonly producedContentsCache = new Map(); // Cache payload from the local execution so that produceBlindedBlock or produceBlockV3 and // send and get signed/published blinded versions which beacon can assemble into full before @@ -463,29 +463,60 @@ export class BeaconChain implements IBeaconChain { return {block: data, executionOptimistic: isOptimisticBlock(block)}; } // If block is not found in hot db, try cold db since there could be an archive cycle happening - // TODO: Add a lock to the archiver to have determinstic behaviour on where are blocks + // TODO: Add a lock to the archiver to have deterministic behavior on where are blocks } const data = await this.db.blockArchive.getByRoot(fromHexString(root)); return data && {block: data, executionOptimistic: false}; } - produceBlock( - blockAttributes: BlockAttributes - ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { + async produceCommonBlockBody(blockAttributes: BlockAttributes): Promise { + const {slot} = blockAttributes; + const head = 
this.forkChoice.getHead(); + const state = await this.regen.getBlockSlotState( + head.blockRoot, + slot, + {dontTransferCache: true}, + RegenCaller.produceBlock + ); + const parentBlockRoot = fromHexString(head.blockRoot); + + // TODO: To avoid breaking changes for metric define this attribute + const blockType = BlockType.Full; + + return produceCommonBlockBody.call(this, blockType, state, { + ...blockAttributes, + parentBlockRoot, + parentSlot: slot - 1, + }); + } + + produceBlock(blockAttributes: BlockAttributes & {commonBlockBody?: CommonBlockBody}): Promise<{ + block: allForks.BeaconBlock; + executionPayloadValue: Wei; + consensusBlockValue: Gwei; + shouldOverrideBuilder?: boolean; + }> { return this.produceBlockWrapper(BlockType.Full, blockAttributes); } - produceBlindedBlock( - blockAttributes: BlockAttributes - ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { + produceBlindedBlock(blockAttributes: BlockAttributes & {commonBlockBody?: CommonBlockBody}): Promise<{ + block: allForks.BlindedBeaconBlock; + executionPayloadValue: Wei; + consensusBlockValue: Gwei; + }> { return this.produceBlockWrapper(BlockType.Blinded, blockAttributes); } async produceBlockWrapper( blockType: T, - {randaoReveal, graffiti, slot, feeRecipient}: BlockAttributes - ): Promise<{block: AssembledBlockType; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { + {randaoReveal, graffiti, slot, feeRecipient, commonBlockBody}: BlockAttributes & {commonBlockBody?: CommonBlockBody} + ): Promise<{ + block: AssembledBlockType; + executionPayloadValue: Wei; + consensusBlockValue: Gwei; + shouldOverrideBuilder?: boolean; + }> { const head = this.forkChoice.getHead(); const state = await this.regen.getBlockSlotState( head.blockRoot, @@ -497,16 +528,22 @@ export class BeaconChain implements IBeaconChain { const proposerIndex = state.epochCtx.getBeaconProposer(slot); const proposerPubKey = 
state.epochCtx.index2pubkey[proposerIndex].toBytes(); - const {body, blobs, executionPayloadValue} = await produceBlockBody.call(this, blockType, state, { - randaoReveal, - graffiti, - slot, - feeRecipient, - parentSlot: slot - 1, - parentBlockRoot, - proposerIndex, - proposerPubKey, - }); + const {body, blobs, executionPayloadValue, shouldOverrideBuilder} = await produceBlockBody.call( + this, + blockType, + state, + { + randaoReveal, + graffiti, + slot, + feeRecipient, + parentSlot: slot - 1, + parentBlockRoot, + proposerIndex, + proposerPubKey, + commonBlockBody, + } + ); // The hashtree root computed here for debug log will get cached and hence won't introduce additional delays const bodyRoot = @@ -552,35 +589,12 @@ export class BeaconChain implements IBeaconChain { // publishing the blinded block's full version if (blobs.type === BlobsResultType.produced) { // body is of full type here - const blockHash = blobs.blockHash; - const blobSidecars = blobs.blobSidecars.map((blobSidecar) => ({ - ...blobSidecar, - blockRoot, - slot, - blockParentRoot: parentBlockRoot, - proposerIndex, - })); - - this.producedBlobSidecarsCache.set(blockHash, blobSidecars); - this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); - } else if (blobs.type === BlobsResultType.blinded) { - // body is of blinded type here - const blockHash = blobs.blockHash; - const blindedBlobSidecars = blobs.blobSidecars.map((blindedBlobSidecar) => ({ - ...blindedBlobSidecar, - blockRoot, - slot, - blockParentRoot: parentBlockRoot, - proposerIndex, - })); - - this.producedBlindedBlobSidecarsCache.set(blockHash, blindedBlobSidecars); - this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( - this.producedBlindedBlobSidecarsCache.size - ); + const {blockHash, contents} = blobs; + this.producedContentsCache.set(blockHash, contents); + this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size); } - return 
{block, executionPayloadValue, consensusBlockValue: proposerReward}; + return {block, executionPayloadValue, consensusBlockValue: proposerReward, shouldOverrideBuilder}; } /** @@ -593,14 +607,14 @@ export class BeaconChain implements IBeaconChain { * kzg_aggregated_proof=compute_proof_from_blobs(blobs), * ) */ - getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars { + getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents { const blockHash = toHex(beaconBlock.body.executionPayload.blockHash); - const blobSidecars = this.producedBlobSidecarsCache.get(blockHash); - if (!blobSidecars) { - throw Error(`No blobSidecars for executionPayload.blockHash ${blockHash}`); + const contents = this.producedContentsCache.get(blockHash); + if (!contents) { + throw Error(`No contents for executionPayload.blockHash ${blockHash}`); } - return blobSidecars; + return contents; } async processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise { @@ -882,19 +896,8 @@ export class BeaconChain implements IBeaconChain { this.metrics?.blockProductionCaches.producedBlindedBlockRoot.set(this.producedBlindedBlockRoot.size); if (this.config.getForkSeq(slot) >= ForkSeq.deneb) { - pruneSetToMax( - this.producedBlobSidecarsCache, - this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS - ); - this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); - - pruneSetToMax( - this.producedBlindedBlobSidecarsCache, - this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS - ); - this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( - this.producedBlindedBlobSidecarsCache.size - ); + pruneSetToMax(this.producedContentsCache, this.opts.maxCachedProducedRoots ?? 
DEFAULT_MAX_CACHED_PRODUCED_ROOTS); + this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size); } const metrics = this.metrics; @@ -938,15 +941,20 @@ export class BeaconChain implements IBeaconChain { this.logger.verbose("Fork choice justified", {epoch: cp.epoch, root: cp.rootHex}); } - private onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): void { + private async onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): Promise { this.logger.verbose("Fork choice finalized", {epoch: cp.epoch, root: cp.rootHex}); this.seenBlockProposers.prune(computeStartSlotAtEpoch(cp.epoch)); // TODO: Improve using regen here - const headState = this.regen.getStateSync(this.forkChoice.getHead().stateRoot); - const finalizedState = this.regen.getCheckpointStateSync(cp); + const {blockRoot, stateRoot, slot} = this.forkChoice.getHead(); + const headState = this.regen.getStateSync(stateRoot); + const headBlock = await this.db.block.get(fromHexString(blockRoot)); + if (headBlock == null) { + throw Error(`Head block ${slot} ${headBlock} is not available in database`); + } + if (headState) { - this.opPool.pruneAll(headState, finalizedState); + this.opPool.pruneAll(headBlock, headState); } } diff --git a/packages/beacon-node/src/chain/errors/blobSidecarError.ts b/packages/beacon-node/src/chain/errors/blobSidecarError.ts index e242cbcb11ba..f38aa883002c 100644 --- a/packages/beacon-node/src/chain/errors/blobSidecarError.ts +++ b/packages/beacon-node/src/chain/errors/blobSidecarError.ts @@ -21,6 +21,7 @@ export enum BlobSidecarErrorCode { PARENT_UNKNOWN = "BLOB_SIDECAR_ERROR_PARENT_UNKNOWN", NOT_LATER_THAN_PARENT = "BLOB_SIDECAR_ERROR_NOT_LATER_THAN_PARENT", PROPOSAL_SIGNATURE_INVALID = "BLOB_SIDECAR_ERROR_PROPOSAL_SIGNATURE_INVALID", + INCLUSION_PROOF_INVALID = "BLOB_SIDECAR_ERROR_INCLUSION_PROOF_INVALID", INCORRECT_PROPOSER = "BLOB_SIDECAR_ERROR_INCORRECT_PROPOSER", } @@ -37,6 +38,7 @@ export type BlobSidecarErrorType = | 
{code: BlobSidecarErrorCode.PARENT_UNKNOWN; parentRoot: RootHex} | {code: BlobSidecarErrorCode.NOT_LATER_THAN_PARENT; parentSlot: Slot; slot: Slot} | {code: BlobSidecarErrorCode.PROPOSAL_SIGNATURE_INVALID} + | {code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID; slot: Slot; blobIdx: number} | {code: BlobSidecarErrorCode.INCORRECT_PROPOSER; proposerIndex: ValidatorIndex}; export class BlobSidecarGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/errors/blockError.ts b/packages/beacon-node/src/chain/errors/blockError.ts index ee06927a4fc1..6ab15275934e 100644 --- a/packages/beacon-node/src/chain/errors/blockError.ts +++ b/packages/beacon-node/src/chain/errors/blockError.ts @@ -63,6 +63,8 @@ export enum BlockErrorCode { /** The attestation head block is too far behind the attestation slot, causing many skip slots. This is deemed a DoS risk */ TOO_MANY_SKIPPED_SLOTS = "TOO_MANY_SKIPPED_SLOTS", + /** The blobs are unavailable */ + DATA_UNAVAILABLE = "BLOCK_ERROR_DATA_UNAVAILABLE", } type ExecutionErrorStatus = Exclude< @@ -103,7 +105,8 @@ export type BlockErrorType = | {code: BlockErrorCode.TOO_MUCH_GAS_USED; gasUsed: number; gasLimit: number} | {code: BlockErrorCode.SAME_PARENT_HASH; blockHash: RootHex} | {code: BlockErrorCode.TRANSACTIONS_TOO_BIG; size: number; max: number} - | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string}; + | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string} + | {code: BlockErrorCode.DATA_UNAVAILABLE}; export class BlockGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 62355e334f61..6e932b25c50e 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -11,6 +11,8 @@ import { deneb, Wei, Gwei, + capella, + altair, } from "@lodestar/types"; import { 
BeaconStateAllForks, @@ -49,6 +51,7 @@ import {CheckpointBalancesCache} from "./balancesCache.js"; import {IChainOptions} from "./options.js"; import {AssembledBlockType, BlockAttributes, BlockType} from "./produceBlock/produceBlockBody.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; +import {SeenGossipBlockInput} from "./seenCache/index.js"; import {ShufflingCache} from "./shufflingCache.js"; export {BlockType, type AssembledBlockType}; @@ -102,14 +105,14 @@ export interface IBeaconChain { readonly seenSyncCommitteeMessages: SeenSyncCommitteeMessages; readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; + readonly seenGossipBlockInput: SeenGossipBlockInput; // Seen cache for liveness checks readonly seenBlockAttesters: SeenBlockAttesters; readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; - readonly producedBlobSidecarsCache: Map; + readonly producedContentsCache: Map; readonly producedBlockRoot: Map; - readonly producedBlindedBlobSidecarsCache: Map; readonly shufflingCache: ShufflingCache; readonly producedBlindedBlockRoot: Set; readonly opts: IChainOptions; @@ -151,14 +154,20 @@ export interface IBeaconChain { */ getBlockByRoot(root: RootHex): Promise<{block: allForks.SignedBeaconBlock; executionOptimistic: boolean} | null>; - getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars; - - produceBlock( - blockAttributes: BlockAttributes - ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>; - produceBlindedBlock( - blockAttributes: BlockAttributes - ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>; + getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents; + + produceCommonBlockBody(blockAttributes: BlockAttributes): Promise; + produceBlock(blockAttributes: BlockAttributes & {commonBlockBody?: 
CommonBlockBody}): Promise<{ + block: allForks.BeaconBlock; + executionPayloadValue: Wei; + consensusBlockValue: Gwei; + shouldOverrideBuilder?: boolean; + }>; + produceBlindedBlock(blockAttributes: BlockAttributes & {commonBlockBody?: CommonBlockBody}): Promise<{ + block: allForks.BlindedBeaconBlock; + executionPayloadValue: Wei; + consensusBlockValue: Gwei; + }>; /** Process a block until complete */ processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise; @@ -198,3 +207,7 @@ export type SSZObjectType = | "signedAggregatedAndProof" | "syncCommittee" | "contributionAndProof"; + +export type CommonBlockBody = phase0.BeaconBlockBody & + Pick & + Pick; diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index cee8d0614c30..1fdee886ff1d 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -13,12 +13,14 @@ import { MAX_BLS_TO_EXECUTION_CHANGES, BLS_WITHDRAWAL_PREFIX, MAX_ATTESTER_SLASHINGS, + ForkSeq, } from "@lodestar/params"; -import {Epoch, phase0, capella, ssz, ValidatorIndex} from "@lodestar/types"; +import {Epoch, phase0, capella, ssz, ValidatorIndex, allForks} from "@lodestar/types"; import {IBeaconDb} from "../../db/index.js"; import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js"; import {BlockType} from "../interface.js"; import {Metrics} from "../../metrics/metrics.js"; +import {BlockProductionStep} from "../produceBlock/produceBlockBody.js"; import {isValidBlsToExecutionChangeForBlockInclusion} from "./utils.js"; type HexRoot = string; @@ -178,7 +180,7 @@ export class OpPool { ] { const {config} = state; const stateEpoch = computeEpochAtSlot(state.slot); - const stateFork = config.getForkName(state.slot); + const stateFork = config.getForkSeq(state.slot); const toBeSlashedIndices = new Set(); const proposerSlashings: phase0.ProposerSlashing[] = []; @@ -201,7 +203,7 @@ export class OpPool { } } 
endProposerSlashing?.({ - step: "proposerSlashing", + step: BlockProductionStep.proposerSlashing, }); const endAttesterSlashings = stepsMetrics?.startTimer(); @@ -235,7 +237,7 @@ export class OpPool { } } endAttesterSlashings?.({ - step: "attesterSlashings", + step: BlockProductionStep.attesterSlashings, }); const endVoluntaryExits = stepsMetrics?.startTimer(); @@ -247,7 +249,10 @@ export class OpPool { // Signature validation is skipped in `isValidVoluntaryExit(,,false)` since it was already validated in gossip // However we must make sure that the signature fork is the same, or it will become invalid if included through // a future fork. - stateFork === config.getForkName(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) + isVoluntaryExitSignatureIncludable( + stateFork, + config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch)) + ) ) { voluntaryExits.push(voluntaryExit); if (voluntaryExits.length >= MAX_VOLUNTARY_EXITS) { @@ -256,7 +261,7 @@ export class OpPool { } } endVoluntaryExits?.({ - step: "voluntaryExits", + step: BlockProductionStep.voluntaryExits, }); const endBlsToExecutionChanges = stepsMetrics?.startTimer(); @@ -270,7 +275,7 @@ export class OpPool { } } endBlsToExecutionChanges?.({ - step: "blsToExecutionChanges", + step: BlockProductionStep.blsToExecutionChanges, }); return [attesterSlashings, proposerSlashings, voluntaryExits, blsToExecutionChanges]; @@ -299,11 +304,11 @@ export class OpPool { /** * Prune all types of transactions given the latest head state */ - pruneAll(headState: CachedBeaconStateAllForks, finalizedState: CachedBeaconStateAllForks | null): void { + pruneAll(headBlock: allForks.SignedBeaconBlock, headState: CachedBeaconStateAllForks): void { this.pruneAttesterSlashings(headState); this.pruneProposerSlashings(headState); this.pruneVoluntaryExits(headState); - this.pruneBlsToExecutionChanges(headState, finalizedState); + this.pruneBlsToExecutionChanges(headBlock, headState); } /** @@ -368,19 +373,28 @@ export 
class OpPool { } /** - * Call after finalizing - * Prune blsToExecutionChanges for validators which have been set with withdrawal - * credentials + * Prune BLS to execution changes that have been applied to the state more than 1 block ago. + * In the worse case where head block is reorged, the same BlsToExecutionChange message can be re-added + * to opPool once gossipsub seen cache TTL passes. */ private pruneBlsToExecutionChanges( - headState: CachedBeaconStateAllForks, - finalizedState: CachedBeaconStateAllForks | null + headBlock: allForks.SignedBeaconBlock, + headState: CachedBeaconStateAllForks ): void { + const {config} = headState; + const recentBlsToExecutionChanges = + config.getForkSeq(headBlock.message.slot) >= ForkSeq.capella + ? (headBlock as capella.SignedBeaconBlock).message.body.blsToExecutionChanges + : []; + + const recentBlsToExecutionChangeIndexes = new Set( + recentBlsToExecutionChanges.map((blsToExecutionChange) => blsToExecutionChange.message.validatorIndex) + ); + for (const [key, blsToExecutionChange] of this.blsToExecutionChanges.entries()) { - // TODO CAPELLA: We need the finalizedState to safely prune BlsToExecutionChanges. Finalized state may not be - // available in the cache, so it can be null. 
Once there's a head only prunning strategy, change - if (finalizedState !== null) { - const validator = finalizedState.validators.getReadonly(blsToExecutionChange.data.message.validatorIndex); + const {validatorIndex} = blsToExecutionChange.data.message; + if (!recentBlsToExecutionChangeIndexes.has(validatorIndex)) { + const validator = headState.validators.getReadonly(validatorIndex); if (validator.withdrawalCredentials[0] !== BLS_WITHDRAWAL_PREFIX) { this.blsToExecutionChanges.delete(key); } @@ -389,6 +403,19 @@ export class OpPool { } } +/** + * Returns true if a pre-validated signature is still valid to be included in a specific block's fork + */ +function isVoluntaryExitSignatureIncludable(stateFork: ForkSeq, voluntaryExitFork: ForkSeq): boolean { + if (stateFork >= ForkSeq.deneb) { + // Exists are perpetually valid https://eips.ethereum.org/EIPS/eip-7044 + return true; + } else { + // Can only include exits from the current and previous fork + return voluntaryExitFork === stateFork || voluntaryExitFork === stateFork - 1; + } +} + function isSlashableAtEpoch(validator: phase0.Validator, epoch: Epoch): boolean { return !validator.slashed && validator.activationEpoch <= epoch && epoch < validator.withdrawableEpoch; } diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index ce8e720cd766..60658b69ca98 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -1,4 +1,9 @@ -import {computeEpochAtSlot, isExecutionStateType, computeTimeAtSlot} from "@lodestar/state-transition"; +import { + computeEpochAtSlot, + isExecutionStateType, + computeTimeAtSlot, + StateHashTreeRootSource, +} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq, SLOTS_PER_EPOCH, ForkExecution} from "@lodestar/params"; import {Slot} from "@lodestar/types"; @@ -92,6 +97,9 @@ export class PrepareNextSlotScheduler { headRoot, 
isEpochTransition, }); + const precomputeEpochTransitionTimer = isEpochTransition + ? this.metrics?.precomputeNextEpochTransition.duration.startTimer() + : null; // No need to wait for this or the clock drift // Pre Bellatrix: we only do precompute state transition for the last slot of epoch // For Bellatrix, we always do the `processSlots()` to prepare payload for the next slot @@ -106,7 +114,9 @@ export class PrepareNextSlotScheduler { // cache HashObjects for faster hashTreeRoot() later, especially for computeNewStateRoot() if we need to produce a block at slot 0 of epoch // see https://github.com/ChainSafe/lodestar/issues/6194 - const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({source: "prepare_next_slot"}); + const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.prepareNextSlot, + }); prepareState.hashTreeRoot(); hashTreeRootTimer?.(); @@ -126,6 +136,8 @@ export class PrepareNextSlotScheduler { prepareSlot, previousHits, }); + + precomputeEpochTransitionTimer?.(); } if (isExecutionStateType(prepareState)) { diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts index f5d02dbf9b6f..ccc0595d0db6 100644 --- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts +++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts @@ -2,6 +2,7 @@ import { CachedBeaconStateAllForks, DataAvailableStatus, ExecutionPayloadStatus, + StateHashTreeRootSource, stateTransition, } from "@lodestar/state-transition"; import {allForks, Gwei, Root} from "@lodestar/types"; @@ -44,7 +45,9 @@ export function computeNewStateRoot( const {attestations, syncAggregate, slashing} = postState.proposerRewards; const proposerReward = BigInt(attestations + syncAggregate + slashing); - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({source: "compute_new_state_root"}); + const 
hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.computeNewStateRoot, + }); const newStateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 1c522c54a93d..b25b71514a71 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -1,8 +1,6 @@ import { Bytes32, - phase0, allForks, - altair, Root, RootHex, Slot, @@ -35,17 +33,31 @@ import {PayloadId, IExecutionEngine, IExecutionBuilder, PayloadAttributes} from import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js"; import {IEth1ForBlockProduction} from "../../eth1/index.js"; import {numToQuantity} from "../../eth1/provider/utils.js"; -import { - validateBlobsAndKzgCommitments, - validateBlindedBlobsAndKzgCommitments, -} from "./validateBlobsAndKzgCommitments.js"; +import {CommonBlockBody} from "../interface.js"; +import {validateBlobsAndKzgCommitments} from "./validateBlobsAndKzgCommitments.js"; // Time to provide the EL to generate a payload from new payload id const PAYLOAD_GENERATION_TIME_MS = 500; -enum PayloadPreparationType { + +export enum PayloadPreparationType { Fresh = "Fresh", Cached = "Cached", Reorged = "Reorged", + Blinded = "Blinded", +} + +/** + * Block production steps tracked in metrics + */ +export enum BlockProductionStep { + proposerSlashing = "proposerSlashing", + attesterSlashings = "attesterSlashings", + voluntaryExits = "voluntaryExits", + blsToExecutionChanges = "blsToExecutionChanges", + attestations = "attestations", + eth1DataAndDeposits = "eth1DataAndDeposits", + syncAggregate = "syncAggregate", + executionPayload = "executionPayload", } export type BlockAttributes = { @@ -74,34 +86,42 @@ export enum BlobsResultType { export type BlobsResult = | {type: BlobsResultType.preDeneb} - | {type: 
BlobsResultType.produced; blobSidecars: deneb.BlobSidecars; blockHash: RootHex} - | {type: BlobsResultType.blinded; blobSidecars: deneb.BlindedBlobSidecars; blockHash: RootHex}; + | {type: BlobsResultType.produced; contents: deneb.Contents; blockHash: RootHex} + | {type: BlobsResultType.blinded}; export async function produceBlockBody( this: BeaconChain, blockType: T, currentState: CachedBeaconStateAllForks, - { - randaoReveal, - graffiti, - slot: blockSlot, - feeRecipient: requestedFeeRecipient, - parentSlot, - parentBlockRoot, - proposerIndex, - proposerPubKey, - }: BlockAttributes & { + blockAttr: BlockAttributes & { parentSlot: Slot; parentBlockRoot: Root; proposerIndex: ValidatorIndex; proposerPubKey: BLSPubkey; + commonBlockBody?: CommonBlockBody; } -): Promise<{body: AssembledBodyType; blobs: BlobsResult; executionPayloadValue: Wei}> { +): Promise<{ + body: AssembledBodyType; + blobs: BlobsResult; + executionPayloadValue: Wei; + shouldOverrideBuilder?: boolean; +}> { + const { + slot: blockSlot, + feeRecipient: requestedFeeRecipient, + parentBlockRoot, + proposerIndex, + proposerPubKey, + commonBlockBody, + } = blockAttr; // Type-safe for blobs variable. Translate 'null' value into 'preDeneb' enum // TODO: Not ideal, but better than just using null. 
// TODO: Does not guarantee that preDeneb enum goes with a preDeneb block let blobsResult: BlobsResult; let executionPayloadValue: Wei; + // even though shouldOverrideBuilder is relevant for the engine response, for simplicity of typing + // we just return it undefined for the builder which anyway doesn't get consumed downstream + let shouldOverrideBuilder: boolean | undefined; const fork = currentState.config.getForkName(blockSlot); const logMeta: Record = { @@ -110,63 +130,17 @@ export async function produceBlockBody( slot: blockSlot, }; this.logger.verbose("Producing beacon block body", logMeta); - - // TODO: - // Iterate through the naive aggregation pool and ensure all the attestations from there - // are included in the operation pool. - // for (const attestation of db.attestationPool.getAll()) { - // try { - // opPool.insertAttestation(attestation); - // } catch (e) { - // // Don't stop block production if there's an error, just create a log. - // logger.error("Attestation did not transfer to op pool", {}, e); - // } - // } - const stepsMetrics = blockType === BlockType.Full ? this.metrics?.executionBlockProductionTimeSteps : this.metrics?.builderBlockProductionTimeSteps; - const [attesterSlashings, proposerSlashings, voluntaryExits, blsToExecutionChanges] = - this.opPool.getSlashingsAndExits(currentState, blockType, this.metrics); - - const endAttestations = stepsMetrics?.startTimer(); - const attestations = this.aggregatedAttestationPool.getAttestationsForBlock(this.forkChoice, currentState); - endAttestations?.({ - step: "attestations", - }); - - const endEth1DataAndDeposits = stepsMetrics?.startTimer(); - const {eth1Data, deposits} = await this.eth1.getEth1DataAndDeposits(currentState); - endEth1DataAndDeposits?.({ - step: "eth1DataAndDeposits", - }); + const blockBody = commonBlockBody + ? 
Object.assign({}, commonBlockBody) + : await produceCommonBlockBody.call(this, blockType, currentState, blockAttr); - const blockBody: phase0.BeaconBlockBody = { - randaoReveal, - graffiti, - eth1Data, - proposerSlashings, - attesterSlashings, - attestations, - deposits, - voluntaryExits, - }; - - const blockEpoch = computeEpochAtSlot(blockSlot); - - const endSyncAggregate = stepsMetrics?.startTimer(); - if (blockEpoch >= this.config.ALTAIR_FORK_EPOCH) { - const syncAggregate = this.syncContributionAndProofPool.getAggregate(parentSlot, parentBlockRoot); - this.metrics?.production.producedSyncAggregateParticipants.observe( - syncAggregate.syncCommitteeBits.getTrueBitIndexes().length - ); - (blockBody as altair.BeaconBlockBody).syncAggregate = syncAggregate; - } - endSyncAggregate?.({ - step: "syncAggregate", - }); + const {attestations, deposits, voluntaryExits, attesterSlashings, proposerSlashings, blsToExecutionChanges} = + blockBody; Object.assign(logMeta, { attestations: attestations.length, @@ -221,7 +195,7 @@ export async function produceBlockBody( executionPayloadValue = builderRes.executionPayloadValue; const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime); - const prepType = "blinded"; + const prepType = PayloadPreparationType.Blinded; this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime); this.logger.verbose("Fetched execution payload header from builder", { slot: blockSlot, @@ -231,35 +205,14 @@ export async function produceBlockBody( }); if (ForkSeq[fork] >= ForkSeq.deneb) { - const {blindedBlobsBundle} = builderRes; - if (blindedBlobsBundle === undefined) { - throw Error(`Invalid builder getHeader response for fork=${fork}, missing blindedBlobsBundle`); + const {blobKzgCommitments} = builderRes; + if (blobKzgCommitments === undefined) { + throw Error(`Invalid builder getHeader response for fork=${fork}, missing blobKzgCommitments`); } - // validate blindedBlobsBundle - if 
(this.opts.sanityCheckExecutionEngineBlobs) { - validateBlindedBlobsAndKzgCommitments(builderRes.header, blindedBlobsBundle); - } - - (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blindedBlobsBundle.commitments; - const blockHash = toHex(builderRes.header.blockHash); - - const blobSidecars = Array.from({length: blindedBlobsBundle.blobRoots.length}, (_v, index) => { - const blobRoot = blindedBlobsBundle.blobRoots[index]; - const commitment = blindedBlobsBundle.commitments[index]; - const proof = blindedBlobsBundle.proofs[index]; - const blindedBlobSidecar = { - index, - blobRoot, - kzgProof: proof, - kzgCommitment: commitment, - }; - // Other fields will be injected after postState is calculated - return blindedBlobSidecar; - }) as deneb.BlindedBlobSidecars; - blobsResult = {type: BlobsResultType.blinded, blobSidecars, blockHash}; - - Object.assign(logMeta, {blobs: blindedBlobsBundle.commitments.length}); + (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blobKzgCommitments; + blobsResult = {type: BlobsResultType.blinded}; + Object.assign(logMeta, {blobs: blobKzgCommitments.length}); } else { blobsResult = {type: BlobsResultType.preDeneb}; } @@ -303,9 +256,11 @@ export async function produceBlockBody( const engineRes = await this.executionEngine.getPayload(fork, payloadId); const {executionPayload, blobsBundle} = engineRes; + shouldOverrideBuilder = engineRes.shouldOverrideBuilder; + (blockBody as allForks.ExecutionBlockBody).executionPayload = executionPayload; executionPayloadValue = engineRes.executionPayloadValue; - Object.assign(logMeta, {transactions: executionPayload.transactions.length}); + Object.assign(logMeta, {transactions: executionPayload.transactions.length, shouldOverrideBuilder}); const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime); this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime); @@ -315,6 +270,7 @@ export async function 
produceBlockBody( prepType, payloadId, fetchedTime, + executionHeadBlockHash: toHex(engineRes.executionPayload.blockHash), }); if (executionPayload.transactions.length === 0) { this.metrics?.blockPayload.emptyPayloads.inc({prepType}); @@ -332,23 +288,10 @@ export async function produceBlockBody( (blockBody as deneb.BeaconBlockBody).blobKzgCommitments = blobsBundle.commitments; const blockHash = toHex(executionPayload.blockHash); + const contents = {kzgProofs: blobsBundle.proofs, blobs: blobsBundle.blobs}; + blobsResult = {type: BlobsResultType.produced, contents, blockHash}; - const blobSidecars = Array.from({length: blobsBundle.blobs.length}, (_v, index) => { - const blob = blobsBundle.blobs[index]; - const commitment = blobsBundle.commitments[index]; - const proof = blobsBundle.proofs[index]; - const blobSidecar = { - index, - blob, - kzgProof: proof, - kzgCommitment: commitment, - }; - // Other fields will be injected after postState is calculated - return blobSidecar; - }) as deneb.BlobSidecars; - blobsResult = {type: BlobsResultType.produced, blobSidecars, blockHash}; - - Object.assign(logMeta, {blobs: blobSidecars.length}); + Object.assign(logMeta, {blobs: blobsBundle.commitments.length}); } else { blobsResult = {type: BlobsResultType.preDeneb}; } @@ -380,12 +323,10 @@ export async function produceBlockBody( executionPayloadValue = BigInt(0); } endExecutionPayload?.({ - step: "executionPayload", + step: BlockProductionStep.executionPayload, }); if (ForkSeq[fork] >= ForkSeq.capella) { - // TODO: blsToExecutionChanges should be passed in the produceBlock call - (blockBody as capella.BeaconBlockBody).blsToExecutionChanges = blsToExecutionChanges; Object.assign(logMeta, { blsToExecutionChanges: blsToExecutionChanges.length, }); @@ -401,7 +342,7 @@ export async function produceBlockBody( Object.assign(logMeta, {executionPayloadValue}); this.logger.verbose("Produced beacon block body", logMeta); - return {body: blockBody as AssembledBodyType, blobs: blobsResult, 
executionPayloadValue}; + return {body: blockBody as AssembledBodyType, blobs: blobsResult, executionPayloadValue, shouldOverrideBuilder}; } /** @@ -502,7 +443,7 @@ async function prepareExecutionPayloadHeader( ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }> { if (!chain.executionBuilder) { throw Error("executionBuilder required"); @@ -627,4 +568,81 @@ function preparePayloadAttributes( return payloadAttributes; } -/** process_sync_committee_contributions is implemented in syncCommitteeContribution.getSyncAggregate */ +export async function produceCommonBlockBody( + this: BeaconChain, + blockType: T, + currentState: CachedBeaconStateAllForks, + { + randaoReveal, + graffiti, + slot, + parentSlot, + parentBlockRoot, + }: BlockAttributes & { + parentSlot: Slot; + parentBlockRoot: Root; + } +): Promise { + const stepsMetrics = + blockType === BlockType.Full + ? this.metrics?.executionBlockProductionTimeSteps + : this.metrics?.builderBlockProductionTimeSteps; + + const blockEpoch = computeEpochAtSlot(slot); + const fork = currentState.config.getForkName(slot); + + // TODO: + // Iterate through the naive aggregation pool and ensure all the attestations from there + // are included in the operation pool. + // for (const attestation of db.attestationPool.getAll()) { + // try { + // opPool.insertAttestation(attestation); + // } catch (e) { + // // Don't stop block production if there's an error, just create a log. 
+ // logger.error("Attestation did not transfer to op pool", {}, e); + // } + // } + const [attesterSlashings, proposerSlashings, voluntaryExits, blsToExecutionChanges] = + this.opPool.getSlashingsAndExits(currentState, blockType, this.metrics); + + const endAttestations = stepsMetrics?.startTimer(); + const attestations = this.aggregatedAttestationPool.getAttestationsForBlock(this.forkChoice, currentState); + endAttestations?.({ + step: BlockProductionStep.attestations, + }); + + const endEth1DataAndDeposits = stepsMetrics?.startTimer(); + const {eth1Data, deposits} = await this.eth1.getEth1DataAndDeposits(currentState); + endEth1DataAndDeposits?.({ + step: BlockProductionStep.eth1DataAndDeposits, + }); + + const blockBody: Omit = { + randaoReveal, + graffiti, + eth1Data, + proposerSlashings, + attesterSlashings, + attestations, + deposits, + voluntaryExits, + }; + + if (ForkSeq[fork] >= ForkSeq.capella) { + (blockBody as CommonBlockBody).blsToExecutionChanges = blsToExecutionChanges; + } + + const endSyncAggregate = stepsMetrics?.startTimer(); + if (blockEpoch >= this.config.ALTAIR_FORK_EPOCH) { + const syncAggregate = this.syncContributionAndProofPool.getAggregate(parentSlot, parentBlockRoot); + this.metrics?.production.producedSyncAggregateParticipants.observe( + syncAggregate.syncCommitteeBits.getTrueBitIndexes().length + ); + (blockBody as CommonBlockBody).syncAggregate = syncAggregate; + } + endSyncAggregate?.({ + step: BlockProductionStep.syncAggregate, + }); + + return blockBody as CommonBlockBody; +} diff --git a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts index 0d00d0c8bd72..54e90672d189 100644 --- a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts +++ b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts @@ -1,4 +1,4 @@ -import {allForks, deneb} from "@lodestar/types"; +import {allForks} 
from "@lodestar/types"; import {BlobsBundle} from "../../execution/index.js"; /** @@ -13,15 +13,3 @@ export function validateBlobsAndKzgCommitments(payload: allForks.ExecutionPayloa ); } } - -export function validateBlindedBlobsAndKzgCommitments( - payload: allForks.ExecutionPayloadHeader, - blindedBlobsBundle: deneb.BlindedBlobsBundle -): void { - // sanity-check that the KZG commitments match the blobs (as produced by the execution engine) - if (blindedBlobsBundle.blobRoots.length !== blindedBlobsBundle.commitments.length) { - throw Error( - `BlindedBlobs bundle blobs len ${blindedBlobsBundle.blobRoots.length} != commitments len ${blindedBlobsBundle.commitments.length}` - ); - } -} diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 5305502c8c05..dfda56cc1eea 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -221,7 +221,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { private jobQueueProcessor = async (regenRequest: RegenRequest): Promise => { const metricsLabels = { caller: regenRequest.args[regenRequest.args.length - 1] as RegenCaller, - entrypoint: regenRequest.key, + entrypoint: regenRequest.key as RegenFnName, }; let timer; try { diff --git a/packages/beacon-node/src/chain/reprocess.ts b/packages/beacon-node/src/chain/reprocess.ts index 3ab6056fb3af..4c91ef07ff69 100644 --- a/packages/beacon-node/src/chain/reprocess.ts +++ b/packages/beacon-node/src/chain/reprocess.ts @@ -11,7 +11,7 @@ export const REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC = 2; /** * Reprocess status for metrics */ -enum ReprocessStatus { +export enum ReprocessStatus { /** * There are too many attestations that have unknown block root. 
*/ @@ -140,7 +140,10 @@ export class ReprocessController { for (const awaitingPromise of awaitingPromisesByRoot.values()) { const {resolve, addedTimeMs} = awaitingPromise; resolve(false); - this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set((now - addedTimeMs) / 1000); + this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set( + {reason: ReprocessStatus.expired}, + (now - addedTimeMs) / 1000 + ); this.metrics?.reprocessApiAttestations.reject.inc({reason: ReprocessStatus.expired}); } diff --git a/packages/beacon-node/src/chain/seenCache/index.ts b/packages/beacon-node/src/chain/seenCache/index.ts index f354a37f93ee..250e6581c312 100644 --- a/packages/beacon-node/src/chain/seenCache/index.ts +++ b/packages/beacon-node/src/chain/seenCache/index.ts @@ -2,3 +2,4 @@ export {SeenAggregators, SeenAttesters} from "./seenAttesters.js"; export {SeenBlockProposers} from "./seenBlockProposers.js"; export {SeenSyncCommitteeMessages} from "./seenCommittee.js"; export {SeenContributionAndProof} from "./seenCommitteeContribution.js"; +export {SeenGossipBlockInput} from "./seenGossipBlockInput.js"; diff --git a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts index ded54a5b4a54..a19476497e9f 100644 --- a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts +++ b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts @@ -17,7 +17,7 @@ export type AttestationDataCacheEntry = { subnet: number; }; -enum RejectReason { +export enum RejectReason { // attestation data reaches MAX_CACHE_SIZE_PER_SLOT reached_limit = "reached_limit", // attestation data is too old diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts new file mode 100644 index 000000000000..8b767975c112 --- /dev/null +++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts @@ -0,0 +1,170 @@ +import 
{toHexString} from "@chainsafe/ssz"; +import {deneb, RootHex, ssz, allForks} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {pruneSetToMax} from "@lodestar/utils"; +import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; + +import { + BlockInput, + getBlockInput, + BlockSource, + BlockInputBlobs, + BlobsCache, + GossipedInputType, +} from "../blocks/types.js"; + +type GossipedBlockInput = + | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} + | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}; + +type BlockInputCacheType = { + block?: allForks.SignedBeaconBlock; + blockBytes?: Uint8Array | null; + blobsCache: BlobsCache; + // promise and its callback cached for delayed resolution + availabilityPromise: Promise; + resolveAvailability: (blobs: BlockInputBlobs) => void; +}; + +const MAX_GOSSIPINPUT_CACHE = 5; + +/** + * SeenGossipBlockInput tracks and caches the live blobs and blocks on the network to solve data availability + * for the blockInput. If no block has been seen yet for some already seen blobs, it responds will null, but + * on the first block or the consequent blobs it responds with blobs promise till all blobs become available. + * + * One can start processing block on blobs promise blockInput response and can await on the promise before + * fully importing the block. 
The blobs promise is gets resolved as soon as all blobs corresponding to that + * block are seen by SeenGossipBlockInput + */ +export class SeenGossipBlockInput { + private blockInputCache = new Map(); + + prune(): void { + pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); + } + + getGossipBlockInput( + config: ChainForkConfig, + gossipedInput: GossipedBlockInput + ): + | { + blockInput: BlockInput; + blockInputMeta: {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number}; + } + | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}} { + let blockHex; + let blockCache; + + if (gossipedInput.type === GossipedInputType.block) { + const {signedBlock, blockBytes} = gossipedInput; + + blockHex = toHexString( + config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message) + ); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(); + + blockCache.block = signedBlock; + blockCache.blockBytes = blockBytes; + } else { + const {blobSidecar, blobBytes} = gossipedInput; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); + blockHex = toHexString(blockRoot); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(); + + // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions + blockCache.blobsCache.set(blobSidecar.index, { + blobSidecar, + // easily splice out the unsigned message as blob is a fixed length type + blobBytes: blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? 
null, + }); + } + + if (!this.blockInputCache.has(blockHex)) { + this.blockInputCache.set(blockHex, blockCache); + } + const {block: signedBlock, blockBytes, blobsCache, availabilityPromise, resolveAvailability} = blockCache; + + if (signedBlock !== undefined) { + // block is available, check if all blobs have shown up + const {slot, body} = signedBlock.message; + const {blobKzgCommitments} = body as deneb.BeaconBlockBody; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (blobKzgCommitments.length < blobsCache.size) { + throw Error( + `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` + ); + } + + if (blobKzgCommitments.length === blobsCache.size) { + const allBlobs = getBlockInputBlobs(blobsCache); + resolveAvailability(allBlobs); + const {blobs, blobsBytes} = allBlobs; + return { + blockInput: getBlockInput.postDeneb( + config, + signedBlock, + BlockSource.gossip, + blobs, + blockBytes ?? null, + blobsBytes + ), + blockInputMeta: {pending: null, haveBlobs: blobs.length, expectedBlobs: blobKzgCommitments.length}, + }; + } else { + return { + blockInput: getBlockInput.blobsPromise( + config, + signedBlock, + BlockSource.gossip, + blobsCache, + blockBytes ?? null, + availabilityPromise + ), + blockInputMeta: { + pending: GossipedInputType.blob, + haveBlobs: blobsCache.size, + expectedBlobs: blobKzgCommitments.length, + }, + }; + } + } else { + // will need to wait for the block to showup + return { + blockInput: null, + blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, + }; + } + } +} + +function getEmptyBlockInputCacheEntry(): BlockInputCacheType { + // Capture both the promise and its callbacks. 
+ // It is not spec'ed but in tests in Firefox and NodeJS the promise constructor is run immediately + let resolveAvailability: ((blobs: BlockInputBlobs) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + const blobsCache = new Map(); + return {availabilityPromise, resolveAvailability, blobsCache}; +} + +function getBlockInputBlobs(blobsCache: BlobsCache): BlockInputBlobs { + const blobs = []; + const blobsBytes = []; + + for (let index = 0; index < blobsCache.size; index++) { + const blobCache = blobsCache.get(index); + if (blobCache === undefined) { + throw Error(`Missing blobSidecar at index=${index}`); + } + const {blobSidecar, blobBytes} = blobCache; + blobs.push(blobSidecar); + blobsBytes.push(blobBytes); + } + return {blobs, blobsBytes}; +} diff --git a/packages/beacon-node/src/chain/shufflingCache.ts b/packages/beacon-node/src/chain/shufflingCache.ts index c8468f3b6db5..23177142d846 100644 --- a/packages/beacon-node/src/chain/shufflingCache.ts +++ b/packages/beacon-node/src/chain/shufflingCache.ts @@ -167,6 +167,23 @@ export class ShufflingCache { } } + /** + * Same to get() function but synchronous. 
+ */ + getSync(shufflingEpoch: Epoch, decisionRootHex: RootHex): EpochShuffling | null { + const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionRootHex); + if (cacheItem === undefined) { + return null; + } + + if (isShufflingCacheItem(cacheItem)) { + return cacheItem.shuffling; + } + + // ignore promise + return null; + } + private add(shufflingEpoch: Epoch, decisionBlock: RootHex, cacheItem: CacheItem): void { this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).set(decisionBlock, cacheItem); pruneSetToMax(this.itemsByDecisionRootByEpoch, this.maxEpochs); diff --git a/packages/beacon-node/src/chain/stateCache/datastore/db.ts b/packages/beacon-node/src/chain/stateCache/datastore/db.ts new file mode 100644 index 000000000000..fef38a7f8dd2 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/db.ts @@ -0,0 +1,38 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {phase0, ssz} from "@lodestar/types"; +import {IBeaconDb} from "../../../db/interface.js"; +import {CPStateDatastore, DatastoreKey} from "./types.js"; + +/** + * Implementation of CPStateDatastore using db. 
+ */ +export class DbCPStateDatastore implements CPStateDatastore { + constructor(private readonly db: IBeaconDb) {} + + async write(cpKey: phase0.Checkpoint, state: CachedBeaconStateAllForks): Promise { + const serializedCheckpoint = checkpointToDatastoreKey(cpKey); + const stateBytes = state.serialize(); + await this.db.checkpointState.putBinary(serializedCheckpoint, stateBytes); + return serializedCheckpoint; + } + + async remove(serializedCheckpoint: DatastoreKey): Promise { + await this.db.checkpointState.delete(serializedCheckpoint); + } + + async read(serializedCheckpoint: DatastoreKey): Promise { + return this.db.checkpointState.getBinary(serializedCheckpoint); + } + + async readKeys(): Promise { + return this.db.checkpointState.keys(); + } +} + +export function datastoreKeyToCheckpoint(key: DatastoreKey): phase0.Checkpoint { + return ssz.phase0.Checkpoint.deserialize(key); +} + +export function checkpointToDatastoreKey(cp: phase0.Checkpoint): DatastoreKey { + return ssz.phase0.Checkpoint.serialize(cp); +} diff --git a/packages/beacon-node/src/chain/stateCache/datastore/index.ts b/packages/beacon-node/src/chain/stateCache/datastore/index.ts new file mode 100644 index 000000000000..c37de5292a38 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/index.ts @@ -0,0 +1,2 @@ +export * from "./types.js"; +export * from "./db.js"; diff --git a/packages/beacon-node/src/chain/stateCache/datastore/types.ts b/packages/beacon-node/src/chain/stateCache/datastore/types.ts new file mode 100644 index 000000000000..66ea67f93500 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/types.ts @@ -0,0 +1,13 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {phase0} from "@lodestar/types"; + +// With db implementation, persistedKey is serialized data of a checkpoint +export type DatastoreKey = Uint8Array; + +// Make this generic to support testing +export interface CPStateDatastore { + write: (cpKey: 
phase0.Checkpoint, state: CachedBeaconStateAllForks) => Promise; + remove: (key: DatastoreKey) => Promise; + read: (key: DatastoreKey) => Promise; + readKeys: () => Promise; +} diff --git a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts new file mode 100644 index 000000000000..854983101c04 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts @@ -0,0 +1,181 @@ +import {toHexString} from "@chainsafe/ssz"; +import {RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {routes} from "@lodestar/api"; +import {Metrics} from "../../metrics/index.js"; +import {LinkedList} from "../../util/array.js"; +import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; + +export type FIFOBlockStateCacheOpts = { + maxBlockStates?: number; +}; + +/** + * Regen state if there's a reorg distance > 32 slots. + */ +export const DEFAULT_MAX_BLOCK_STATES = 32; + +/** + * New implementation of BlockStateCache that keeps the most recent n states consistently + * - Maintain a linked list (FIFO) with special handling for head state, which is always the first item in the list + * - Prune per add() instead of per checkpoint so it only keeps n historical states consistently, prune from tail + * - No need to prune per finalized checkpoint + * + * Given this block tree with Block 11 as head: + * ``` + Block 10 + | + +-----+-----+ + | | + Block 11 Block 12 + ^ | + | | + head Block 13 + * ``` + * The maintained key order would be: 11 -> 13 -> 12 -> 10, and state 10 will be pruned first. 
+ */ +export class FIFOBlockStateCache implements BlockStateCache { + /** + * Max number of states allowed in the cache + */ + readonly maxStates: number; + + private readonly cache: MapTracker; + /** + * Key order to implement FIFO cache + */ + private readonly keyOrder: LinkedList; + private readonly metrics: Metrics["stateCache"] | null | undefined; + + constructor(opts: FIFOBlockStateCacheOpts, {metrics}: {metrics?: Metrics | null}) { + this.maxStates = opts.maxBlockStates ?? DEFAULT_MAX_BLOCK_STATES; + this.cache = new MapTracker(metrics?.stateCache); + if (metrics) { + this.metrics = metrics.stateCache; + metrics.stateCache.size.addCollect(() => metrics.stateCache.size.set(this.cache.size)); + } + this.keyOrder = new LinkedList(); + } + + /** + * Set a state as head, happens when importing a block and head block is changed. + */ + setHeadState(item: CachedBeaconStateAllForks | null): void { + if (item !== null) { + this.add(item, true); + } + } + + /** + * Get a state from this cache given a state root hex. + */ + get(rootHex: RootHex): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const item = this.cache.get(rootHex); + if (!item) { + return null; + } + + this.metrics?.hits.inc(); + this.metrics?.stateClonedCount.observe(item.clonedCount); + + return item; + } + + /** + * Add a state to this cache. + * @param isHead if true, move it to the head of the list. Otherwise add to the 2nd position. + * In importBlock() steps, normally it'll call add() with isHead = false first. Then call setHeadState() to set the head. 
+ */ + add(item: CachedBeaconStateAllForks, isHead = false): void { + const key = toHexString(item.hashTreeRoot()); + if (this.cache.get(key) != null) { + if (!this.keyOrder.has(key)) { + throw Error(`State exists but key not found in keyOrder: ${key}`); + } + if (isHead) { + this.keyOrder.moveToHead(key); + } else { + this.keyOrder.moveToSecond(key); + } + // same size, no prune + return; + } + + // new state + this.metrics?.adds.inc(); + this.cache.set(key, item); + if (isHead) { + this.keyOrder.unshift(key); + } else { + // insert after head + const head = this.keyOrder.first(); + if (head == null) { + // should not happen, however handle just in case + this.keyOrder.unshift(key); + } else { + this.keyOrder.insertAfter(head, key); + } + } + this.prune(key); + } + + get size(): number { + return this.cache.size; + } + + /** + * Prune the cache from tail to keep the most recent n states consistently. + * The tail of the list is the oldest state, in case regen adds back the same state, + * it should stay next to head so that it won't be pruned right away. + * The FIFO cache helps with this. + */ + prune(lastAddedKey: string): void { + while (this.keyOrder.length > this.maxStates) { + const key = this.keyOrder.last(); + // it does not make sense to prune the last added state + // this only happens when max state is 1 in a short period of time + if (key === lastAddedKey) { + break; + } + if (!key) { + // should not happen + throw new Error("No key"); + } + this.keyOrder.pop(); + this.cache.delete(key); + } + } + + /** + * No need for this implementation + * This is only to conform to the old api + */ + deleteAllBeforeEpoch(): void {} + + /** + * ONLY FOR DEBUGGING PURPOSES. For lodestar debug API. + */ + clear(): void { + this.cache.clear(); + } + + /** ONLY FOR DEBUGGING PURPOSES. 
For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.entries()).map(([key, state]) => ({ + slot: state.slot, + root: toHexString(state.hashTreeRoot()), + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 0, + checkpointState: false, + })); + } + + /** + * For unit test only. + */ + dumpKeyOrder(): string[] { + return this.keyOrder.toArray(); + } +} diff --git a/packages/beacon-node/src/chain/stateCache/index.ts b/packages/beacon-node/src/chain/stateCache/index.ts index 69fb34a77e4c..b16d87c3fa0d 100644 --- a/packages/beacon-node/src/chain/stateCache/index.ts +++ b/packages/beacon-node/src/chain/stateCache/index.ts @@ -1,2 +1,3 @@ export * from "./stateContextCache.js"; export * from "./stateContextCheckpointsCache.js"; +export * from "./fifoBlockStateCache.js"; diff --git a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts index eb52755bfc00..bb33323015d4 100644 --- a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts +++ b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts @@ -1,8 +1,8 @@ -import {IAvgMinMax} from "../../metrics/index.js"; +import {AvgMinMax} from "@lodestar/utils"; type MapTrackerMetrics = { - reads: IAvgMinMax; - secondsSinceLastRead: IAvgMinMax; + reads: AvgMinMax; + secondsSinceLastRead: AvgMinMax; }; export class MapTracker extends Map { diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts new file mode 100644 index 000000000000..8ad5c5098118 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -0,0 +1,645 @@ +import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {phase0, Epoch, RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks, computeStartSlotAtEpoch, getBlockRootAtSlot} from 
"@lodestar/state-transition"; +import {Logger, MapDef} from "@lodestar/utils"; +import {routes} from "@lodestar/api"; +import {loadCachedBeaconState} from "@lodestar/state-transition"; +import {Metrics} from "../../metrics/index.js"; +import {IClock} from "../../util/clock.js"; +import {ShufflingCache} from "../shufflingCache.js"; +import {MapTracker} from "./mapMetrics.js"; +import {CheckpointHex, CheckpointStateCache, CacheItemType} from "./types.js"; +import {CPStateDatastore, DatastoreKey, datastoreKeyToCheckpoint} from "./datastore/index.js"; + +type GetHeadStateFn = () => CachedBeaconStateAllForks; + +type PersistentCheckpointStateCacheModules = { + metrics?: Metrics | null; + logger: Logger; + clock?: IClock | null; + shufflingCache: ShufflingCache; + datastore: CPStateDatastore; + getHeadState?: GetHeadStateFn; +}; + +type PersistentCheckpointStateCacheOpts = { + // Keep max n states in memory, persist the rest to disk + maxCPStateEpochsInMemory?: number; +}; + +/** checkpoint serialized as a string */ +type CacheKey = string; + +type InMemoryCacheItem = { + type: CacheItemType.inMemory; + state: CachedBeaconStateAllForks; + // if a cp state is reloaded from disk, it'll keep track of persistedKey to allow us to remove it from disk later + // it also helps not to persist it again + persistedKey?: DatastoreKey; +}; + +type PersistedCacheItem = { + type: CacheItemType.persisted; + value: DatastoreKey; +}; + +type CacheItem = InMemoryCacheItem | PersistedCacheItem; + +type LoadedStateBytesData = {persistedKey: DatastoreKey; stateBytes: Uint8Array}; + +/** + * Before n-historical states, lodestar keeps mostly 3 states in memory with 1 finalized state + * Since Jan 2024, lodestar stores the finalized state in disk and keeps up to 2 epochs in memory + */ +export const DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY = 2; + +/** + * An implementation of CheckpointStateCache that keep up to n epoch checkpoint states in memory and persist the rest to disk + * - If it's more 
than `maxEpochsInMemory` epochs old, it will persist n last epochs to disk based on the view of the block + * - Once a chain gets finalized we'll prune all states from memory and disk for epochs < finalizedEpoch + * - In get*() apis if shouldReload is true, it will reload from disk. The reload() api is expensive and should only be called in some important flows: + * - Get state for block processing + * - updateHeadState + * - as with any cache, the state could be evicted from memory at any time, so we should always check if the state is in memory or not + * - Each time we process a state, we only persist exactly 1 checkpoint state per epoch based on the view of block and prune all others. The persisted + * checkpoint state could be finalized and used later in archive task, it's also used to regen states. + * - When we process multiple states in the same epoch, we could persist different checkpoint states of the same epoch because each block could have its + * own view. See unit test of this file `packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts` for more details. + * + * The below diagram shows Previous Root Checkpoint State is persisted for epoch (n-2) and Current Root Checkpoint State is persisted for epoch (n-1) + * while at epoch (n) and (n+1) we have both of them in memory + * + * ╔════════════════════════════════════╗═══════════════╗ + * ║ persisted to db or fs ║ in memory ║ + * ║ reload if needed ║ ║ + * ║ -----------------------------------║---------------║ + * ║ epoch: (n-2) (n-1) ║ n (n+1) ║ + * ║ |-------|-------|----║--|-------|----║ + * ║ ^ ^ ║ ^ ^ ║ + * ║ ║ ^ ^ ║ + * ╚════════════════════════════════════╝═══════════════╝ + * + * The "in memory" checkpoint states are similar to the old implementation: we have both Previous Root Checkpoint State and Current Root Checkpoint State per epoch. 
+ * However in the "persisted to db or fs" part, we usually only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later + * based on the view of blocks. + */ +export class PersistentCheckpointStateCache implements CheckpointStateCache { + private readonly cache: MapTracker; + /** Epoch -> Set */ + private readonly epochIndex = new MapDef>(() => new Set()); + private readonly metrics: Metrics["cpStateCache"] | null | undefined; + private readonly logger: Logger; + private readonly clock: IClock | null | undefined; + private preComputedCheckpoint: string | null = null; + private preComputedCheckpointHits: number | null = null; + private readonly maxEpochsInMemory: number; + private readonly datastore: CPStateDatastore; + private readonly shufflingCache: ShufflingCache; + private readonly getHeadState?: GetHeadStateFn; + + constructor( + {metrics, logger, clock, shufflingCache, datastore, getHeadState}: PersistentCheckpointStateCacheModules, + opts: PersistentCheckpointStateCacheOpts + ) { + this.cache = new MapTracker(metrics?.cpStateCache); + if (metrics) { + this.metrics = metrics.cpStateCache; + metrics.cpStateCache.size.addCollect(() => { + let persistCount = 0; + let inMemoryCount = 0; + const memoryEpochs = new Set(); + const persistentEpochs = new Set(); + for (const [key, cacheItem] of this.cache.entries()) { + const {epoch} = fromCacheKey(key); + if (isPersistedCacheItem(cacheItem)) { + persistCount++; + persistentEpochs.add(epoch); + } else { + inMemoryCount++; + memoryEpochs.add(epoch); + } + } + metrics.cpStateCache.size.set({type: CacheItemType.persisted}, persistCount); + metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, inMemoryCount); + metrics.cpStateCache.epochSize.set({type: CacheItemType.persisted}, persistentEpochs.size); + metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, memoryEpochs.size); + }); + } + this.logger = logger; + this.clock = clock; + if 
(opts.maxCPStateEpochsInMemory !== undefined && opts.maxCPStateEpochsInMemory < 0) { + throw new Error("maxEpochsInMemory must be >= 0"); + } + this.maxEpochsInMemory = opts.maxCPStateEpochsInMemory ?? DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY; + // Specify different datastore for testing + this.datastore = datastore; + this.shufflingCache = shufflingCache; + this.getHeadState = getHeadState; + } + + /** + * Reload checkpoint state keys from the last run. + */ + async init(): Promise { + const persistedKeys = await this.datastore.readKeys(); + for (const persistedKey of persistedKeys) { + const cp = datastoreKeyToCheckpoint(persistedKey); + this.cache.set(toCacheKey(cp), {type: CacheItemType.persisted, value: persistedKey}); + this.epochIndex.getOrDefault(cp.epoch).add(toHexString(cp.root)); + } + this.logger.info("Loaded persisted checkpoint states from the last run", { + count: persistedKeys.length, + maxEpochsInMemory: this.maxEpochsInMemory, + }); + } + + /** + * Get a state from cache, it may reload from disk. + * This is an expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReload(cp: CheckpointHex): Promise { + const stateOrStateBytesData = await this.getStateOrLoadDb(cp); + if (stateOrStateBytesData === null || isCachedBeaconState(stateOrStateBytesData)) { + return stateOrStateBytesData; + } + const {persistedKey, stateBytes} = stateOrStateBytesData; + const logMeta = {persistedKey: toHexString(persistedKey)}; + this.logger.debug("Reload: read state successful", logMeta); + this.metrics?.stateReloadSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + const seedState = this.findSeedStateToReload(cp) ?? 
this.getHeadState?.(); + if (seedState == null) { + throw new Error("No seed state found for cp " + toCacheKey(cp)); + } + this.metrics?.stateReloadEpochDiff.observe(Math.abs(seedState.epochCtx.epoch - cp.epoch)); + this.logger.debug("Reload: found seed state", {...logMeta, seedSlot: seedState.slot}); + + try { + const timer = this.metrics?.stateReloadDuration.startTimer(); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, { + shufflingGetter: this.shufflingCache.getSync.bind(this.shufflingCache), + }); + newCachedState.commit(); + const stateRoot = toHexString(newCachedState.hashTreeRoot()); + timer?.(); + this.logger.debug("Reload: cached state load successful", { + ...logMeta, + stateSlot: newCachedState.slot, + stateRoot, + seedSlot: seedState.slot, + }); + + // only remove persisted state once we reload successfully + const cpKey = toCacheKey(cp); + this.cache.set(cpKey, {type: CacheItemType.inMemory, state: newCachedState, persistedKey}); + this.epochIndex.getOrDefault(cp.epoch).add(cp.rootHex); + // don't prune from memory here, call it at the last 1/3 of slot 0 of an epoch + return newCachedState; + } catch (e) { + this.logger.debug("Reload: error loading cached state", logMeta, e as Error); + return null; + } + } + + /** + * Return either state or state bytes loaded from db. + */ + async getStateOrBytes(cp: CheckpointHex): Promise { + const stateOrLoadedState = await this.getStateOrLoadDb(cp); + if (stateOrLoadedState === null || isCachedBeaconState(stateOrLoadedState)) { + return stateOrLoadedState; + } + return stateOrLoadedState.stateBytes; + } + + /** + * Return either state or state bytes with persisted key loaded from db. 
+ */ + async getStateOrLoadDb(cp: CheckpointHex): Promise { + const cpKey = toCacheKey(cp); + const inMemoryState = this.get(cpKey); + if (inMemoryState) { + return inMemoryState; + } + + const cacheItem = this.cache.get(cpKey); + if (cacheItem === undefined) { + return null; + } + + if (isInMemoryCacheItem(cacheItem)) { + // should not happen, in-memory state is handled above + throw new Error("Expected persistent key"); + } + + const persistedKey = cacheItem.value; + const dbReadTimer = this.metrics?.stateReloadDbReadTime.startTimer(); + const stateBytes = await this.datastore.read(persistedKey); + dbReadTimer?.(); + + if (stateBytes === null) { + return null; + } + return {persistedKey, stateBytes}; + } + + /** + * Similar to get() api without reloading from disk + */ + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const cpKey = typeof cpOrKey === "string" ? cpOrKey : toCacheKey(cpOrKey); + const cacheItem = this.cache.get(cpKey); + + if (cacheItem === undefined) { + return null; + } + + this.metrics?.hits.inc(); + + if (cpKey === this.preComputedCheckpoint) { + this.preComputedCheckpointHits = (this.preComputedCheckpointHits ?? 0) + 1; + } + + if (isInMemoryCacheItem(cacheItem)) { + const {state} = cacheItem; + this.metrics?.stateClonedCount.observe(state.clonedCount); + return state; + } + + return null; + } + + /** + * Add a state of a checkpoint to this cache, prune from memory if necessary. 
+ */ + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void { + const cpHex = toCheckpointHex(cp); + const key = toCacheKey(cpHex); + const cacheItem = this.cache.get(key); + this.metrics?.adds.inc(); + if (cacheItem !== undefined && isPersistedCacheItem(cacheItem)) { + const persistedKey = cacheItem.value; + // was persisted to disk, set back to memory + this.cache.set(key, {type: CacheItemType.inMemory, state, persistedKey}); + this.logger.verbose("Added checkpoint state to memory but a persisted key existed", { + epoch: cp.epoch, + rootHex: cpHex.rootHex, + persistedKey: toHexString(persistedKey), + }); + } else { + this.cache.set(key, {type: CacheItemType.inMemory, state}); + this.logger.verbose("Added checkpoint state to memory", {epoch: cp.epoch, rootHex: cpHex.rootHex}); + } + this.epochIndex.getOrDefault(cp.epoch).add(cpHex.rootHex); + } + + /** + * Searches in-memory state for the latest cached state with a `root` without reload, starting with `epoch` and descending + */ + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + .filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + const inMemoryState = this.get({rootHex, epoch}); + if (inMemoryState) { + return inMemoryState; + } + } + } + return null; + } + + /** + * Searches state for the latest cached state with a `root`, reload if needed, starting with `epoch` and descending + * This is expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + 
.filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + try { + const state = await this.getOrReload({rootHex, epoch}); + if (state) { + return state; + } + } catch (e) { + this.logger.debug("Error get or reload state", {epoch, rootHex}, e as Error); + } + } + } + return null; + } + + /** + * Update the precomputed checkpoint and return the number of his for the + * previous one (if any). + */ + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null { + const previousHits = this.preComputedCheckpointHits; + this.preComputedCheckpoint = toCacheKey({rootHex, epoch}); + this.preComputedCheckpointHits = 0; + return previousHits; + } + + /** + * This is just to conform to the old implementation + */ + prune(): void { + // do nothing + } + + /** + * Prune all checkpoint states before the provided finalized epoch. + */ + pruneFinalized(finalizedEpoch: Epoch): void { + for (const epoch of this.epochIndex.keys()) { + if (epoch < finalizedEpoch) { + this.deleteAllEpochItems(epoch).catch((e) => + this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error) + ); + } + } + } + + /** + * After processing a block, prune from memory based on the view of that block. + * This is likely persist 1 state per epoch, at the last 1/3 of slot 0 of an epoch although it'll be called on every last 1/3 of slot. 
+ * Given the following block b was processed with b2, b1, b0 are ancestors in epoch (n-2), (n-1), n respectively + * + * epoch: (n-2) (n-1) n (n+1) + * |-----------|-----------|-----------|-----------| + * ^ ^ ^ ^ + * | | | | + * block chain: b2---------->b1--------->b0-->b + * + * After processing block b, if maxEpochsInMemory is: + * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk + * - 1 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} checkpoint state to disk + * - 0 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} and {root: b0, epoch n} checkpoint state to disk + * - if any old epochs checkpoint states are persisted, no need to do it again + * + * Note that for each epoch there could be multiple checkpoint states, usually 2, one for Previous Root Checkpoint State and one for Current Root Checkpoint State. + * We normally only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later based on the view of the block. + * Other checkpoint states are pruned from memory. + * + * This design also covers the reorg scenario. Given block c in the same epoch n where c.slot > b.slot, c is not descendant of b, and c is built on top of c0 + * instead of b0 (epoch (n - 1)) + * + * epoch: (n-2) (n-1) n (n+1) + * |-----------|-----------|-----------|-----------| + * ^ ^ ^ ^ ^ ^ + * | | | | | | + * block chain: b2---------->b1----->c0->b0-->b | + * ║ | + * ╚═══════════>c (reorg) + * + * After processing block c, if maxEpochsInMemory is: + * - 0 then we'll persist {root: c0, epoch: n} checkpoint state to disk. Note that regen should populate {root: c0, epoch: n} checkpoint state before. 
+ * + * epoch: (n-1) n (n+1) + * |-------------------------------------------------------------|-------------------------------------------------------------| + * ^ ^ ^ ^ + * _______ | | | | + * | | | | | | + * | db |====== reload ======> {root: b1, epoch: n-1} cp state ======> c0 block state ======> {root: c0, epoch: n} cp state =====> c block state + * |_______| + * + * + * + * - 1 then we'll persist {root: b1, epoch n-1} checkpoint state to disk. Note that at epoch n there is both {root: b0, epoch: n} and {root: c0, epoch: n} checkpoint states in memory + * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk, there are also 2 checkpoint states in memory at epoch n, same to the above (maxEpochsInMemory=1) + * + * As of Nov 2023, it takes 1.3s to 1.5s to persist a state on holesky on fast server. TODO: + * - improve state serialization time + * - or research how to only store diff against the finalized state + */ + async processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise { + let persistCount = 0; + // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states + const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b); + if (sortedEpochs.length <= this.maxEpochsInMemory) { + return 0; + } + + for (const lowestEpoch of sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory)) { + const epochBoundarySlot = computeStartSlotAtEpoch(lowestEpoch); + const epochBoundaryRoot = + epochBoundarySlot === state.slot ? fromHexString(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); + const epochBoundaryHex = toHexString(epochBoundaryRoot); + + // for each epoch, usually there are 2 rootHex respective to the 2 checkpoint states: Previous Root Checkpoint State and Current Root Checkpoint State + for (const rootHex of this.epochIndex.get(lowestEpoch) ?? 
[]) { + const cpKey = toCacheKey({epoch: lowestEpoch, rootHex}); + const cacheItem = this.cache.get(cpKey); + + if (cacheItem !== undefined && isInMemoryCacheItem(cacheItem)) { + // this is state in memory, we don't care if the checkpoint state is already persisted + let {persistedKey} = cacheItem; + const {state} = cacheItem; + const logMeta = { + stateSlot: state.slot, + rootHex, + epochBoundaryHex, + persistedKey: persistedKey ? toHexString(persistedKey) : "", + }; + + if (rootHex === epochBoundaryHex) { + if (persistedKey) { + // no need to persist + this.logger.verbose("Pruned checkpoint state from memory but no need to persist", logMeta); + } else { + // persist and do not update epochIndex + this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + const timer = this.metrics?.statePersistDuration.startTimer(); + const cpPersist = {epoch: lowestEpoch, root: epochBoundaryRoot}; + persistedKey = await this.datastore.write(cpPersist, state); + timer?.(); + persistCount++; + this.logger.verbose("Pruned checkpoint state from memory and persisted to disk", { + ...logMeta, + persistedKey: toHexString(persistedKey), + }); + } + // overwrite cpKey, this means the state is deleted from memory + this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey}); + } else { + if (persistedKey) { + // persisted file will be eventually deleted by the archive task + // this also means the state is deleted from memory + this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey}); + // do not update epochIndex + } else { + // delete the state from memory + this.cache.delete(cpKey); + this.epochIndex.get(lowestEpoch)?.delete(rootHex); + } + this.metrics?.statePruneFromMemoryCount.inc(); + this.logger.verbose("Pruned checkpoint state from memory", logMeta); + } + } + } + } + + return persistCount; + } + + /** + * Find a seed state to reload the state of provided checkpoint. 
Based on the design of n-historical state: + * + * ╔════════════════════════════════════╗═══════════════╗ + * ║ persisted to db or fs ║ in memory ║ + * ║ reload if needed ║ ║ + * ║ -----------------------------------║---------------║ + * ║ epoch: (n-2) (n-1) ║ n (n+1) ║ + * ║ |-------|-------|----║--|-------|----║ + * ║ ^ ^ ║ ^ ^ ║ + * ║ ║ ^ ^ ║ + * ╚════════════════════════════════════╝═══════════════╝ + * + * we always reload an epoch in the past. We'll start with epoch n then (n+1) prioritizing ones with the same view of `reloadedCp`. + * + * This could return null and we should get head state in that case. + */ + findSeedStateToReload(reloadedCp: CheckpointHex): CachedBeaconStateAllForks | null { + const maxEpoch = Math.max(...Array.from(this.epochIndex.keys())); + const reloadedCpSlot = computeStartSlotAtEpoch(reloadedCp.epoch); + let firstState: CachedBeaconStateAllForks | null = null; + // no need to check epochs before `maxEpoch - this.maxEpochsInMemory + 1` before they are all persisted + for (let epoch = maxEpoch - this.maxEpochsInMemory + 1; epoch <= maxEpoch; epoch++) { + // if there's at least 1 state in memory in an epoch, just return the 1st one + if (firstState !== null) { + return firstState; + } + + for (const rootHex of this.epochIndex.get(epoch) || []) { + const cpKey = toCacheKey({rootHex, epoch}); + const cacheItem = this.cache.get(cpKey); + if (cacheItem === undefined) { + // should not happen + continue; + } + if (isInMemoryCacheItem(cacheItem)) { + const {state} = cacheItem; + if (firstState === null) { + firstState = state; + } + + // amongst states of the same epoch, choose the one with the same view of reloadedCp + if ( + reloadedCpSlot < state.slot && + toHexString(getBlockRootAtSlot(state, reloadedCpSlot)) === reloadedCp.rootHex + ) { + return state; + } + } + } + } + + return firstState; + } + + clear(): void { + this.cache.clear(); + this.epochIndex.clear(); + } + + /** ONLY FOR DEBUGGING PURPOSES. 
For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.keys()).map((key) => { + const cp = fromCacheKey(key); + // TODO: add checkpoint key and persistent key to the summary + return { + slot: computeStartSlotAtEpoch(cp.epoch), + root: cp.rootHex, + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 0, + checkpointState: true, + }; + }); + } + + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ + dumpCheckpointKeys(): string[] { + return Array.from(this.cache.keys()); + } + + /** + * Delete all items of an epoch from disk and memory + */ + private async deleteAllEpochItems(epoch: Epoch): Promise { + let persistCount = 0; + const rootHexes = this.epochIndex.get(epoch) || []; + for (const rootHex of rootHexes) { + const key = toCacheKey({rootHex, epoch}); + const cacheItem = this.cache.get(key); + + if (cacheItem) { + const persistedKey = isPersistedCacheItem(cacheItem) ? cacheItem.value : cacheItem.persistedKey; + if (persistedKey) { + await this.datastore.remove(persistedKey); + persistCount++; + this.metrics?.persistedStateRemoveCount.inc(); + } + } + this.cache.delete(key); + } + this.epochIndex.delete(epoch); + this.logger.verbose("Pruned finalized checkpoints states for epoch", { + epoch, + persistCount, + rootHexes: Array.from(rootHexes).join(","), + }); + } +} + +function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex { + return { + epoch: checkpoint.epoch, + rootHex: toHexString(checkpoint.root), + }; +} + +function toCacheKey(cp: CheckpointHex | phase0.Checkpoint): CacheKey { + if (isCheckpointHex(cp)) { + return `${cp.rootHex}_${cp.epoch}`; + } + return `${toHexString(cp.root)}_${cp.epoch}`; +} + +function fromCacheKey(key: CacheKey): CheckpointHex { + const [rootHex, epoch] = key.split("_"); + return { + rootHex, + epoch: Number(epoch), + }; +} + +function isCachedBeaconState( + stateOrBytes: CachedBeaconStateAllForks | LoadedStateBytesData 
+): stateOrBytes is CachedBeaconStateAllForks { + return (stateOrBytes as CachedBeaconStateAllForks).slot !== undefined; +} + +function isInMemoryCacheItem(cacheItem: CacheItem): cacheItem is InMemoryCacheItem { + return cacheItem.type === CacheItemType.inMemory; +} + +function isPersistedCacheItem(cacheItem: CacheItem): cacheItem is PersistedCacheItem { + return cacheItem.type === CacheItemType.persisted; +} + +function isCheckpointHex(cp: CheckpointHex | phase0.Checkpoint): cp is CheckpointHex { + return (cp as CheckpointHex).rootHex !== undefined; +} diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts index 44523abf799c..3a04c4f4a258 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts +++ b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts @@ -4,15 +4,16 @@ import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; const MAX_STATES = 3 * 32; /** - * In memory cache of CachedBeaconState - * - * Similar API to Repository + * Old implementation of StateCache + * - Prune per checkpoint so number of states ranges from 96 to 128 + * - Keep a separate head state to make sure it is always available */ -export class StateContextCache { +export class StateContextCache implements BlockStateCache { /** * Max number of states allowed in the cache */ diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts index 0cb48f0e2ded..a177db9b7c87 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts @@ -5,6 +5,7 @@ import {MapDef} from "@lodestar/utils"; import 
{routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {CheckpointStateCache as CheckpointStateCacheInterface, CacheItemType} from "./types.js"; export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; const MAX_EPOCHS = 10; @@ -14,8 +15,9 @@ const MAX_EPOCHS = 10; * belonging to checkpoint * * Similar API to Repository + * TODO: rename to MemoryCheckpointStateCache in the next PR of n-historical states */ -export class CheckpointStateCache { +export class CheckpointStateCache implements CheckpointStateCacheInterface { private readonly cache: MapTracker; /** Epoch -> Set */ private readonly epochIndex = new MapDef>(() => new Set()); @@ -27,11 +29,32 @@ export class CheckpointStateCache { this.cache = new MapTracker(metrics?.cpStateCache); if (metrics) { this.metrics = metrics.cpStateCache; - metrics.cpStateCache.size.addCollect(() => metrics.cpStateCache.size.set(this.cache.size)); - metrics.cpStateCache.epochSize.addCollect(() => metrics.cpStateCache.epochSize.set(this.epochIndex.size)); + metrics.cpStateCache.size.addCollect(() => + metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, this.cache.size) + ); + metrics.cpStateCache.epochSize.addCollect(() => + metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, this.epochIndex.size) + ); } } + async getOrReload(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getStateOrBytes(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getOrReloadLatest(rootHex: string, maxEpoch: number): Promise { + return this.getLatest(rootHex, maxEpoch); + } + + async processState(): Promise { + // do nothing, this class does not support prunning + return 0; + } + get(cp: CheckpointHex): CachedBeaconStateAllForks | null { this.metrics?.lookups.inc(); const cpKey = toCheckpointKey(cp); diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts new 
file mode 100644 index 000000000000..5867d7d356c1 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -0,0 +1,73 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {Epoch, RootHex, phase0} from "@lodestar/types"; +import {routes} from "@lodestar/api"; + +export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; + +/** + * Lodestar currently keeps two state caches around. + * + * 1. BlockStateCache is keyed by state root, and intended to keep extremely recent states around (eg: post states from the latest blocks) + * These states are most likely to be useful for state transition of new blocks. + * + * 2. CheckpointStateCache is keyed by checkpoint, and intended to keep states which have just undergone an epoch transition. + * These states are useful for gossip verification and for avoiding an epoch transition during state transition of first-in-epoch blocks + */ + +/** + * Store up to n recent block states. + * + * The cache key is state root + */ +export interface BlockStateCache { + get(rootHex: RootHex): CachedBeaconStateAllForks | null; + add(item: CachedBeaconStateAllForks): void; + setHeadState(item: CachedBeaconStateAllForks | null): void; + clear(): void; + size: number; + prune(headStateRootHex: RootHex): void; + deleteAllBeforeEpoch(finalizedEpoch: Epoch): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +/** + * Store checkpoint states to preserve epoch transition, this helps lodestar run exactly 1 epoch transition per epoch in normal network conditions. 
+ * + * There are 2 types of checkpoint states: + * + * - Previous Root Checkpoint State: where root is from previous epoch, this is added when we prepare for next slot, + * or to validate gossip block + * ``` + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * ``` + * + * - Current Root Checkpoint State: this is added when we process block slot 0 of epoch n, note that this block could + * be skipped so we don't always have this checkpoint state + * ``` + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * ``` + */ +export interface CheckpointStateCache { + init?: () => Promise; + getOrReload(cp: CheckpointHex): Promise; + getStateOrBytes(cp: CheckpointHex): Promise; + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null; + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void; + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null; + getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise; + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; + prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; + pruneFinalized(finalizedEpoch: Epoch): void; + processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; + clear(): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +export enum CacheItemType { + persisted = "persisted", + inMemory = "in-memory", +} diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts index 31e105911ab4..eae171631025 100644 --- a/packages/beacon-node/src/chain/validation/attestation.ts +++ b/packages/beacon-node/src/chain/validation/attestation.ts @@ -541,7 +541,7 @@ export function verifyHeadBlockAndTargetRoot( targetRoot: Root, attestationSlot: Slot, attestationEpoch: Epoch, - caller: string, + caller: RegenCaller, maxSkipSlots?: number ): 
ProtoBlock { const headBlock = verifyHeadBlockIsKnown(chain, beaconBlockRoot); diff --git a/packages/beacon-node/src/chain/validation/blobSidecar.ts b/packages/beacon-node/src/chain/validation/blobSidecar.ts index b5aab323c269..f1ea7bfa95c8 100644 --- a/packages/beacon-node/src/chain/validation/blobSidecar.ts +++ b/packages/beacon-node/src/chain/validation/blobSidecar.ts @@ -1,7 +1,7 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {deneb, Root, Slot} from "@lodestar/types"; -import {toHex} from "@lodestar/utils"; -import {getBlobProposerSignatureSet, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import {deneb, Root, Slot, ssz} from "@lodestar/types"; +import {toHex, verifyMerkleBranch} from "@lodestar/utils"; +import {computeStartSlotAtEpoch, getBlockHeaderProposerSignatureSet} from "@lodestar/state-transition"; +import {KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, KZG_COMMITMENT_SUBTREE_INDEX0} from "@lodestar/params"; import {BlobSidecarGossipError, BlobSidecarErrorCode} from "../errors/blobSidecarError.js"; import {GossipAction} from "../errors/gossipValidation.js"; @@ -11,13 +11,11 @@ import {IBeaconChain} from "../interface.js"; import {RegenCaller} from "../regen/index.js"; export async function validateGossipBlobSidecar( - config: ChainForkConfig, chain: IBeaconChain, - signedBlob: deneb.SignedBlobSidecar, + blobSidecar: deneb.BlobSidecar, gossipIndex: number ): Promise { - const blobSidecar = signedBlob.message; - const blobSlot = blobSidecar.slot; + const blobSlot = blobSidecar.signedBlockHeader.message.slot; // [REJECT] The sidecar is for the correct topic -- i.e. sidecar.index matches the topic {index}. if (blobSidecar.index !== gossipIndex) { @@ -58,9 +56,10 @@ export async function validateGossipBlobSidecar( // reboot if the `observed_block_producers` cache is empty. In that case, without this // check, we will load the parent and state from disk only to find out later that we // already know this block. 
- const blockRoot = toHex(blobSidecar.blockRoot); - if (chain.forkChoice.getBlockHex(blockRoot) !== null) { - throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockRoot}); + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); + const blockHex = toHex(blockRoot); + if (chain.forkChoice.getBlockHex(blockHex) !== null) { + throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockHex}); } // TODO: freetheblobs - check for badblock @@ -69,7 +68,7 @@ export async function validateGossipBlobSidecar( // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both // gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is // retrieved). - const parentRoot = toHex(blobSidecar.blockParentRoot); + const parentRoot = toHex(blobSidecar.signedBlockHeader.message.parentRoot); const parentBlock = chain.forkChoice.getBlockHex(parentRoot); if (parentBlock === null) { // If fork choice does *not* consider the parent to be a descendant of the finalized block, @@ -97,18 +96,16 @@ export async function validateGossipBlobSidecar( // getBlockSlotState also checks for whether the current finalized checkpoint is an ancestor of the block. // As a result, we throw an IGNORE (whereas the spec says we should REJECT for this scenario). // this is something we should change this in the future to make the code airtight to the spec. 
- // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both - // gossip and non-gossip sources) // _[REJECT]_ The blob's block's parent (defined by `sidecar.block_parent_root`) passes validation - // The above validation will happen while importing + // [IGNORE] The block's parent (defined by block.parent_root) has been seen (via both gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is retrieved). + // [REJECT] The block's parent (defined by block.parent_root) passes validation. const blockState = await chain.regen - .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlob) + .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlock) .catch(() => { throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.PARENT_UNKNOWN, parentRoot}); }); - // _[REJECT]_ The proposer signature, `signed_blob_sidecar.signature`, is valid with respect to the - // `sidecar.proposer_index` pubkey. - const signatureSet = getBlobProposerSignatureSet(blockState, signedBlob); + // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. 
+ const signatureSet = getBlockHeaderProposerSignatureSet(blockState, blobSidecar.signedBlockHeader); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlobSidecarGossipError(GossipAction.REJECT, { @@ -116,6 +113,15 @@ export async function validateGossipBlobSidecar( }); } + // verify if the blob inclusion proof is correct + if (!validateInclusionProof(blobSidecar)) { + throw new BlobSidecarGossipError(GossipAction.REJECT, { + code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID, + slot: blobSidecar.signedBlockHeader.message.slot, + blobIdx: blobSidecar.index, + }); + } + // _[IGNORE]_ The sidecar is the only sidecar with valid signature received for the tuple // `(sidecar.block_root, sidecar.index)` // @@ -127,7 +133,7 @@ export async function validateGossipBlobSidecar( // If the `proposer_index` cannot immediately be verified against the expected shuffling, the sidecar // MAY be queued for later processing while proposers for the block's branch are calculated -- in such // a case _do not_ `REJECT`, instead `IGNORE` this message. 
- const proposerIndex = blobSidecar.proposerIndex; + const proposerIndex = blobSidecar.signedBlockHeader.message.proposerIndex; if (blockState.epochCtx.getBeaconProposer(blobSlot) !== proposerIndex) { throw new BlobSidecarGossipError(GossipAction.REJECT, { code: BlobSidecarErrorCode.INCORRECT_PROPOSER, @@ -168,16 +174,18 @@ export function validateBlobSidecars( const proofs = []; for (let index = 0; index < blobSidecars.length; index++) { const blobSidecar = blobSidecars[index]; + const blobBlockHeader = blobSidecar.signedBlockHeader.message; + const blobBlockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobBlockHeader); if ( - blobSidecar.slot !== blockSlot || - !byteArrayEquals(blobSidecar.blockRoot, blockRoot) || + blobBlockHeader.slot !== blockSlot || + !byteArrayEquals(blobBlockRoot, blockRoot) || blobSidecar.index !== index || !byteArrayEquals(expectedKzgCommitments[index], blobSidecar.kzgCommitment) ) { throw new Error( - `Invalid blob with slot=${blobSidecar.slot} blockRoot=${toHex(blockRoot)} index=${ + `Invalid blob with slot=${blobBlockHeader.slot} blobBlockRoot=${toHex(blobBlockRoot)} index=${ blobSidecar.index - } for the block root=${toHex(blockRoot)} slot=${blockSlot} index=${index}` + } for the block blockRoot=${toHex(blockRoot)} slot=${blockSlot} index=${index}` ); } blobs.push(blobSidecar.blob); @@ -207,3 +215,13 @@ function validateBlobsAndProofs( throw Error("Invalid verifyBlobKzgProofBatch"); } } + +function validateInclusionProof(blobSidecar: deneb.BlobSidecar): boolean { + return verifyMerkleBranch( + ssz.deneb.KZGCommitment.hashTreeRoot(blobSidecar.kzgCommitment), + blobSidecar.kzgCommitmentInclusionProof, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENT_SUBTREE_INDEX0 + blobSidecar.index, + blobSidecar.signedBlockHeader.message.bodyRoot + ); +} diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 58b99f2a37e0..07cc47fa54d8 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ 
b/packages/beacon-node/src/db/beacon.ts @@ -21,6 +21,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; export type BeaconDbModules = { config: ChainForkConfig; @@ -35,6 +36,7 @@ export class BeaconDb implements IBeaconDb { blobSidecarsArchive: BlobSidecarsArchiveRepository; stateArchive: StateArchiveRepository; + checkpointState: CheckpointStateRepository; voluntaryExit: VoluntaryExitRepository; proposerSlashing: ProposerSlashingRepository; @@ -67,6 +69,7 @@ export class BeaconDb implements IBeaconDb { this.blobSidecarsArchive = new BlobSidecarsArchiveRepository(config, db); this.stateArchive = new StateArchiveRepository(config, db); + this.checkpointState = new CheckpointStateRepository(config, db); this.voluntaryExit = new VoluntaryExitRepository(config, db); this.blsToExecutionChange = new BLSToExecutionChangeRepository(config, db); this.proposerSlashing = new ProposerSlashingRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1a3abfa33623..9dffd0608d52 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -28,6 +28,8 @@ export enum Bucket { phase0_proposerSlashing = 14, // ValidatorIndex -> ProposerSlashing phase0_attesterSlashing = 15, // Root -> AttesterSlashing capella_blsToExecutionChange = 16, // ValidatorIndex -> SignedBLSToExecutionChange + // checkpoint states + allForks_checkpointState = 17, // Root -> allForks.BeaconState // allForks_pendingBlock = 25, // Root -> SignedBeaconBlock // DEPRECATED on v0.30.0 phase0_depositEvent = 19, // depositIndex -> DepositEvent diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 58bf25c57aa7..6ffb8992f635 100644 --- a/packages/beacon-node/src/db/interface.ts +++ 
b/packages/beacon-node/src/db/interface.ts @@ -19,6 +19,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; /** * The DB service manages the data layer of the beacon chain @@ -36,6 +37,8 @@ export interface IBeaconDb { // finalized states stateArchive: StateArchiveRepository; + // checkpoint states + checkpointState: CheckpointStateRepository; // op pool voluntaryExit: VoluntaryExitRepository; diff --git a/packages/beacon-node/src/db/repositories/blobSidecars.ts b/packages/beacon-node/src/db/repositories/blobSidecars.ts index 576a03df9e61..e5750ed31b58 100644 --- a/packages/beacon-node/src/db/repositories/blobSidecars.ts +++ b/packages/beacon-node/src/db/repositories/blobSidecars.ts @@ -2,6 +2,7 @@ import {ValueOf, ContainerType} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {Db, Repository} from "@lodestar/db"; import {ssz} from "@lodestar/types"; + import {Bucket, getBucketNameByValue} from "../buckets.js"; export const blobSidecarsWrapperSsz = new ContainerType( @@ -14,10 +15,7 @@ export const blobSidecarsWrapperSsz = new ContainerType( ); export type BlobSidecarsWrapper = ValueOf; - export const BLOB_SIDECARS_IN_WRAPPER_INDEX = 44; -// ssz.deneb.BlobSidecars.elementType.fixedSize; -export const BLOBSIDECAR_FIXED_SIZE = 131256; /** * blobSidecarsWrapper by block root (= hash_tree_root(SignedBeaconBlock.message)) diff --git a/packages/beacon-node/src/db/repositories/checkpointState.ts b/packages/beacon-node/src/db/repositories/checkpointState.ts new file mode 100644 index 000000000000..8848f4d26d3a --- /dev/null +++ b/packages/beacon-node/src/db/repositories/checkpointState.ts @@ -0,0 +1,31 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {BeaconStateAllForks} from 
"@lodestar/state-transition"; +import {ssz} from "@lodestar/types"; +import {Bucket, getBucketNameByValue} from "../buckets.js"; + +/** + * Store temporary checkpoint states. + * We should only put/get binary data from this repository, consumer will load it into an existing state ViewDU object. + */ +export class CheckpointStateRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + // Pick some type but won't be used. Casted to any because no type can match `BeaconStateAllForks` + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + const type = ssz.phase0.BeaconState as any; + const bucket = Bucket.allForks_checkpointState; + super(config, db, bucket, type, getBucketNameByValue(bucket)); + } + + getId(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + encodeValue(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + decodeValue(): BeaconStateAllForks { + throw Error("CheckpointStateRepository does not work with value"); + } +} diff --git a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts index 3a1b4ddb0ce1..faa4e310e10a 100644 --- a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts +++ b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts @@ -1,8 +1,7 @@ import {EventEmitter} from "events"; import StrictEventEmitter from "strict-event-emitter-types"; import {fetch} from "@lodestar/api"; -import {ErrorAborted, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils"; -import {IGauge, IHistogram} from "../../metrics/interface.js"; +import {ErrorAborted, Gauge, Histogram, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils"; import {IJson, RpcPayload} from "../interface.js"; import {JwtClaim, encodeJwtToken} from "./jwt.js"; @@ -58,13 +57,13 @@ export type ReqOpts = { }; export type JsonRpcHttpClientMetrics = 
{ - requestTime: IHistogram<"routeId">; - streamTime: IHistogram<"routeId">; - requestErrors: IGauge<"routeId">; - requestUsedFallbackUrl: IGauge<"routeId">; - activeRequests: IGauge<"routeId">; - configUrlsCount: IGauge; - retryCount: IGauge<"routeId">; + requestTime: Histogram<{routeId: string}>; + streamTime: Histogram<{routeId: string}>; + requestErrors: Gauge<{routeId: string}>; + requestUsedFallbackUrl: Gauge<{routeId: string}>; + activeRequests: Gauge<{routeId: string}>; + configUrlsCount: Gauge; + retryCount: Gauge<{routeId: string}>; }; export interface IJsonRpcHttpClient { diff --git a/packages/beacon-node/src/execution/builder/http.ts b/packages/beacon-node/src/execution/builder/http.ts index 20b7d4751c81..c47e8471f199 100644 --- a/packages/beacon-node/src/execution/builder/http.ts +++ b/packages/beacon-node/src/execution/builder/http.ts @@ -1,10 +1,6 @@ import {byteArrayEquals, toHexString} from "@chainsafe/ssz"; import {allForks, bellatrix, Slot, Root, BLSPubkey, ssz, deneb, Wei} from "@lodestar/types"; -import { - parseSignedBlindedBlockOrContents, - parseExecutionPayloadAndBlobsBundle, - reconstructFullBlockOrContents, -} from "@lodestar/state-transition"; +import {parseExecutionPayloadAndBlobsBundle, reconstructFullBlockOrContents} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {Logger} from "@lodestar/logger"; import {getClient, Api as BuilderApi} from "@lodestar/api/builder"; @@ -110,26 +106,23 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }> { const res = await this.api.getHeader(slot, parentHash, proposerPubKey); ApiError.assert(res, "execution.builder.getheader"); const {header, value: executionPayloadValue} = res.response.data.message; - const {blindedBlobsBundle} = res.response.data.message as 
deneb.BuilderBid; - return {header, executionPayloadValue, blindedBlobsBundle}; + const {blobKzgCommitments} = res.response.data.message as deneb.BuilderBid; + return {header, executionPayloadValue, blobKzgCommitments}; } async submitBlindedBlock( - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents + signedBlindedBlock: allForks.SignedBlindedBeaconBlock ): Promise { - const res = await this.api.submitBlindedBlock(signedBlindedBlockOrContents); + const res = await this.api.submitBlindedBlock(signedBlindedBlock); ApiError.assert(res, "execution.builder.submitBlindedBlock"); const {data} = res.response; const {executionPayload, blobsBundle} = parseExecutionPayloadAndBlobsBundle(data); - const {signedBlindedBlock, signedBlindedBlobSidecars} = - parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents); - // some validations for execution payload const expectedTransactionsRoot = signedBlindedBlock.message.body.executionPayloadHeader.transactionsRoot; const actualTransactionsRoot = ssz.bellatrix.Transactions.hashTreeRoot(executionPayload.transactions); @@ -141,7 +134,7 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { ); } - const blobs = blobsBundle ? blobsBundle.blobs : null; - return reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}); + const contents = blobsBundle ? 
{blobs: blobsBundle.blobs, kzgProofs: blobsBundle.proofs} : null; + return reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents}); } } diff --git a/packages/beacon-node/src/execution/builder/interface.ts b/packages/beacon-node/src/execution/builder/interface.ts index e9a2cabb69ef..8754a3616610 100644 --- a/packages/beacon-node/src/execution/builder/interface.ts +++ b/packages/beacon-node/src/execution/builder/interface.ts @@ -25,9 +25,7 @@ export interface IExecutionBuilder { ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }>; - submitBlindedBlock( - signedBlock: allForks.SignedBlindedBeaconBlockOrContents - ): Promise; + submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise; } diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts index 70df97ba1e4a..91ceabaf2770 100644 --- a/packages/beacon-node/src/execution/engine/http.ts +++ b/packages/beacon-node/src/execution/engine/http.ts @@ -363,7 +363,12 @@ export class ExecutionEngineHttp implements IExecutionEngine { async getPayload( fork: ForkName, payloadId: PayloadId - ): Promise<{executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle}> { + ): Promise<{ + executionPayload: allForks.ExecutionPayload; + executionPayloadValue: Wei; + blobsBundle?: BlobsBundle; + shouldOverrideBuilder?: boolean; + }> { const method = ForkSeq[fork] >= ForkSeq.deneb ? 
"engine_getPayloadV3" diff --git a/packages/beacon-node/src/execution/engine/interface.ts b/packages/beacon-node/src/execution/engine/interface.ts index 9a7ee3963379..e5f612fc0965 100644 --- a/packages/beacon-node/src/execution/engine/interface.ts +++ b/packages/beacon-node/src/execution/engine/interface.ts @@ -136,7 +136,12 @@ export interface IExecutionEngine { getPayload( fork: ForkName, payloadId: PayloadId - ): Promise<{executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle}>; + ): Promise<{ + executionPayload: allForks.ExecutionPayload; + executionPayloadValue: Wei; + blobsBundle?: BlobsBundle; + shouldOverrideBuilder?: boolean; + }>; getPayloadBodiesByHash(blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>; diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index 83a5ea3a7ed6..5779713435a5 100644 --- a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -1,5 +1,4 @@ import crypto from "node:crypto"; -import {kzgCommitmentToVersionedHash} from "@lodestar/state-transition"; import {bellatrix, deneb, RootHex, ssz} from "@lodestar/types"; import {fromHex, toHex} from "@lodestar/utils"; import { @@ -12,6 +11,7 @@ import { } from "@lodestar/params"; import {ZERO_HASH_HEX} from "../../constants/index.js"; import {ckzg} from "../../util/kzg.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {quantityToNum} from "../../eth1/provider/utils.js"; import { EngineApiRpcParamTypes, diff --git a/packages/beacon-node/src/execution/engine/types.ts b/packages/beacon-node/src/execution/engine/types.ts index 4f24480e0b96..72a0100f7a51 100644 --- a/packages/beacon-node/src/execution/engine/types.ts +++ b/packages/beacon-node/src/execution/engine/types.ts @@ -107,6 +107,7 @@ type ExecutionPayloadRpcWithValue = { // even though CL tracks this as executionPayloadValue, EL returns 
this as blockValue blockValue: QUANTITY; blobsBundle?: BlobsBundleRpc; + shouldOverrideBuilder?: boolean; }; type ExecutionPayloadResponse = ExecutionPayloadRpc | ExecutionPayloadRpcWithValue; @@ -207,19 +208,28 @@ export function hasPayloadValue(response: ExecutionPayloadResponse): response is export function parseExecutionPayload( fork: ForkName, response: ExecutionPayloadResponse -): {executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle} { +): { + executionPayload: allForks.ExecutionPayload; + executionPayloadValue: Wei; + blobsBundle?: BlobsBundle; + shouldOverrideBuilder?: boolean; +} { let data: ExecutionPayloadRpc; let executionPayloadValue: Wei; let blobsBundle: BlobsBundle | undefined; + let shouldOverrideBuilder: boolean; + if (hasPayloadValue(response)) { executionPayloadValue = quantityToBigint(response.blockValue); data = response.executionPayload; blobsBundle = response.blobsBundle ? parseBlobsBundle(response.blobsBundle) : undefined; + shouldOverrideBuilder = response.shouldOverrideBuilder ?? 
false; } else { data = response; // Just set it to zero as default executionPayloadValue = BigInt(0); blobsBundle = undefined; + shouldOverrideBuilder = false; } const executionPayload = { @@ -269,7 +279,7 @@ export function parseExecutionPayload( (executionPayload as deneb.ExecutionPayload).excessBlobGas = quantityToBigint(excessBlobGas); } - return {executionPayload, executionPayloadValue, blobsBundle}; + return {executionPayload, executionPayloadValue, blobsBundle, shouldOverrideBuilder}; } export function serializePayloadAttributes(data: PayloadAttributes): PayloadAttributesRpc { diff --git a/packages/beacon-node/src/metrics/index.ts b/packages/beacon-node/src/metrics/index.ts index fb2781333d66..a56591a04090 100644 --- a/packages/beacon-node/src/metrics/index.ts +++ b/packages/beacon-node/src/metrics/index.ts @@ -1,5 +1,4 @@ export * from "./metrics.js"; export * from "./server/index.js"; -export * from "./interface.js"; export * from "./nodeJsMetrics.js"; export {RegistryMetricCreator} from "./utils/registryMetricCreator.js"; diff --git a/packages/beacon-node/src/metrics/interface.ts b/packages/beacon-node/src/metrics/interface.ts deleted file mode 100644 index 2e2a267ca13c..000000000000 --- a/packages/beacon-node/src/metrics/interface.ts +++ /dev/null @@ -1,14 +0,0 @@ -import {Gauge, Histogram} from "prom-client"; - -type CollectFn = (metric: IGauge) => void; - -export type IGauge = Pick, "inc" | "dec" | "set"> & { - addCollect: (collectFn: CollectFn) => void; -}; - -export type IHistogram = Pick, "observe" | "startTimer">; - -export type IAvgMinMax = { - addGetValuesFn(getValuesFn: () => number[]): void; - set(values: number[]): void; -}; diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts b/packages/beacon-node/src/metrics/metrics/beacon.ts index 8d9094f19a25..9366174ef6c6 100644 --- a/packages/beacon-node/src/metrics/metrics/beacon.ts +++ b/packages/beacon-node/src/metrics/metrics/beacon.ts @@ -1,4 +1,6 @@ +import {ProducedBlockSource} from 
"@lodestar/types"; import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {BlockProductionStep, PayloadPreparationType} from "../../chain/produceBlock/index.js"; export type BeaconMetrics = ReturnType; @@ -46,7 +48,7 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { // Additional Metrics // TODO: Implement - currentValidators: register.gauge<"status">({ + currentValidators: register.gauge<{status: string}>({ name: "beacon_current_validators", labelNames: ["status"], help: "number of validators in current epoch", @@ -115,55 +117,35 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { buckets: [1, 2, 3, 5, 7, 10, 20, 30, 50, 100], }), - blockProductionTime: register.histogram<"source">({ + blockProductionTime: register.histogram<{source: ProducedBlockSource}>({ name: "beacon_block_production_seconds", help: "Full runtime of block production", buckets: [0.1, 1, 2, 4, 10], labelNames: ["source"], }), - executionBlockProductionTimeSteps: register.histogram<"step">({ + executionBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({ name: "beacon_block_production_execution_steps_seconds", help: "Detailed steps runtime of execution block production", buckets: [0.01, 0.1, 0.2, 0.5, 1], - /** - * - proposerSlashing - * - attesterSlashings - * - voluntaryExits - * - blsToExecutionChanges - * - attestations - * - eth1DataAndDeposits - * - syncAggregate - * - executionPayload - */ labelNames: ["step"], }), - builderBlockProductionTimeSteps: register.histogram<"step">({ + builderBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({ name: "beacon_block_production_builder_steps_seconds", help: "Detailed steps runtime of builder block production", buckets: [0.01, 0.1, 0.2, 0.5, 1], - /** - * - proposerSlashing - * - attesterSlashings - * - voluntaryExits - * - blsToExecutionChanges - * - attestations - * - eth1DataAndDeposits - * - syncAggregate - * - executionPayload - 
*/ labelNames: ["step"], }), - blockProductionRequests: register.gauge<"source">({ + blockProductionRequests: register.gauge<{source: ProducedBlockSource}>({ name: "beacon_block_production_requests_total", help: "Count of all block production requests", labelNames: ["source"], }), - blockProductionSuccess: register.gauge<"source">({ + blockProductionSuccess: register.gauge<{source: ProducedBlockSource}>({ name: "beacon_block_production_successes_total", help: "Count of blocks successfully produced", labelNames: ["source"], }), - blockProductionNumAggregated: register.histogram<"source">({ + blockProductionNumAggregated: register.histogram<{source: ProducedBlockSource}>({ name: "beacon_block_production_num_aggregated_total", help: "Count of all aggregated attestations in our produced block", buckets: [32, 64, 96, 128], @@ -173,34 +155,30 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { blockProductionCaches: { producedBlockRoot: register.gauge({ name: "beacon_blockroot_produced_cache_total", - help: "Count of cached produded block roots", + help: "Count of cached produced block roots", }), producedBlindedBlockRoot: register.gauge({ name: "beacon_blinded_blockroot_produced_cache_total", - help: "Count of cached produded blinded block roots", + help: "Count of cached produced blinded block roots", }), - producedBlobSidecarsCache: register.gauge({ - name: "beacon_blobsidecars_produced_cache_total", - help: "Count of cached produced blob sidecars", - }), - producedBlindedBlobSidecarsCache: register.gauge({ - name: "beacon_blinded_blobsidecars_produced_cache_total", - help: "Count of cached produced blinded blob sidecars", + producedContentsCache: register.gauge({ + name: "beacon_contents_produced_cache_total", + help: "Count of cached produced blob contents", }), }, blockPayload: { payloadAdvancePrepTime: register.histogram({ name: "beacon_block_payload_prepare_time", - help: "Time for perparing payload in advance", + help: "Time for preparing 
payload in advance", buckets: [0.1, 1, 3, 5, 10], }), - payloadFetchedTime: register.histogram<"prepType">({ + payloadFetchedTime: register.histogram<{prepType: PayloadPreparationType}>({ name: "beacon_block_payload_fetched_time", help: "Time to fetch the payload from EL", labelNames: ["prepType"], }), - emptyPayloads: register.gauge<"prepType">({ + emptyPayloads: register.gauge<{prepType: PayloadPreparationType}>({ name: "beacon_block_payload_empty_total", help: "Count of payload with empty transactions", labelNames: ["prepType"], diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 8a22fe8f0a9b..f6b143913346 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1,6 +1,22 @@ +import {EpochTransitionStep, StateCloneSource, StateHashTreeRootSource} from "@lodestar/state-transition"; import {allForks} from "@lodestar/types"; -import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {BlockSource} from "../../chain/blocks/types.js"; +import {JobQueueItemType} from "../../chain/bls/index.js"; +import {BlockErrorCode} from "../../chain/errors/index.js"; +import {InsertOutcome} from "../../chain/opPools/types.js"; +import {RegenCaller, RegenFnName} from "../../chain/regen/interface.js"; +import {ReprocessStatus} from "../../chain/reprocess.js"; +import {RejectReason} from "../../chain/seenCache/seenAttestationData.js"; +import {ExecutionPayloadStatus} from "../../execution/index.js"; +import {GossipType} from "../../network/index.js"; +import {CannotAcceptWorkReason, ReprocessRejectReason} from "../../network/processor/index.js"; +import {BackfillSyncMethod} from "../../sync/backfill/backfill.js"; +import {PendingBlockType} from "../../sync/interface.js"; +import {PeerSyncType, RangeSyncType} from "../../sync/utils/remoteSyncType.js"; import {LodestarMetadata} from "../options.js"; +import 
{RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {OpSource} from "../validatorMonitor.js"; +import {CacheItemType} from "../../chain/stateCache/types.js"; export type LodestarMetrics = ReturnType; @@ -14,7 +30,7 @@ export function createLodestarMetrics( anchorState?: Pick ) { if (metadata) { - register.static({ + register.static({ name: "lodestar_version", help: "Lodestar version", value: metadata, @@ -33,34 +49,34 @@ export function createLodestarMetrics( return { gossipValidationQueue: { - length: register.gauge<"topic">({ + length: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_length", help: "Count of total gossip validation queue length", labelNames: ["topic"], }), - keySize: register.gauge<"topic">({ + keySize: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_key_size", help: "Count of total gossip validation queue key size", labelNames: ["topic"], }), - droppedJobs: register.gauge<"topic">({ + droppedJobs: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_dropped_jobs_total", help: "Count of total gossip validation queue dropped jobs", labelNames: ["topic"], }), - jobTime: register.histogram<"topic">({ + jobTime: register.histogram<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_job_time_seconds", help: "Time to process gossip validation queue job in seconds", labelNames: ["topic"], buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], }), - jobWaitTime: register.histogram<"topic">({ + jobWaitTime: register.histogram<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_job_wait_time_seconds", help: "Time from job added to the queue to starting the job in seconds", labelNames: ["topic"], buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], }), - concurrency: register.gauge<"topic">({ + concurrency: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_concurrency", help: "Current count of jobs 
being run on network processor for topic", labelNames: ["topic"], @@ -79,22 +95,22 @@ export function createLodestarMetrics( }, networkProcessor: { - gossipValidationAccept: register.gauge<"topic">({ + gossipValidationAccept: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_accept_total", help: "Count of total gossip validation accept", labelNames: ["topic"], }), - gossipValidationIgnore: register.gauge<"topic">({ + gossipValidationIgnore: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_ignore_total", help: "Count of total gossip validation ignore", labelNames: ["topic"], }), - gossipValidationReject: register.gauge<"topic">({ + gossipValidationReject: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_reject_total", help: "Count of total gossip validation reject", labelNames: ["topic"], }), - gossipValidationError: register.gauge<"topic" | "error">({ + gossipValidationError: register.gauge<{topic: GossipType; error: string}>({ name: "lodestar_gossip_validation_error_total", help: "Count of total gossip validation errors detailed", labelNames: ["topic", "error"], @@ -108,7 +124,7 @@ export function createLodestarMetrics( help: "Total calls to network processor execute work fn", buckets: [0, 1, 5, 128], }), - canNotAcceptWork: register.gauge<"reason">({ + canNotAcceptWork: register.gauge<{reason: CannotAcceptWorkReason}>({ name: "lodestar_network_processor_can_not_accept_work_total", help: "Total times network processor can not accept work on executeWork", labelNames: ["reason"], @@ -121,7 +137,7 @@ export function createLodestarMetrics( help: "Current count of pending items in reqRespBridgeReqCaller data structure", }), }, - networkWorkerWireEventsOnMainThreadLatency: register.histogram<"eventName">({ + networkWorkerWireEventsOnMainThreadLatency: register.histogram<{eventName: string}>({ name: "lodestar_network_worker_wire_events_on_main_thread_latency_seconds", help: "Latency in seconds to 
transmit network events to main thread across worker port", labelNames: ["eventName"], @@ -206,19 +222,19 @@ export function createLodestarMetrics( }, apiRest: { - responseTime: register.histogram<"operationId">({ + responseTime: register.histogram<{operationId: string}>({ name: "lodestar_api_rest_response_time_seconds", help: "REST API time to fulfill a request by operationId", labelNames: ["operationId"], // Request times range between 1ms to 100ms in normal conditions. Can get to 1-5 seconds if overloaded buckets: [0.01, 0.1, 1], }), - requests: register.gauge<"operationId">({ + requests: register.gauge<{operationId: string}>({ name: "lodestar_api_rest_requests_total", help: "REST API total count requests by operationId", labelNames: ["operationId"], }), - errors: register.gauge<"operationId">({ + errors: register.gauge<{operationId: string}>({ name: "lodestar_api_rest_errors_total", help: "REST API total count of errors by operationId", labelNames: ["operationId"], @@ -286,7 +302,7 @@ export function createLodestarMetrics( help: "Time to call commit after process a single epoch transition in seconds", buckets: [0.01, 0.05, 0.1, 0.2, 0.5, 0.75, 1], }), - epochTransitionStepTime: register.histogram<"step">({ + epochTransitionStepTime: register.histogram<{step: EpochTransitionStep}>({ name: "lodestar_stfn_epoch_transition_step_seconds", help: "Time to call each step of epoch transition in seconds", labelNames: ["step"], @@ -304,28 +320,28 @@ export function createLodestarMetrics( help: "Time to call commit after process a single block in seconds", buckets: [0.005, 0.01, 0.02, 0.05, 0.1, 1], }), - stateHashTreeRootTime: register.histogram<"source">({ + stateHashTreeRootTime: register.histogram<{source: StateHashTreeRootSource}>({ name: "lodestar_stfn_hash_tree_root_seconds", help: "Time to compute the hash tree root of a post state in seconds", buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5], labelNames: ["source"], }), - preStateBalancesNodesPopulatedMiss: 
register.gauge<"source">({ + preStateBalancesNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", labelNames: ["source"], }), - preStateBalancesNodesPopulatedHit: register.gauge<"source">({ + preStateBalancesNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_hit_total", help: "Total count state.balances nodesPopulated is true on stfn", labelNames: ["source"], }), - preStateValidatorsNodesPopulatedMiss: register.gauge<"source">({ + preStateValidatorsNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_validators_nodes_populated_miss_total", help: "Total count state.validators nodesPopulated is false on stfn", labelNames: ["source"], }), - preStateValidatorsNodesPopulatedHit: register.gauge<"source">({ + preStateValidatorsNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_validators_nodes_populated_hit_total", help: "Total count state.validators nodesPopulated is true on stfn", labelNames: ["source"], @@ -362,7 +378,7 @@ export function createLodestarMetrics( }, blsThreadPool: { - jobsWorkerTime: register.gauge<"workerId">({ + jobsWorkerTime: register.gauge<{workerId: number}>({ name: "lodestar_bls_thread_pool_time_seconds_sum", help: "Total time spent verifying signature sets measured on the worker", labelNames: ["workerId"], @@ -371,7 +387,7 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_success_jobs_signature_sets_count", help: "Count of total verified signature sets", }), - errorAggregateSignatureSetsCount: register.gauge<"type">({ + errorAggregateSignatureSetsCount: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_error_aggregate_signature_sets_count", help: "Count of error when aggregating pubkeys or signatures", labelNames: ["type"], @@ -397,12 
+413,12 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_job_groups_started_total", help: "Count of total jobs groups started in bls thread pool, job groups include +1 jobs", }), - totalJobsStarted: register.gauge<"type">({ + totalJobsStarted: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_jobs_started_total", help: "Count of total jobs started in bls thread pool, jobs include +1 signature sets", labelNames: ["type"], }), - totalSigSetsStarted: register.gauge<"type">({ + totalSigSetsStarted: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_sig_sets_started_total", help: "Count of total signature sets started in bls thread pool, sig sets include 1 pk, msg, sig", labelNames: ["type"], @@ -460,9 +476,15 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_batchable_sig_sets_total", help: "Count of total batchable signature sets", }), - signatureDeserializationMainThreadDuration: register.gauge({ + signatureDeserializationMainThreadDuration: register.histogram({ name: "lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds", help: "Total time spent deserializing signatures on main thread", + buckets: [0.001, 0.005, 0.01, 0.1], + }), + pubkeysAggregationMainThreadDuration: register.histogram({ + name: "lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds", + help: "Total time spent aggregating pubkeys on main thread", + buckets: [0.001, 0.005, 0.01, 0.1], }), }, @@ -487,29 +509,29 @@ export function createLodestarMetrics( name: "lodestar_sync_status", help: "Range sync status: [Stalled, SyncingFinalized, SyncingHead, Synced]", }), - syncPeersBySyncType: register.gauge<"syncType">({ + syncPeersBySyncType: register.gauge<{syncType: PeerSyncType}>({ name: "lodestar_sync_range_sync_peers", help: "Count of peers by sync type [FullySynced, Advanced, Behind]", labelNames: ["syncType"], }), - syncSwitchGossipSubscriptions: 
register.gauge<"action">({ + syncSwitchGossipSubscriptions: register.gauge<{action: string}>({ name: "lodestar_sync_switch_gossip_subscriptions", help: "Sync switched gossip subscriptions on/off", labelNames: ["action"], }), syncRange: { - syncChainsEvents: register.gauge<"syncType" | "event">({ + syncChainsEvents: register.gauge<{syncType: RangeSyncType; event: string}>({ name: "lodestar_sync_chains_events_total", help: "Total number of sync chains events events, labeled by syncType", labelNames: ["syncType", "event"], }), - syncChains: register.gauge<"syncType">({ + syncChains: register.gauge<{syncType: RangeSyncType}>({ name: "lodestar_sync_chains_count", help: "Count of sync chains by syncType", labelNames: ["syncType"], }), - syncChainsPeers: register.histogram<"syncType">({ + syncChainsPeers: register.histogram<{syncType: RangeSyncType}>({ name: "lodestar_sync_chains_peer_count_by_type", help: "Count of sync chain peers by syncType", labelNames: ["syncType"], @@ -522,12 +544,12 @@ export function createLodestarMetrics( }, syncUnknownBlock: { - switchNetworkSubscriptions: register.gauge<"action">({ + switchNetworkSubscriptions: register.gauge<{action: string}>({ name: "lodestar_sync_unknown_block_network_subscriptions_count", help: "Switch network subscriptions on/off", labelNames: ["action"], }), - requests: register.gauge<"type">({ + requests: register.gauge<{type: PendingBlockType}>({ name: "lodestar_sync_unknown_block_requests_total", help: "Total number of unknown block events or requests", labelNames: ["type"], @@ -581,43 +603,43 @@ export function createLodestarMetrics( // Gossip attestation gossipAttestation: { - useHeadBlockState: register.gauge<"caller">({ + useHeadBlockState: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_use_head_block_state_count", help: "Count of gossip attestation verification using head block state", labelNames: ["caller"], }), - useHeadBlockStateDialedToTargetEpoch: register.gauge<"caller">({ + 
useHeadBlockStateDialedToTargetEpoch: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_use_head_block_state_dialed_to_target_epoch_count", help: "Count of gossip attestation verification using head block state and dialed to target epoch", labelNames: ["caller"], }), - headSlotToAttestationSlot: register.histogram<"caller">({ + headSlotToAttestationSlot: register.histogram<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_head_slot_to_attestation_slot", help: "Slot distance between attestation slot and head slot", labelNames: ["caller"], buckets: [0, 1, 2, 4, 8, 16, 32, 64], }), - shufflingCacheHit: register.gauge<"caller">({ + shufflingCacheHit: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_hit_count", help: "Count of gossip attestation verification shuffling cache hit", labelNames: ["caller"], }), - shufflingCacheMiss: register.gauge<"caller">({ + shufflingCacheMiss: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_miss_count", help: "Count of gossip attestation verification shuffling cache miss", labelNames: ["caller"], }), - shufflingCacheRegenHit: register.gauge<"caller">({ + shufflingCacheRegenHit: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_regen_hit_count", help: "Count of gossip attestation verification shuffling cache regen hit", labelNames: ["caller"], }), - shufflingCacheRegenMiss: register.gauge<"caller">({ + shufflingCacheRegenMiss: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_regen_miss_count", help: "Count of gossip attestation verification shuffling cache regen miss", labelNames: ["caller"], }), - attestationSlotToClockSlot: register.histogram<"caller">({ + attestationSlotToClockSlot: register.histogram<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_attestation_slot_to_clock_slot", help: "Slot distance between clock slot and 
attestation slot", labelNames: ["caller"], @@ -649,29 +671,46 @@ export function createLodestarMetrics( receivedToGossipValidate: register.histogram({ name: "lodestar_gossip_block_received_to_gossip_validate", help: "Time elapsed between block received and block validated", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToStateTransition: register.histogram({ name: "lodestar_gossip_block_received_to_state_transition", help: "Time elapsed between block received and block state transition", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToSignaturesVerification: register.histogram({ name: "lodestar_gossip_block_received_to_signatures_verification", help: "Time elapsed between block received and block signatures verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToExecutionPayloadVerification: register.histogram({ name: "lodestar_gossip_block_received_to_execution_payload_verification", help: "Time elapsed between block received and execution payload verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + }), + receivedToBlobsAvailabilityTime: register.histogram<{numBlobs: number}>({ + name: "lodestar_gossip_block_received_to_blobs_availability_time", + help: "Time elapsed between block received and blobs became available", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + labelNames: ["numBlobs"], + }), + receivedToFullyVerifiedTime: register.histogram({ + name: "lodestar_gossip_block_received_to_fully_verified_time", + help: "Time elapsed between block received and fully verified state, signatures and payload", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + }), + 
verifiedToBlobsAvailabiltyTime: register.histogram<{numBlobs: number}>({ + name: "lodestar_gossip_block_verified_to_blobs_availability_time", + help: "Time elapsed between block verified and blobs became available", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + labelNames: ["numBlobs"], }), receivedToBlockImport: register.histogram({ name: "lodestar_gossip_block_received_to_block_import", help: "Time elapsed between block received and block import", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), - processBlockErrors: register.gauge<"error">({ + processBlockErrors: register.gauge<{error: BlockErrorCode | "NOT_BLOCK_ERROR"}>({ name: "lodestar_gossip_block_process_block_errors", help: "Count of errors, by error type, while processing blocks", labelNames: ["error"], @@ -702,13 +741,13 @@ export function createLodestarMetrics( name: "lodestar_import_block_set_head_after_first_interval_total", help: "Total times an imported block is set as head after the first slot interval", }), - bySource: register.gauge<"source">({ + bySource: register.gauge<{source: BlockSource}>({ name: "lodestar_import_block_by_source_total", help: "Total number of imported blocks by source", labelNames: ["source"], }), }, - engineNotifyNewPayloadResult: register.gauge<"result">({ + engineNotifyNewPayloadResult: register.gauge<{result: ExecutionPayloadStatus}>({ name: "lodestar_execution_engine_notify_new_payload_result_total", help: "The total result of calling notifyNewPayload execution engine api", labelNames: ["result"], @@ -722,7 +761,7 @@ export function createLodestarMetrics( name: "lodestar_backfill_prev_fin_or_ws_slot", help: "Slot of previous finalized or wsCheckpoint block to be validated", }), - totalBlocks: register.gauge<"method">({ + totalBlocks: register.gauge<{method: BackfillSyncMethod}>({ name: "lodestar_backfill_sync_blocks_total", help: "Total amount of backfilled blocks", 
labelNames: ["method"], @@ -753,7 +792,7 @@ export function createLodestarMetrics( name: "lodestar_oppool_attestation_pool_size", help: "Current size of the AttestationPool = total attestations unique by data and slot", }), - attestationPoolInsertOutcome: register.counter<"insertOutcome">({ + attestationPoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({ name: "lodestar_attestation_pool_insert_outcome_total", help: "Total number of InsertOutcome as a result of adding an attestation in a pool", labelNames: ["insertOutcome"], @@ -778,7 +817,7 @@ export function createLodestarMetrics( name: "lodestar_oppool_sync_committee_message_pool_size", help: "Current size of the SyncCommitteeMessagePool unique by slot subnet and block root", }), - syncCommitteeMessagePoolInsertOutcome: register.counter<"insertOutcome">({ + syncCommitteeMessagePoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({ name: "lodestar_oppool_sync_committee_message_insert_outcome_total", help: "Total number of InsertOutcome as a result of adding a SyncCommitteeMessage to pool", labelNames: ["insertOutcome"], @@ -804,7 +843,7 @@ export function createLodestarMetrics( // Validator Monitor Metrics (per-epoch summaries) // Only track prevEpochOnChainBalance per index - prevEpochOnChainBalance: register.gauge<"index">({ + prevEpochOnChainBalance: register.gauge<{index: number}>({ name: "validator_monitor_prev_epoch_on_chain_balance", help: "Balance of validator after an epoch", labelNames: ["index"], @@ -913,12 +952,12 @@ export function createLodestarMetrics( help: "The count of times a sync signature was seen inside an aggregate", buckets: [0, 1, 2, 3, 5, 10], }), - prevEpochAttestationSummary: register.gauge<"summary">({ + prevEpochAttestationSummary: register.gauge<{summary: string}>({ name: "validator_monitor_prev_epoch_attestation_summary", help: "Best guess of the node of the result of previous epoch validators attestation actions and causality", labelNames: 
["summary"], }), - prevEpochBlockProposalSummary: register.gauge<"summary">({ + prevEpochBlockProposalSummary: register.gauge<{summary: string}>({ name: "validator_monitor_prev_epoch_block_proposal_summary", help: "Best guess of the node of the result of previous epoch validators block proposal actions and causality", labelNames: ["summary"], @@ -926,12 +965,12 @@ export function createLodestarMetrics( // Validator Monitor Metrics (real-time) - unaggregatedAttestationTotal: register.gauge<"src">({ + unaggregatedAttestationTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_unaggregated_attestation_total", help: "Number of unaggregated attestations seen", labelNames: ["src"], }), - unaggregatedAttestationDelaySeconds: register.histogram<"src">({ + unaggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_unaggregated_attestation_delay_seconds", help: "The delay between when the validator should send the attestation and when it was received", labelNames: ["src"], @@ -945,23 +984,23 @@ export function createLodestarMetrics( // refine if we want more reasonable values buckets: [0, 10, 20, 30], }), - aggregatedAttestationTotal: register.gauge<"src">({ + aggregatedAttestationTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_aggregated_attestation_total", help: "Number of aggregated attestations seen", labelNames: ["src"], }), - aggregatedAttestationDelaySeconds: register.histogram<"src">({ + aggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_aggregated_attestation_delay_seconds", help: "The delay between then the validator should send the aggregate and when it was received", labelNames: ["src"], buckets: [0.1, 0.25, 0.5, 1, 2, 5, 10], }), - attestationInAggregateTotal: register.gauge<"src">({ + attestationInAggregateTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_attestation_in_aggregate_total", help: "Number of times an attestation has 
been seen in an aggregate", labelNames: ["src"], }), - attestationInAggregateDelaySeconds: register.histogram<"src">({ + attestationInAggregateDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_attestation_in_aggregate_delay_seconds", help: "The delay between when the validator should send the aggregate and when it was received", labelNames: ["src"], @@ -985,12 +1024,12 @@ export function createLodestarMetrics( name: "validator_monitor_sync_signature_in_aggregate_total", help: "Number of times a sync signature has been seen in an aggregate", }), - beaconBlockTotal: register.gauge<"src">({ + beaconBlockTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_beacon_block_total", help: "Total number of beacon blocks seen", labelNames: ["src"], }), - beaconBlockDelaySeconds: register.histogram<"src">({ + beaconBlockDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_beacon_block_delay_seconds", help: "The delay between when the validator should send the block and when it was received", labelNames: ["src"], @@ -1060,13 +1099,15 @@ export function createLodestarMetrics( name: "lodestar_cp_state_cache_adds_total", help: "Total number of items added in checkpoint state cache", }), - size: register.gauge({ + size: register.gauge<{type: CacheItemType}>({ name: "lodestar_cp_state_cache_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), - epochSize: register.gauge({ + epochSize: register.gauge<{type: CacheItemType}>({ name: "lodestar_cp_state_epoch_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), reads: register.avgMinMax({ name: "lodestar_cp_state_epoch_reads", @@ -1081,6 +1122,44 @@ export function createLodestarMetrics( help: "Histogram of cloned count per state every time state.clone() is called", buckets: [1, 2, 5, 10, 50, 250], }), + statePersistDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds", + help: "Histogram of time to 
persist state to db", + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + statePruneFromMemoryCount: register.gauge({ + name: "lodestar_cp_state_cache_state_prune_from_memory_count", + help: "Total number of states pruned from memory", + }), + statePersistSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds_from_slot", + help: "Histogram of time to persist state to db since the clock slot", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds", + help: "Histogram of time to load state from db", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadEpochDiff: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_epoch_diff", + help: "Histogram of epoch difference between seed state epoch and loaded state epoch", + buckets: [0, 1, 2, 4, 8, 16, 32], + }), + stateReloadSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds_from_slot", + help: "Histogram of time to load state from db since the clock slot", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadDbReadTime: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_db_read_seconds", + help: "Histogram of time to load state bytes from db", + buckets: [0.01, 0.05, 0.1, 0.2, 0.5], + }), + persistedStateRemoveCount: register.gauge({ + name: "lodestar_cp_state_cache_persisted_state_remove_count", + help: "Total number of persisted states removed", + }), }, balancesCache: { @@ -1092,7 +1171,7 @@ export function createLodestarMetrics( name: "lodestar_balances_cache_misses_total", help: "Total number of balances cache misses", }), - closestStateResult: register.counter<"stateId">({ + closestStateResult: register.counter<{stateId: string}>({ name: "lodestar_balances_cache_closest_state_result_total", help: "Total number of stateIds returned as closest justified balances state by id", labelNames: ["stateId"], @@ -1170,7 +1249,7 @@ export function 
createLodestarMetrics( name: "lodestar_seen_cache_attestation_data_miss_total", help: "Total number of attestation data miss in SeenAttestationData", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: RejectReason}>({ name: "lodestar_seen_cache_attestation_data_reject_total", help: "Total number of attestation data rejected in SeenAttestationData", labelNames: ["reason"], @@ -1178,23 +1257,23 @@ export function createLodestarMetrics( }, }, - regenFnCallTotal: register.gauge<"entrypoint" | "caller">({ + regenFnCallTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_call_total", help: "Total number of calls for regen functions", labelNames: ["entrypoint", "caller"], }), - regenFnQueuedTotal: register.gauge<"entrypoint" | "caller">({ + regenFnQueuedTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_queued_total", help: "Total number of calls queued for regen functions", labelNames: ["entrypoint", "caller"], }), - regenFnCallDuration: register.histogram<"entrypoint" | "caller">({ + regenFnCallDuration: register.histogram<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_call_duration", help: "regen function duration", labelNames: ["entrypoint", "caller"], buckets: [0.1, 1, 10, 100], }), - regenFnTotalErrors: register.gauge<"entrypoint" | "caller">({ + regenFnTotalErrors: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_errors_total", help: "regen function total errors", labelNames: ["entrypoint", "caller"], @@ -1206,7 +1285,7 @@ export function createLodestarMetrics( // Precompute next epoch transition precomputeNextEpochTransition: { - count: register.counter<"result">({ + count: register.counter<{result: string}>({ name: "lodestar_precompute_next_epoch_transition_result_total", labelNames: ["result"], help: "Total number of precomputeNextEpochTransition runs by result", @@ -1219,6 +1298,11 
@@ export function createLodestarMetrics( name: "lodestar_precompute_next_epoch_transition_waste_total", help: "Total number of precomputing next epoch transition wasted", }), + duration: register.histogram({ + name: "lodestar_precompute_next_epoch_transition_duration_seconds", + help: "Duration of precomputeNextEpochTransition, including epoch transition and hashTreeRoot", + buckets: [1, 2, 3, 4, 8], + }), }, // reprocess attestations @@ -1235,14 +1319,15 @@ export function createLodestarMetrics( name: "lodestar_reprocess_attestations_wait_time_resolve_seconds", help: "Time to wait for unknown block in seconds", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: ReprocessStatus}>({ name: "lodestar_reprocess_attestations_reject_total", help: "Total number of attestations are rejected to reprocess", labelNames: ["reason"], }), - waitSecBeforeReject: register.gauge<"reason">({ + waitSecBeforeReject: register.gauge<{reason: ReprocessStatus}>({ name: "lodestar_reprocess_attestations_wait_time_reject_seconds", help: "Time to wait for unknown block before being rejected", + labelNames: ["reason"], }), }, @@ -1264,24 +1349,25 @@ export function createLodestarMetrics( name: "lodestar_reprocess_gossip_attestations_wait_time_resolve_seconds", help: "Time to wait for unknown block in seconds", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: ReprocessRejectReason}>({ name: "lodestar_reprocess_gossip_attestations_reject_total", help: "Total number of attestations are rejected to reprocess", labelNames: ["reason"], }), - waitSecBeforeReject: register.gauge<"reason">({ + waitSecBeforeReject: register.gauge<{reason: ReprocessRejectReason}>({ name: "lodestar_reprocess_gossip_attestations_wait_time_reject_seconds", help: "Time to wait for unknown block before being rejected", + labelNames: ["reason"], }), }, lightclientServer: { - onSyncAggregate: register.gauge<"event">({ + onSyncAggregate: register.gauge<{event: string}>({ name: 
"lodestar_lightclient_server_on_sync_aggregate_event_total", help: "Total number of relevant events onSyncAggregate fn", labelNames: ["event"], }), - highestSlot: register.gauge<"item">({ + highestSlot: register.gauge<{item: string}>({ name: "lodestar_lightclient_server_highest_slot", help: "Current highest slot of items stored by LightclientServer", labelNames: ["item"], @@ -1392,7 +1478,11 @@ export function createLodestarMetrics( }), // Merge details - eth1MergeBlockDetails: register.gauge<"terminalBlockHash" | "terminalBlockNumber" | "terminalBlockTD">({ + eth1MergeBlockDetails: register.gauge<{ + terminalBlockHash: string; + terminalBlockNumber: string; + terminalBlockTD: string; + }>({ name: "lodestar_eth1_merge_block_details", help: "If found then 1 with terminal block details", labelNames: ["terminalBlockHash", "terminalBlockNumber", "terminalBlockTD"], @@ -1400,36 +1490,36 @@ export function createLodestarMetrics( }, eth1HttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_eth1_http_client_request_time_seconds", help: "eth1 JsonHttpClient - histogram or roundtrip request times", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_eth1_http_client_stream_time_seconds", help: "eth1 JsonHttpClient - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_request_errors_total", help: "eth1 JsonHttpClient - total count of request errors", labelNames: ["routeId"], }), - retryCount: register.gauge<"routeId">({ + retryCount: register.gauge<{routeId: string}>({ name: 
"lodestar_eth1_http_client_request_retries_total", help: "eth1 JsonHttpClient - total count of request retries", labelNames: ["routeId"], }), - requestUsedFallbackUrl: register.gauge({ + requestUsedFallbackUrl: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_request_used_fallback_url_total", help: "eth1 JsonHttpClient - total count of requests on fallback url(s)", labelNames: ["routeId"], }), - activeRequests: register.gauge({ + activeRequests: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_active_requests", help: "eth1 JsonHttpClient - current count of active requests", labelNames: ["routeId"], @@ -1441,36 +1531,36 @@ export function createLodestarMetrics( }, executionEnginerHttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_time_seconds", help: "ExecutionEngineHttp client - histogram or roundtrip request times", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_execution_engine_http_client_stream_time_seconds", help: "ExecutionEngineHttp client - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_errors_total", help: "ExecutionEngineHttp client - total count of request errors", labelNames: ["routeId"], }), - retryCount: register.gauge<"routeId">({ + retryCount: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_retries_total", help: "ExecutionEngineHttp client - total count of request retries", labelNames: ["routeId"], }), - 
requestUsedFallbackUrl: register.gauge({ + requestUsedFallbackUrl: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_used_fallback_url_total", help: "ExecutionEngineHttp client - total count of requests on fallback url(s)", labelNames: ["routeId"], }), - activeRequests: register.gauge({ + activeRequests: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_active_requests", help: "ExecutionEngineHttp client - current count of active requests", labelNames: ["routeId"], @@ -1482,32 +1572,32 @@ export function createLodestarMetrics( }, builderHttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_builder_http_client_request_time_seconds", help: "Histogram of builder http client request time by routeId", labelNames: ["routeId"], // Expected times are ~ 50-500ms, but in an overload NodeJS they can be greater buckets: [0.01, 0.1, 1, 5], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_builder_http_client_stream_time_seconds", help: "Builder api - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: register.gauge<{routeId: string}>({ name: "lodestar_builder_http_client_request_errors_total", help: "Total count of errors on builder http client requests by routeId", labelNames: ["routeId"], }), - requestToFallbacks: register.gauge<"routeId">({ + requestToFallbacks: register.gauge<{routeId: string}>({ name: "lodestar_builder_http_client_request_to_fallbacks_total", help: "Total count of requests to fallback URLs on builder http API by routeId", labelNames: ["routeId"], }), - urlsScore: register.gauge<"urlIndex">({ + urlsScore: register.gauge<{urlIndex: number}>({ name: 
"lodestar_builder_http_client_urls_score", help: "Current score of builder http URLs by url index", labelNames: ["urlIndex"], @@ -1515,22 +1605,22 @@ export function createLodestarMetrics( }, db: { - dbReadReq: register.gauge<"bucket">({ + dbReadReq: register.gauge<{bucket: string}>({ name: "lodestar_db_read_req_total", help: "Total count of db read requests, may read 0 or more items", labelNames: ["bucket"], }), - dbReadItems: register.gauge<"bucket">({ + dbReadItems: register.gauge<{bucket: string}>({ name: "lodestar_db_read_items_total", help: "Total count of db read items, item = key | value | entry", labelNames: ["bucket"], }), - dbWriteReq: register.gauge<"bucket">({ + dbWriteReq: register.gauge<{bucket: string}>({ name: "lodestar_db_write_req_total", help: "Total count of db write requests, may write 0 or more items", labelNames: ["bucket"], }), - dbWriteItems: register.gauge<"bucket">({ + dbWriteItems: register.gauge<{bucket: string}>({ name: "lodestar_db_write_items_total", help: "Total count of db write items", labelNames: ["bucket"], diff --git a/packages/beacon-node/src/metrics/server/http.ts b/packages/beacon-node/src/metrics/server/http.ts index b699471e07d5..d8fbb289e951 100644 --- a/packages/beacon-node/src/metrics/server/http.ts +++ b/packages/beacon-node/src/metrics/server/http.ts @@ -15,6 +15,11 @@ export type HttpMetricsServer = { close(): Promise; }; +enum RequestStatus { + success = "success", + error = "error", +} + export async function getHttpMetricsServer( opts: HttpMetricsServerOpts, { @@ -26,7 +31,7 @@ export async function getHttpMetricsServer( // New registry to metric the metrics. 
Using the same registry would deadlock the .metrics promise const httpServerRegister = new RegistryMetricCreator(); - const scrapeTimeMetric = httpServerRegister.histogram<"status">({ + const scrapeTimeMetric = httpServerRegister.histogram<{status: RequestStatus}>({ name: "lodestar_metrics_scrape_seconds", help: "Lodestar metrics server async time to scrape metrics", labelNames: ["status"], @@ -40,7 +45,7 @@ export async function getHttpMetricsServer( if (req.method === "GET" && req.url && req.url.includes("/metrics")) { const timer = scrapeTimeMetric.startTimer(); const metricsRes = await Promise.all([wrapError(register.metrics()), getOtherMetrics()]); - timer({status: metricsRes[0].err ? "error" : "success"}); + timer({status: metricsRes[0].err ? RequestStatus.error : RequestStatus.success}); // Ensure we only writeHead once if (metricsRes[0].err) { diff --git a/packages/beacon-node/src/metrics/utils/avgMinMax.ts b/packages/beacon-node/src/metrics/utils/avgMinMax.ts index 43f51c821790..709c83ee38d6 100644 --- a/packages/beacon-node/src/metrics/utils/avgMinMax.ts +++ b/packages/beacon-node/src/metrics/utils/avgMinMax.ts @@ -1,21 +1,21 @@ import {GaugeConfiguration} from "prom-client"; +import {AvgMinMax as IAvgMinMax, LabelKeys, LabelsGeneric} from "@lodestar/utils"; import {GaugeExtra} from "./gauge.js"; type GetValuesFn = () => number[]; -type Labels = Partial>; /** * Special non-standard "Histogram" that captures the avg, min and max of values */ -export class AvgMinMax { - private readonly sum: GaugeExtra; - private readonly avg: GaugeExtra; - private readonly min: GaugeExtra; - private readonly max: GaugeExtra; +export class AvgMinMax implements IAvgMinMax { + private readonly sum: GaugeExtra; + private readonly avg: GaugeExtra; + private readonly min: GaugeExtra; + private readonly max: GaugeExtra; private getValuesFn: GetValuesFn | null = null; - constructor(configuration: GaugeConfiguration) { + constructor(configuration: GaugeConfiguration>) { this.sum = 
new GaugeExtra({...configuration, name: `${configuration.name}_sum`}); this.avg = new GaugeExtra({...configuration, name: `${configuration.name}_avg`}); this.min = new GaugeExtra({...configuration, name: `${configuration.name}_min`}); @@ -33,8 +33,8 @@ export class AvgMinMax { } set(values: number[]): void; - set(labels: Labels, values: number[]): void; - set(arg1?: Labels | number[], arg2?: number[]): void { + set(labels: Labels, values: number[]): void; + set(arg1?: Labels | number[], arg2?: number[]): void { if (arg2 === undefined) { const values = arg1 as number[]; const {sum, avg, min, max} = getStats(values); @@ -44,7 +44,7 @@ export class AvgMinMax { this.max.set(max); } else { const values = (arg2 !== undefined ? arg2 : arg1) as number[]; - const labels = arg1 as Labels; + const labels = arg1 as Labels; const {sum, avg, min, max} = getStats(values); this.sum.set(labels, sum); this.avg.set(labels, avg); diff --git a/packages/beacon-node/src/metrics/utils/gauge.ts b/packages/beacon-node/src/metrics/utils/gauge.ts index fb95fe25d24d..1f527adfcb64 100644 --- a/packages/beacon-node/src/metrics/utils/gauge.ts +++ b/packages/beacon-node/src/metrics/utils/gauge.ts @@ -1,29 +1,16 @@ -import {Gauge, GaugeConfiguration} from "prom-client"; -import {IGauge} from "../interface.js"; - -type CollectFn = (metric: IGauge) => void; -type Labels = Partial>; +import {Gauge} from "prom-client"; +import {CollectFn, Gauge as IGauge, LabelKeys, LabelsGeneric} from "@lodestar/utils"; /** - * Extends the prom-client Gauge with extra features: - * - Add multiple collect functions after instantiation - * - Create child gauges with fixed labels + * Extends the prom-client Gauge to be able to add multiple collect functions after instantiation */ -export class GaugeExtra extends Gauge implements IGauge { - private collectFns: CollectFn[] = []; - - constructor(configuration: GaugeConfiguration) { - super(configuration); - } +export class GaugeExtra extends Gauge> implements IGauge { + 
private collectFns: CollectFn[] = []; - addCollect(collectFn: CollectFn): void { + addCollect(collectFn: CollectFn): void { this.collectFns.push(collectFn); } - child(labels: Labels): GaugeChild { - return new GaugeChild(labels, this); - } - /** * @override Metric.collect */ @@ -33,48 +20,3 @@ export class GaugeExtra extends Gauge implements IGauge { } } } - -export class GaugeChild implements IGauge { - gauge: GaugeExtra; - labelsParent: Labels; - constructor(labelsParent: Labels, gauge: GaugeExtra) { - this.gauge = gauge; - this.labelsParent = labelsParent; - } - - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.inc({...this.labelsParent, ...arg1}, arg2 ?? 1); - } else { - this.gauge.inc(this.labelsParent, arg1 ?? 1); - } - } - - dec(value?: number): void; - dec(labels: Labels, value?: number): void; - dec(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.dec({...this.labelsParent, ...arg1}, arg2 ?? 1); - } else { - this.gauge.dec(this.labelsParent, arg1 ?? 1); - } - } - - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.set({...this.labelsParent, ...arg1}, arg2 ?? 0); - } else { - this.gauge.set(this.labelsParent, arg1 ?? 
0); - } - } - - addCollect(collectFn: CollectFn): void { - this.gauge.addCollect(() => collectFn(this)); - } -} diff --git a/packages/beacon-node/src/metrics/utils/histogram.ts b/packages/beacon-node/src/metrics/utils/histogram.ts deleted file mode 100644 index 4490929629f2..000000000000 --- a/packages/beacon-node/src/metrics/utils/histogram.ts +++ /dev/null @@ -1,48 +0,0 @@ -import {Histogram, HistogramConfiguration} from "prom-client"; -import {IHistogram} from "../interface.js"; - -type Labels = Partial>; - -/** - * Extends the prom-client Histogram with extra features: - * - Add multiple collect functions after instantiation - * - Create child histograms with fixed labels - */ -export class HistogramExtra extends Histogram implements IHistogram { - constructor(configuration: HistogramConfiguration) { - super(configuration); - } - - child(labels: Labels): HistogramChild { - return new HistogramChild(labels, this); - } -} - -export class HistogramChild implements IHistogram { - histogram: HistogramExtra; - labelsParent: Labels; - constructor(labelsParent: Labels, histogram: HistogramExtra) { - this.histogram = histogram; - this.labelsParent = labelsParent; - } - - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `observe(value: number, labels?: Labels)`, this would be simpler - observe(value?: number): void; - observe(labels: Labels, value?: number): void; - observe(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.histogram.observe({...this.labelsParent, ...arg1}, arg2 ?? 0); - } else { - this.histogram.observe(this.labelsParent, arg1 ?? 
0); - } - } - - startTimer(arg1?: Labels): (labels?: Labels) => number { - if (typeof arg1 === "object") { - return this.histogram.startTimer({...this.labelsParent, ...arg1}); - } else { - return this.histogram.startTimer(this.labelsParent); - } - } -} diff --git a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts index 8864eb2c74c4..adec6f984702 100644 --- a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts +++ b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts @@ -1,33 +1,41 @@ -import {Gauge, GaugeConfiguration, Registry, HistogramConfiguration, CounterConfiguration, Counter} from "prom-client"; +import {Gauge, Registry, Counter, Histogram} from "prom-client"; +import { + AvgMinMaxConfig, + CounterConfig, + GaugeConfig, + HistogramConfig, + AvgMinMax as IAvgMinMax, + Counter as ICounter, + GaugeExtra as IGaugeExtra, + Histogram as IHistogram, + LabelKeys, + LabelsGeneric, + MetricsRegisterCustom, + NoLabels, + StaticConfig, +} from "@lodestar/utils"; import {AvgMinMax} from "./avgMinMax.js"; import {GaugeExtra} from "./gauge.js"; -import {HistogramExtra} from "./histogram.js"; -type StaticConfiguration = { - name: GaugeConfiguration["name"]; - help: GaugeConfiguration["help"]; - value: Record; -}; - -export class RegistryMetricCreator extends Registry { - gauge(configuration: GaugeConfiguration): GaugeExtra { - return new GaugeExtra({...configuration, registers: [this]}); +export class RegistryMetricCreator extends Registry implements MetricsRegisterCustom { + gauge(configuration: GaugeConfig): IGaugeExtra { + return new GaugeExtra({...configuration, registers: [this]}); } - histogram(configuration: HistogramConfiguration): HistogramExtra { - return new HistogramExtra({...configuration, registers: [this]}); + histogram(configuration: HistogramConfig): IHistogram { + return new Histogram>({...configuration, registers: [this]}); } - avgMinMax(configuration: 
GaugeConfiguration): AvgMinMax { - return new AvgMinMax({...configuration, registers: [this]}); + avgMinMax(configuration: AvgMinMaxConfig): IAvgMinMax { + return new AvgMinMax({...configuration, registers: [this]}); } /** Static metric to send string-based data such as versions, config params, etc */ - static({name, help, value}: StaticConfiguration): void { + static({name, help, value}: StaticConfig): void { new Gauge({name, help, labelNames: Object.keys(value), registers: [this]}).set(value, 1); } - counter(configuration: CounterConfiguration): Counter { - return new Counter({...configuration, registers: [this]}); + counter(configuration: CounterConfig): ICounter { + return new Counter>({...configuration, registers: [this]}); } } diff --git a/packages/beacon-node/src/monitoring/service.ts b/packages/beacon-node/src/monitoring/service.ts index f50f992ebe1f..9581c5f11c92 100644 --- a/packages/beacon-node/src/monitoring/service.ts +++ b/packages/beacon-node/src/monitoring/service.ts @@ -1,8 +1,7 @@ import {Registry} from "prom-client"; import {fetch} from "@lodestar/api"; -import {ErrorAborted, Logger, TimeoutError} from "@lodestar/utils"; +import {ErrorAborted, Histogram, Logger, TimeoutError} from "@lodestar/utils"; import {RegistryMetricCreator} from "../metrics/index.js"; -import {HistogramExtra} from "../metrics/utils/histogram.js"; import {defaultMonitoringOptions, MonitoringOptions} from "./options.js"; import {createClientStats} from "./clientStats.js"; import {ClientStats} from "./types.js"; @@ -25,6 +24,11 @@ enum Status { Closed = "closed", } +enum SendDataStatus { + Success = "success", + Error = "error", +} + export type Client = "beacon" | "validator"; /** @@ -38,8 +42,8 @@ export class MonitoringService { private readonly register: Registry; private readonly logger: Logger; - private readonly collectDataMetric: HistogramExtra; - private readonly sendDataMetric: HistogramExtra<"status">; + private readonly collectDataMetric: Histogram; + private 
readonly sendDataMetric: Histogram<{status: SendDataStatus}>; private status = Status.Started; private initialDelayTimeout?: NodeJS.Timeout; @@ -193,7 +197,7 @@ export class MonitoringService { throw e; } } finally { - timer({status: res?.ok ? "success" : "error"}); + timer({status: res?.ok ? SendDataStatus.Success : SendDataStatus.Error}); clearTimeout(timeout); } } diff --git a/packages/beacon-node/src/network/core/metrics.ts b/packages/beacon-node/src/network/core/metrics.ts index 4f416ad4fba2..0137ce1f0540 100644 --- a/packages/beacon-node/src/network/core/metrics.ts +++ b/packages/beacon-node/src/network/core/metrics.ts @@ -1,4 +1,8 @@ import {RegistryMetricCreator} from "../../metrics/utils/registryMetricCreator.js"; +import {SubnetType} from "../metadata.js"; +import {DiscoveredPeerStatus} from "../peers/discover.js"; +import {SubnetSource} from "../subnets/attnetsService.js"; +import {DLLSubnetSource} from "../subnets/dllAttnetsService.js"; export type NetworkCoreMetrics = ReturnType; @@ -13,12 +17,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "libp2p_peers", help: "number of connected peers", }), - peersByDirection: register.gauge<"direction">({ + peersByDirection: register.gauge<{direction: string}>({ name: "lodestar_peers_by_direction_count", help: "number of peers, labeled by direction", labelNames: ["direction"], }), - peersByClient: register.gauge<"client">({ + peersByClient: register.gauge<{client: string}>({ name: "lodestar_peers_by_client_count", help: "number of peers, labeled by client", labelNames: ["client"], @@ -28,14 +32,14 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { help: "Histogram of current count of long lived attnets of connected peers", buckets: [0, 4, 16, 32, 64], }), - peerScoreByClient: register.histogram<"client">({ + peerScoreByClient: register.histogram<{client: string}>({ name: "lodestar_app_peer_score", help: "Current peer score at lodestar app side", 
// Min score = -100, max score = 100, disconnect = -20, ban = -50 buckets: [-100, -50, -20, 0, 25], labelNames: ["client"], }), - peerGossipScoreByClient: register.histogram<"client">({ + peerGossipScoreByClient: register.histogram<{client: string}>({ name: "lodestar_gossip_score_by_client", help: "Gossip peer score by client", labelNames: ["client"], @@ -53,27 +57,27 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_peers_sync_count", help: "Current count of peers useful for sync", }), - peerConnectedEvent: register.gauge<"direction" | "status">({ + peerConnectedEvent: register.gauge<{direction: string; status: string}>({ name: "lodestar_peer_connected_total", help: "Total number of peer:connected event, labeled by direction", labelNames: ["direction", "status"], }), - peerDisconnectedEvent: register.gauge<"direction">({ + peerDisconnectedEvent: register.gauge<{direction: string}>({ name: "lodestar_peer_disconnected_total", help: "Total number of peer:disconnected event, labeled by direction", labelNames: ["direction"], }), - peerGoodbyeReceived: register.gauge<"reason">({ + peerGoodbyeReceived: register.gauge<{reason: string}>({ name: "lodestar_peer_goodbye_received_total", help: "Total number of goodbye received, labeled by reason", labelNames: ["reason"], }), - peerLongConnectionDisconnect: register.gauge<"reason">({ + peerLongConnectionDisconnect: register.gauge<{reason: string}>({ name: "lodestar_peer_long_connection_disconnect_total", help: "For peers with long connection, track disconnect reason", labelNames: ["reason"], }), - peerGoodbyeSent: register.gauge<"reason">({ + peerGoodbyeSent: register.gauge<{reason: string}>({ name: "lodestar_peer_goodbye_sent_total", help: "Total number of goodbye sent, labeled by reason", labelNames: ["reason"], @@ -82,22 +86,22 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_peers_requested_total_to_connect", help: "Prioritization 
results total peers count requested to connect", }), - peersRequestedToDisconnect: register.gauge<"reason">({ + peersRequestedToDisconnect: register.gauge<{reason: string}>({ name: "lodestar_peers_requested_total_to_disconnect", help: "Prioritization results total peers count requested to disconnect", labelNames: ["reason"], }), - peersRequestedSubnetsToQuery: register.gauge<"type">({ + peersRequestedSubnetsToQuery: register.gauge<{type: SubnetType}>({ name: "lodestar_peers_requested_total_subnets_to_query", help: "Prioritization results total subnets to query and discover peers in", labelNames: ["type"], }), - peersRequestedSubnetsPeerCount: register.gauge<"type">({ + peersRequestedSubnetsPeerCount: register.gauge<{type: SubnetType}>({ name: "lodestar_peers_requested_total_subnets_peers_count", help: "Prioritization results total peers in subnets to query and discover peers in", labelNames: ["type"], }), - peersReportPeerCount: register.gauge<"reason">({ + peersReportPeerCount: register.gauge<{reason: string}>({ name: "lodestar_peers_report_peer_count", help: "network.reportPeer count by reason", labelNames: ["reason"], @@ -115,12 +119,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_peers_to_connect", help: "Current peers to connect count from discoverPeers requests", }), - subnetPeersToConnect: register.gauge<"type">({ + subnetPeersToConnect: register.gauge<{type: SubnetType}>({ name: "lodestar_discovery_subnet_peers_to_connect", help: "Current peers to connect count from discoverPeers requests", labelNames: ["type"], }), - subnetsToConnect: register.gauge<"type">({ + subnetsToConnect: register.gauge<{type: SubnetType}>({ name: "lodestar_discovery_subnets_to_connect", help: "Current subnets to connect count from discoverPeers requests", labelNames: ["type"], @@ -129,7 +133,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_cached_enrs_size", help: 
"Current size of the cachedENRs Set", }), - findNodeQueryRequests: register.gauge<"action">({ + findNodeQueryRequests: register.gauge<{action: string}>({ name: "lodestar_discovery_find_node_query_requests_total", help: "Total count of find node queries started", labelNames: ["action"], @@ -143,7 +147,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_find_node_query_enrs_total", help: "Total count of found ENRs in queries", }), - discoveredStatus: register.gauge<"status">({ + discoveredStatus: register.gauge<{status: DiscoveredPeerStatus}>({ name: "lodestar_discovery_discovered_status_total_count", help: "Total count of status results of PeerDiscovery.onDiscovered() function", labelNames: ["status"], @@ -152,7 +156,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_total_dial_attempts", help: "Total dial attempts by peer discovery", }), - dialTime: register.histogram<"status">({ + dialTime: register.histogram<{status: string}>({ name: "lodestar_discovery_dial_time_seconds", help: "Time to dial peers in seconds", labelNames: ["status"], @@ -161,62 +165,13 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { }, reqResp: { - rateLimitErrors: register.gauge<"method">({ + rateLimitErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_rate_limiter_errors_total", help: "Count rate limiter errors", labelNames: ["method"], }), }, - gossipValidationAccept: register.gauge<"topic">({ - name: "lodestar_gossip_validation_accept_total", - help: "Count of total gossip validation accept", - labelNames: ["topic"], - }), - gossipValidationIgnore: register.gauge<"topic">({ - name: "lodestar_gossip_validation_ignore_total", - help: "Count of total gossip validation ignore", - labelNames: ["topic"], - }), - gossipValidationReject: register.gauge<"topic">({ - name: "lodestar_gossip_validation_reject_total", - help: "Count of total gossip 
validation reject", - labelNames: ["topic"], - }), - gossipValidationError: register.gauge<"topic" | "error">({ - name: "lodestar_gossip_validation_error_total", - help: "Count of total gossip validation errors detailed", - labelNames: ["topic", "error"], - }), - - gossipValidationQueueLength: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_length", - help: "Count of total gossip validation queue length", - labelNames: ["topic"], - }), - gossipValidationQueueDroppedJobs: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_dropped_jobs_total", - help: "Count of total gossip validation queue dropped jobs", - labelNames: ["topic"], - }), - gossipValidationQueueJobTime: register.histogram<"topic">({ - name: "lodestar_gossip_validation_queue_job_time_seconds", - help: "Time to process gossip validation queue job in seconds", - labelNames: ["topic"], - buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], - }), - gossipValidationQueueJobWaitTime: register.histogram<"topic">({ - name: "lodestar_gossip_validation_queue_job_wait_time_seconds", - help: "Time from job added to the queue to starting the job in seconds", - labelNames: ["topic"], - buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], - }), - gossipValidationQueueConcurrency: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_concurrency", - help: "Current count of jobs being run on network processor for topic", - labelNames: ["topic"], - }), - discv5: { decodeEnrAttemptCount: register.counter({ name: "lodestar_discv5_decode_enr_attempt_count", @@ -237,14 +192,14 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_attnets_service_committee_subscriptions_total", help: "Count of committee subscriptions", }), - subscriptionsCommitteeMeshPeers: register.histogram<"subnet">({ + subscriptionsCommitteeMeshPeers: register.histogram<{subnet: number}>({ name: "lodestar_attnets_service_committee_subscriptions_mesh_peers", 
help: "Histogram of mesh peers per committee subscription", labelNames: ["subnet"], // Dlow = 6, D = 8, DHi = 12 plus 2 more buckets buckets: [0, 4, 6, 8, 12], }), - subscriptionsCommitteeTimeToStableMesh: register.histogram<"subnet">({ + subscriptionsCommitteeTimeToStableMesh: register.histogram<{subnet: number}>({ name: "lodestar_attnets_service_committee_subscriptions_time_to_stable_mesh_seconds", help: "Histogram of time until committee subscription is considered healthy (>= 6 mesh peers)", labelNames: ["subnet"], @@ -259,12 +214,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_attnets_service_long_lived_subscriptions_total", help: "Count of long lived subscriptions", }), - subscribeSubnets: register.gauge<"subnet" | "src">({ + subscribeSubnets: register.gauge<{subnet: number; src: SubnetSource | DLLSubnetSource}>({ name: "lodestar_attnets_service_subscribe_subnets_total", help: "Count of subscribe_subnets calls", labelNames: ["subnet", "src"], }), - unsubscribeSubnets: register.gauge<"subnet" | "src">({ + unsubscribeSubnets: register.gauge<{subnet: number; src: SubnetSource | DLLSubnetSource}>({ name: "lodestar_attnets_service_unsubscribe_subnets_total", help: "Count of unsubscribe_subnets calls", labelNames: ["subnet", "src"], @@ -280,12 +235,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_syncnets_service_committee_subscriptions_total", help: "Count of syncnet committee subscriptions", }), - subscribeSubnets: register.gauge<"subnet">({ + subscribeSubnets: register.gauge<{subnet: number}>({ name: "lodestar_syncnets_service_subscribe_subnets_total", help: "Count of syncnet subscribe_subnets calls", labelNames: ["subnet"], }), - unsubscribeSubnets: register.gauge<"subnet">({ + unsubscribeSubnets: register.gauge<{subnet: number}>({ name: "lodestar_syncnets_service_unsubscribe_subnets_total", help: "Count of syncnet unsubscribe_subnets calls", labelNames: 
["subnet"], @@ -303,7 +258,7 @@ export function getNetworkCoreWorkerMetrics(register: RegistryMetricCreator) { name: "lodestar_network_worker_reqresp_bridge_caller_pending_count", help: "Current count of pending elements in respBridgeCaller", }), - networkWorkerWireEventsOnWorkerThreadLatency: register.histogram<"eventName">({ + networkWorkerWireEventsOnWorkerThreadLatency: register.histogram<{eventName: string}>({ name: "lodestar_network_worker_wire_events_on_worker_thread_latency_seconds", help: "Latency in seconds to transmit network events to worker thread across parent port", labelNames: ["eventName"], diff --git a/packages/beacon-node/src/network/discv5/worker.ts b/packages/beacon-node/src/network/discv5/worker.ts index 1b50ee86aa29..e09b063d13d1 100644 --- a/packages/beacon-node/src/network/discv5/worker.ts +++ b/packages/beacon-node/src/network/discv5/worker.ts @@ -3,12 +3,20 @@ import path from "node:path"; import fs from "node:fs"; import {createFromProtobuf} from "@libp2p/peer-id-factory"; import {Multiaddr, multiaddr} from "@multiformats/multiaddr"; -import {Gauge} from "prom-client"; import {expose} from "@chainsafe/threads/worker"; import {Observable, Subject} from "@chainsafe/threads/observable"; -import {createKeypairFromPeerId, Discv5, ENR, ENRData, SignableENR, SignableENRData} from "@chainsafe/discv5"; +import { + createKeypairFromPeerId, + Discv5, + ENR, + ENRData, + IDiscv5CreateOptions, + SignableENR, + SignableENRData, +} from "@chainsafe/discv5"; import {createBeaconConfig} from "@lodestar/config"; import {getNodeLogger} from "@lodestar/logger/node"; +import {Gauge} from "@lodestar/utils"; import {RegistryMetricCreator} from "../../metrics/index.js"; import {collectNodeJSMetrics} from "../../metrics/nodeJsMetrics.js"; import {profileNodeJS, writeHeapSnapshot} from "../../util/profile.js"; @@ -28,14 +36,14 @@ const logger = getNodeLogger(workerData.loggerOpts); // Set up metrics, nodejs and discv5-specific let metricsRegistry: 
RegistryMetricCreator | undefined; -let enrRelevanceMetric: Gauge<"status"> | undefined; +let enrRelevanceMetric: Gauge<{status: string}> | undefined; let closeMetrics: () => void | undefined; if (workerData.metrics) { metricsRegistry = new RegistryMetricCreator(); closeMetrics = collectNodeJSMetrics(metricsRegistry, "discv5_worker_"); // add enr relevance metric - enrRelevanceMetric = metricsRegistry.gauge<"status">({ + enrRelevanceMetric = metricsRegistry.gauge<{status: string}>({ name: "lodestar_discv5_discovered_status_total_count", help: "Total count of status results of enrRelevance() function", labelNames: ["status"], @@ -56,7 +64,7 @@ const discv5 = Discv5.create({ ip6: workerData.bindAddrs.ip6 ? multiaddr(workerData.bindAddrs.ip6) : undefined, }, config: workerData.config, - metricsRegistry, + metricsRegistry: metricsRegistry as IDiscv5CreateOptions["metricsRegistry"], }); // Load boot enrs diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index 8e9013487a06..600f96193296 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -70,7 +70,7 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: allForks.SignedBeaconBlock; - [GossipType.blob_sidecar]: deneb.SignedBlobSidecar; + [GossipType.blob_sidecar]: deneb.BlobSidecar; [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; [GossipType.beacon_attestation]: phase0.Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; @@ -85,7 +85,7 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: allForks.SignedBeaconBlock) => Promise | void; - [GossipType.blob_sidecar]: (signedBlobSidecar: deneb.SignedBlobSidecar) => Promise | void; + [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; 
[GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; diff --git a/packages/beacon-node/src/network/gossip/metrics.ts b/packages/beacon-node/src/network/gossip/metrics.ts index 3711669edddf..c2b5d0b32338 100644 --- a/packages/beacon-node/src/network/gossip/metrics.ts +++ b/packages/beacon-node/src/network/gossip/metrics.ts @@ -1,4 +1,6 @@ +import {ForkName} from "@lodestar/params"; import {RegistryMetricCreator} from "../../metrics/index.js"; +import {GossipType} from "./interface.js"; export type Eth2GossipsubMetrics = ReturnType; @@ -6,12 +8,12 @@ export type Eth2GossipsubMetrics = ReturnType export function createEth2GossipsubMetrics(register: RegistryMetricCreator) { return { gossipPeer: { - scoreByThreshold: register.gauge<"threshold">({ + scoreByThreshold: register.gauge<{threshold: string}>({ name: "lodestar_gossip_peer_score_by_threshold_count", help: "Gossip peer score by threshold", labelNames: ["threshold"], }), - meshPeersByClient: register.gauge<"client">({ + meshPeersByClient: register.gauge<{client: string}>({ name: "lodestar_gossip_mesh_peers_by_client_count", help: "number of mesh peers, labeled by client", labelNames: ["client"], @@ -22,34 +24,34 @@ export function createEth2GossipsubMetrics(register: RegistryMetricCreator) { }), }, gossipMesh: { - peersByType: register.gauge<"type" | "fork">({ + peersByType: register.gauge<{type: GossipType; fork: ForkName}>({ name: "lodestar_gossip_mesh_peers_by_type_count", help: "Number of connected mesh peers per gossip type", labelNames: ["type", "fork"], }), - peersByBeaconAttestationSubnet: register.gauge<"subnet" | "fork">({ + peersByBeaconAttestationSubnet: register.gauge<{subnet: string; fork: ForkName}>({ name: 
"lodestar_gossip_mesh_peers_by_beacon_attestation_subnet_count", help: "Number of connected mesh peers per beacon attestation subnet", labelNames: ["subnet", "fork"], }), - peersBySyncCommitteeSubnet: register.gauge<"subnet" | "fork">({ + peersBySyncCommitteeSubnet: register.gauge<{subnet: number; fork: ForkName}>({ name: "lodestar_gossip_mesh_peers_by_sync_committee_subnet_count", help: "Number of connected mesh peers per sync committee subnet", labelNames: ["subnet", "fork"], }), }, gossipTopic: { - peersByType: register.gauge<"type" | "fork">({ + peersByType: register.gauge<{type: GossipType; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_type_count", help: "Number of connected topic peers per gossip type", labelNames: ["type", "fork"], }), - peersByBeaconAttestationSubnet: register.gauge<"subnet" | "fork">({ + peersByBeaconAttestationSubnet: register.gauge<{subnet: string; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_beacon_attestation_subnet_count", help: "Number of connected topic peers per beacon attestation subnet", labelNames: ["subnet", "fork"], }), - peersBySyncCommitteeSubnet: register.gauge<"subnet" | "fork">({ + peersBySyncCommitteeSubnet: register.gauge<{subnet: number; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_sync_committee_subnet_count", help: "Number of connected topic peers per sync committee subnet", labelNames: ["subnet", "fork"], diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index de1a571c3330..c5cd68ffa1de 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -85,7 +85,7 @@ export function getGossipSSZType(topic: GossipTopic) { // beacon_block is updated in altair to support the updated SignedBeaconBlock type return ssz[topic.fork].SignedBeaconBlock; case GossipType.blob_sidecar: - return ssz.deneb.SignedBlobSidecar; + return ssz.deneb.BlobSidecar; case 
GossipType.beacon_aggregate_and_proof: return ssz.phase0.SignedAggregateAndProof; case GossipType.beacon_attestation: diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index 047263d15022..9531c8529acf 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -44,7 +44,7 @@ export interface INetwork extends INetworkCorePublic { // Gossip publishBeaconBlock(signedBlock: allForks.SignedBeaconBlock): Promise; - publishBlobSidecar(signedBlobSidecar: deneb.SignedBlobSidecar): Promise; + publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index d2571a2a92e0..200bd4fd3a8d 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -288,14 +288,14 @@ export class Network implements INetwork { }); } - async publishBlobSidecar(signedBlobSidecar: deneb.SignedBlobSidecar): Promise { - const fork = this.config.getForkName(signedBlobSidecar.message.slot); - const index = signedBlobSidecar.message.index; - return this.publishGossip( - {type: GossipType.blob_sidecar, fork, index}, - signedBlobSidecar, - {ignoreDuplicatePublishError: true} - ); + async publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise { + const slot = blobSidecar.signedBlockHeader.message.slot; + const fork = this.config.getForkName(slot); + const index = blobSidecar.index; + + return this.publishGossip({type: GossipType.blob_sidecar, fork, index}, blobSidecar, { + ignoreDuplicatePublishError: true, + }); } async publishBeaconAggregateAndProof(aggregateAndProof: 
phase0.SignedAggregateAndProof): Promise { diff --git a/packages/beacon-node/src/network/peers/discover.ts b/packages/beacon-node/src/network/peers/discover.ts index 2090e8bedab6..2805f67b4763 100644 --- a/packages/beacon-node/src/network/peers/discover.ts +++ b/packages/beacon-node/src/network/peers/discover.ts @@ -43,7 +43,7 @@ enum QueryStatusCode { } type QueryStatus = {code: QueryStatusCode.NotActive} | {code: QueryStatusCode.Active; count: number}; -enum DiscoveredPeerStatus { +export enum DiscoveredPeerStatus { bad_score = "bad_score", already_connected = "already_connected", already_dialing = "already_dialing", diff --git a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts index 24fcfaae6cbc..d31cb3e2d7f9 100644 --- a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts +++ b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts @@ -4,7 +4,7 @@ import { getBlockRootFromSignedAggregateAndProofSerialized, getSlotFromAttestationSerialized, getSlotFromSignedAggregateAndProofSerialized, - getSlotFromSignedBlobSidecarSerialized, + getSlotFromBlobSidecarSerialized, getSlotFromSignedBeaconBlockSerialized, } from "../../util/sszBytes.js"; import {GossipType} from "../gossip/index.js"; @@ -43,7 +43,7 @@ export function createExtractBlockSlotRootFns(): ExtractSlotRootFns { return {slot}; }, [GossipType.blob_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { - const slot = getSlotFromSignedBlobSidecarSerialized(data); + const slot = getSlotFromBlobSidecarSerialized(data); if (slot === null) { return null; diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 2e9ab3bb5a11..d31183828b85 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -116,7 +116,7 @@ function 
getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler fork: ForkName, peerIdStr: string, seenTimestampSec: number - ): Promise { + ): Promise { const slot = signedBlock.message.slot; const forkTypes = config.getForkTypes(slot); const blockHex = prettyBytes(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); @@ -126,13 +126,21 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler let blockInput; let blockInputMeta; if (config.getForkSeq(signedBlock.message.slot) >= ForkSeq.deneb) { - const blockInputRes = getBlockInput.getGossipBlockInput(config, { + const blockInputRes = chain.seenGossipBlockInput.getGossipBlockInput(config, { type: GossipedInputType.block, signedBlock, blockBytes, }); + blockInput = blockInputRes.blockInput; blockInputMeta = blockInputRes.blockInputMeta; + + // blockInput can't be returned null, improve by enforcing via return types + if (blockInput === null) { + throw Error( + `Invalid null blockInput returned by getGossipBlockInput for type=${GossipedInputType.block} blockHex=${blockHex} slot=${slot}` + ); + } } else { blockInput = getBlockInput.preDeneb(config, signedBlock, BlockSource.gossip, blockBytes); blockInputMeta = {}; @@ -170,20 +178,23 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } async function validateBeaconBlob( - signedBlob: deneb.SignedBlobSidecar, + blobSidecar: deneb.BlobSidecar, blobBytes: Uint8Array, gossipIndex: number, peerIdStr: string, seenTimestampSec: number ): Promise { - const slot = signedBlob.message.slot; - const blockHex = prettyBytes(signedBlob.message.blockRoot); + const blobBlockHeader = blobSidecar.signedBlockHeader.message; + const slot = blobBlockHeader.slot; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobBlockHeader); + const blockHex = prettyBytes(blockRoot); + const delaySec = chain.clock.secFromSlot(slot, seenTimestampSec); const recvToVal = Date.now() / 1000 - seenTimestampSec; - const 
{blockInput, blockInputMeta} = getBlockInput.getGossipBlockInput(config, { + const {blockInput, blockInputMeta} = chain.seenGossipBlockInput.getGossipBlockInput(config, { type: GossipedInputType.blob, - signedBlob, + blobSidecar, blobBytes, }); @@ -200,7 +211,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler }); try { - await validateGossipBlobSidecar(config, chain, signedBlob, gossipIndex); + await validateGossipBlobSidecar(chain, blobSidecar, gossipIndex); return blockInput; } catch (e) { if (e instanceof BlobSidecarGossipError) { @@ -211,7 +222,11 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } if (e.action === GossipAction.REJECT) { - chain.persistInvalidSszValue(ssz.deneb.SignedBlobSidecar, signedBlob, `gossip_reject_slot_${slot}`); + chain.persistInvalidSszValue( + ssz.deneb.BlobSidecar, + blobSidecar, + `gossip_reject_slot_${slot}_index_${blobSidecar.index}` + ); } } @@ -250,10 +265,20 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler // Returns the delay between the start of `block.slot` and `current time` const delaySec = chain.clock.secFromSlot(signedBlock.message.slot); metrics?.gossipBlock.elapsedTimeTillProcessed.observe(delaySec); + chain.seenGossipBlockInput.prune(); }) .catch((e) => { if (e instanceof BlockError) { switch (e.type.code) { + case BlockErrorCode.DATA_UNAVAILABLE: { + // TODO: create a newevent unknownBlobs and only pull blobs + const slot = signedBlock.message.slot; + const forkTypes = config.getForkTypes(slot); + const rootHex = toHexString(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); + + events.emit(NetworkEvent.unknownBlock, {rootHex, peer: peerIdStr}); + break; + } // ALREADY_KNOWN should not happen with ignoreIfKnown=true above // PARENT_UNKNOWN should not happen, we handled this in validateBeaconBlock() function above case BlockErrorCode.ALREADY_KNOWN: @@ -268,6 +293,7 @@ function getDefaultHandlers(modules: 
ValidatorFnsModules, options: GossipHandler } metrics?.gossipBlock.processBlockErrors.inc({error: e instanceof BlockError ? e.type.code : "NOT_BLOCK_ERROR"}); logger.error("Error receiving block", {slot: signedBlock.message.slot, peer: peerIdStr}, e as Error); + chain.seenGossipBlockInput.prune(); }); } @@ -288,15 +314,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler peerIdStr, seenTimestampSec ); - if (blockInput !== null) { - handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); - } else { - // TODO DENEB: - // - // If block + blobs not fully received in the slot within some deadline, we should trigger block/blob - // pull using req/resp by root pre-emptively even though it will be trigged on seeing any block/blob - // gossip on next slot via missing parent checks - } + handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); }, [GossipType.blob_sidecar]: async ({ @@ -306,13 +324,25 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler seenTimestampSec, }: GossipHandlerParamGeneric) => { const {serializedData} = gossipData; - const signedBlob = sszDeserialize(topic, serializedData); - if (config.getForkSeq(signedBlob.message.slot) < ForkSeq.deneb) { + const blobSidecar = sszDeserialize(topic, serializedData); + if (config.getForkSeq(blobSidecar.signedBlockHeader.message.slot) < ForkSeq.deneb) { throw new GossipActionError(GossipAction.REJECT, {code: "PRE_DENEB_BLOCK"}); } - const blockInput = await validateBeaconBlob(signedBlob, serializedData, topic.index, peerIdStr, seenTimestampSec); + const blockInput = await validateBeaconBlob( + blobSidecar, + serializedData, + topic.index, + peerIdStr, + seenTimestampSec + ); if (blockInput !== null) { - handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); + // TODO DENEB: + // + // With blobsPromise the block import would have been attempted with the receipt of the block gossip + // and should have resolved the availability 
promise, however we could track if the block processing + // was halted and requeue it + // + // handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); } else { // TODO DENEB: // diff --git a/packages/beacon-node/src/network/processor/index.ts b/packages/beacon-node/src/network/processor/index.ts index 1d1fd82a4522..3d067c626f76 100644 --- a/packages/beacon-node/src/network/processor/index.ts +++ b/packages/beacon-node/src/network/processor/index.ts @@ -93,7 +93,7 @@ const PROCESS_UNKNOWN_BLOCK_GOSSIP_OBJECTS_YIELD_EVERY_MS = 50; /** * Reprocess reject reason for metrics */ -enum ReprocessRejectReason { +export enum ReprocessRejectReason { /** * There are too many attestations that have unknown block root. */ @@ -107,9 +107,9 @@ enum ReprocessRejectReason { /** * Cannot accept work reason for metrics */ -enum CannotAcceptWorkReason { +export enum CannotAcceptWorkReason { /** - * Validating or procesing gossip block at current slot. + * Validating or processing gossip block at current slot. */ processingCurrentSlotBlock = "processing_current_slot_block", /** @@ -344,7 +344,10 @@ export class NetworkProcessor { for (const gossipMessages of gossipMessagesByRoot.values()) { for (const message of gossipMessages) { this.metrics?.reprocessGossipAttestations.reject.inc({reason: ReprocessRejectReason.expired}); - this.metrics?.reprocessGossipAttestations.waitSecBeforeReject.set(nowSec - message.seenTimestampSec); + this.metrics?.reprocessGossipAttestations.waitSecBeforeReject.set( + {reason: ReprocessRejectReason.expired}, + nowSec - message.seenTimestampSec + ); // TODO: Should report the dropped job to gossip? 
It will be eventually pruned from the mcache } } diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts index 10e7071f4fdb..41d3e901c41d 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts @@ -78,7 +78,9 @@ export function matchBlockWithBlobs( const blobSidecars: deneb.BlobSidecar[] = []; let blobSidecar: deneb.BlobSidecar; - while ((blobSidecar = allBlobSidecars[blobSideCarIndex])?.slot === block.data.message.slot) { + while ( + (blobSidecar = allBlobSidecars[blobSideCarIndex])?.signedBlockHeader.message.slot === block.data.message.slot + ) { blobSidecars.push(blobSidecar); lastMatchedSlot = block.data.message.slot; blobSideCarIndex++; @@ -111,14 +113,14 @@ export function matchBlockWithBlobs( if ( allBlobSidecars[blobSideCarIndex] !== undefined && // If there are no blobs, the blobs request can give 1 block outside the requested range - allBlobSidecars[blobSideCarIndex].slot <= endSlot + allBlobSidecars[blobSideCarIndex].signedBlockHeader.message.slot <= endSlot ) { throw Error( `Unmatched blobSidecars, blocks=${allBlocks.length}, blobs=${ allBlobSidecars.length } lastMatchedSlot=${lastMatchedSlot}, pending blobSidecars slots=${allBlobSidecars .slice(blobSideCarIndex) - .map((blb) => blb.slot) + .map((blb) => blb.signedBlockHeader.message.slot) .join(",")}` ); } diff --git a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts index 2cd852492220..e3655cd90c6f 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts @@ -1,10 +1,10 @@ -import {GENESIS_SLOT, MAX_REQUEST_BLOCKS_DENEB} from "@lodestar/params"; +import {GENESIS_SLOT, 
MAX_REQUEST_BLOCKS_DENEB, BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; import {deneb, Slot} from "@lodestar/types"; import {fromHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; -import {BLOB_SIDECARS_IN_WRAPPER_INDEX, BLOBSIDECAR_FIXED_SIZE} from "../../../db/repositories/blobSidecars.js"; +import {BLOB_SIDECARS_IN_WRAPPER_INDEX} from "../../../db/repositories/blobSidecars.js"; export async function* onBlobSidecarsByRange( request: deneb.BlobSidecarsByRangeRequest, diff --git a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts index 3bb162d019e3..6aa16a0c2629 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts @@ -1,9 +1,10 @@ import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; import {deneb, RootHex} from "@lodestar/types"; import {toHex, fromHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; -import {BLOB_SIDECARS_IN_WRAPPER_INDEX, BLOBSIDECAR_FIXED_SIZE} from "../../../db/repositories/blobSidecars.js"; +import {BLOB_SIDECARS_IN_WRAPPER_INDEX} from "../../../db/repositories/blobSidecars.js"; export async function* onBlobSidecarsByRoot( requestBody: deneb.BlobSidecarsByRootRequest, diff --git a/packages/beacon-node/src/network/subnets/attnetsService.ts b/packages/beacon-node/src/network/subnets/attnetsService.ts index d76e56677ac6..7eabc2e4114c 100644 --- a/packages/beacon-node/src/network/subnets/attnetsService.ts +++ b/packages/beacon-node/src/network/subnets/attnetsService.ts @@ -34,7 +34,7 @@ const LAST_SEEN_VALIDATOR_TIMEOUT = 150; 
const gossipType = GossipType.beacon_attestation; -enum SubnetSource { +export enum SubnetSource { committee = "committee", random = "random", } diff --git a/packages/beacon-node/src/network/subnets/dllAttnetsService.ts b/packages/beacon-node/src/network/subnets/dllAttnetsService.ts index f7ae0e8d09c2..7236695cb11a 100644 --- a/packages/beacon-node/src/network/subnets/dllAttnetsService.ts +++ b/packages/beacon-node/src/network/subnets/dllAttnetsService.ts @@ -20,7 +20,7 @@ import {computeSubscribedSubnet} from "./util.js"; const gossipType = GossipType.beacon_attestation; -enum SubnetSource { +export enum DLLSubnetSource { committee = "committee", longLived = "long_lived", } @@ -179,7 +179,7 @@ export class DLLAttnetsService implements IAttnetsService { if (dutiedSlot === clockSlot + this.opts.slotsToSubscribeBeforeAggregatorDuty) { // Trigger gossip subscription first, in batch if (dutiedInfo.size > 0) { - this.subscribeToSubnets(Array.from(dutiedInfo.keys()), SubnetSource.committee); + this.subscribeToSubnets(Array.from(dutiedInfo.keys()), DLLSubnetSource.committee); } // Then, register the subscriptions for (const subnet of dutiedInfo.keys()) { @@ -276,7 +276,7 @@ export class DLLAttnetsService implements IAttnetsService { } // First, tell gossip to subscribe to the subnets if not connected already - this.subscribeToSubnets(newSubnets, SubnetSource.longLived); + this.subscribeToSubnets(newSubnets, DLLSubnetSource.longLived); // then update longLivedSubscriptions for (const subnet of toRemoveSubnets) { @@ -289,7 +289,7 @@ export class DLLAttnetsService implements IAttnetsService { } // Only tell gossip to unsubsribe last, longLivedSubscriptions has the latest state - this.unsubscribeSubnets(toRemoveSubnets, this.clock.currentSlot, SubnetSource.longLived); + this.unsubscribeSubnets(toRemoveSubnets, this.clock.currentSlot, DLLSubnetSource.longLived); this.updateMetadata(); } @@ -300,7 +300,7 @@ export class DLLAttnetsService implements IAttnetsService { private 
unsubscribeExpiredCommitteeSubnets(slot: Slot): void { const expired = this.shortLivedSubscriptions.getExpired(slot); if (expired.length > 0) { - this.unsubscribeSubnets(expired, slot, SubnetSource.committee); + this.unsubscribeSubnets(expired, slot, DLLSubnetSource.committee); } } @@ -333,7 +333,7 @@ export class DLLAttnetsService implements IAttnetsService { * Trigger a gossip subcription only if not already subscribed * shortLivedSubscriptions or longLivedSubscriptions should be updated right AFTER this called **/ - private subscribeToSubnets(subnets: number[], src: SubnetSource): void { + private subscribeToSubnets(subnets: number[], src: DLLSubnetSource): void { const forks = getActiveForks(this.config, this.clock.currentEpoch); for (const subnet of subnets) { if (!this.shortLivedSubscriptions.has(subnet) && !this.longLivedSubscriptions.has(subnet)) { @@ -349,7 +349,7 @@ export class DLLAttnetsService implements IAttnetsService { * Trigger a gossip un-subscription only if no-one is still subscribed * If unsubscribe long lived subnets, longLivedSubscriptions should be updated right BEFORE this called **/ - private unsubscribeSubnets(subnets: number[], slot: Slot, src: SubnetSource): void { + private unsubscribeSubnets(subnets: number[], slot: Slot, src: DLLSubnetSource): void { // No need to unsubscribeTopic(). 
Return early to prevent repetitive extra work if (this.opts.subscribeAllSubnets) return; diff --git a/packages/beacon-node/src/util/array.ts b/packages/beacon-node/src/util/array.ts index 72f81fbee72b..a154ee1bbf34 100644 --- a/packages/beacon-node/src/util/array.ts +++ b/packages/beacon-node/src/util/array.ts @@ -45,6 +45,9 @@ export class LinkedList { return this._length; } + /** + * Add to the end of the list + */ push(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -64,6 +67,9 @@ export class LinkedList { this._length++; } + /** + * Add to the beginning of the list + */ unshift(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -83,6 +89,25 @@ export class LinkedList { this._length++; } + insertAfter(after: T, data: T): void { + const node = this.findNode(after); + if (!node) { + return; + } + + if (node === this.tail) { + this.push(data); + return; + } + + const newNode = new Node(data); + newNode.next = node.next; + newNode.prev = node; + node.next = newNode; + if (newNode.next) newNode.next.prev = newNode; + this._length++; + } + pop(): T | null { const oldTail = this.tail; if (!oldTail) return null; @@ -173,6 +198,48 @@ export class LinkedList { return false; } + /** + * Move an existing item to the head of the list. + * If the item is not found, do nothing. + */ + moveToHead(item: T): void { + // if this is head, do nothing + if (this.head?.data === item) { + return; + } + + const found = this.deleteFirst(item); + if (found) { + this.unshift(item); + } + } + + /** + * Move an existing item to the second position of the list. + * If the item is not found, do nothing. 
+ */ + moveToSecond(item: T): void { + // if this is head or second, do nothing + if (this.head?.data === item || this.head?.next?.data === item) { + return; + } + + const found = this.deleteFirst(item); + if (found) { + if (this.head?.next) { + const oldSecond = this.head.next; + const newSecond = new Node(item); + this.head.next = newSecond; + newSecond.next = oldSecond; + newSecond.prev = this.head; + oldSecond.prev = newSecond; + } else { + // only 1 item in the list + this.push(item); + } + } + } + next(): IteratorResult { if (!this.pointer) { return {done: true, value: undefined}; @@ -222,4 +289,23 @@ export class LinkedList { return arr; } + + /** + * Check if the item is in the list. + * @returns + */ + has(item: T): boolean { + return this.findNode(item) !== null; + } + + private findNode(item: T): Node | null { + let node = this.head; + while (node) { + if (node.data === item) { + return node; + } + node = node.next; + } + return null; + } } diff --git a/packages/beacon-node/src/util/blobs.ts b/packages/beacon-node/src/util/blobs.ts new file mode 100644 index 000000000000..bbad27f684ed --- /dev/null +++ b/packages/beacon-node/src/util/blobs.ts @@ -0,0 +1,48 @@ +import SHA256 from "@chainsafe/as-sha256"; +import {Tree} from "@chainsafe/persistent-merkle-tree"; +import {VERSIONED_HASH_VERSION_KZG, KZG_COMMITMENT_GINDEX0, ForkName} from "@lodestar/params"; +import {deneb, ssz, allForks} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {signedBlockToSignedHeader} from "@lodestar/state-transition"; + +type VersionHash = Uint8Array; + +export function kzgCommitmentToVersionedHash(kzgCommitment: deneb.KZGCommitment): VersionHash { + const hash = SHA256.digest(kzgCommitment); + // Equivalent to `VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]` + hash[0] = VERSIONED_HASH_VERSION_KZG; + return hash; +} + +export function computeInclusionProof( + fork: ForkName, + body: allForks.BeaconBlockBody, + index: number +): 
deneb.KzgCommitmentInclusionProof { + const bodyView = (ssz[fork].BeaconBlockBody as allForks.AllForksSSZTypes["BeaconBlockBody"]).toView(body); + const commitmentGindex = KZG_COMMITMENT_GINDEX0 + index; + return new Tree(bodyView.node).getSingleProof(BigInt(commitmentGindex)); +} + +export function computeBlobSidecars( + config: ChainForkConfig, + signedBlock: allForks.SignedBeaconBlock, + contents: deneb.Contents & {kzgCommitmentInclusionProofs?: deneb.KzgCommitmentInclusionProof[]} +): deneb.BlobSidecars { + const blobKzgCommitments = (signedBlock as deneb.SignedBeaconBlock).message.body.blobKzgCommitments; + if (blobKzgCommitments === undefined) { + throw Error("Invalid block with missing blobKzgCommitments for computeBlobSidecars"); + } + + const signedBlockHeader = signedBlockToSignedHeader(config, signedBlock); + const fork = config.getForkName(signedBlockHeader.message.slot); + + return blobKzgCommitments.map((kzgCommitment, index) => { + const blob = contents.blobs[index]; + const kzgProof = contents.kzgProofs[index]; + const kzgCommitmentInclusionProof = + contents.kzgCommitmentInclusionProofs?.[index] ?? 
computeInclusionProof(fork, signedBlock.message.body, index); + + return {index, blob, kzgCommitment, kzgProof, signedBlockHeader, kzgCommitmentInclusionProof}; + }); +} diff --git a/packages/beacon-node/src/util/queue/options.ts b/packages/beacon-node/src/util/queue/options.ts index c3846cd8be1f..e55d413088e3 100644 --- a/packages/beacon-node/src/util/queue/options.ts +++ b/packages/beacon-node/src/util/queue/options.ts @@ -1,4 +1,4 @@ -import {IGauge, IHistogram} from "../../metrics/index.js"; +import {Gauge, GaugeExtra, Histogram} from "@lodestar/utils"; export enum QueueType { FIFO = "FIFO", @@ -19,12 +19,12 @@ export type JobQueueOpts = { }; export type QueueMetrics = { - length: IGauge; - droppedJobs: IGauge; + length: GaugeExtra; + droppedJobs: Gauge; /** Compute async utilization rate with `rate(metrics_name[1m])` */ - jobTime: IHistogram; - jobWaitTime: IHistogram; - concurrency: IGauge; + jobTime: Histogram; + jobWaitTime: Histogram; + concurrency: Gauge; }; export const defaultQueueOpts: Required< diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts index 0c258df35041..cd12c4bd9c18 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -1,6 +1,7 @@ import {BitArray, deserializeUint8ArrayBitListFromBytes} from "@chainsafe/ssz"; import {BLSSignature, RootHex, Slot} from "@lodestar/types"; import {toHex} from "@lodestar/utils"; +import {BYTES_PER_FIELD_ELEMENT, FIELD_ELEMENTS_PER_BLOB} from "@lodestar/params"; export type BlockRootHex = RootHex; export type AttDataBase64 = string; @@ -180,23 +181,18 @@ export function getSlotFromSignedBeaconBlockSerialized(data: Uint8Array): Slot | } /** - * 4 + 96 = 100 - * ``` - * class SignedBlobSidecar(Container): - * message: BlobSidecar [fixed] - * signature: BLSSignature [fixed] - * * class BlobSidecar(Container): - * blockRoot: Root [fixed - 32 bytes ], - * index: BlobIndex [fixed - 8 bytes ], - * slot: Slot [fixed - 8 
bytes] - * ... - * ``` + * index: BlobIndex [fixed - 8 bytes ], + * blob: Blob, BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB + * kzgCommitment: Bytes48, + * kzgProof: Bytes48, + * signedBlockHeader: + * slot: 8 bytes */ -const SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR = 32 + 8; +const SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR = 8 + BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB + 48 + 48; -export function getSlotFromSignedBlobSidecarSerialized(data: Uint8Array): Slot | null { +export function getSlotFromBlobSidecarSerialized(data: Uint8Array): Slot | null { if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR + SLOT_SIZE) { return null; } diff --git a/packages/beacon-node/test/__mocks__/mockedBeaconChain.ts b/packages/beacon-node/test/__mocks__/mockedBeaconChain.ts index 3c5dacc9c971..c72d22471ce8 100644 --- a/packages/beacon-node/test/__mocks__/mockedBeaconChain.ts +++ b/packages/beacon-node/test/__mocks__/mockedBeaconChain.ts @@ -79,6 +79,7 @@ vi.mock("../../src/chain/index.js", async (requireActual) => { // @ts-expect-error beaconProposerCache: new BeaconProposerCache(), shufflingCache: new ShufflingCache(), + produceCommonBlockBody: vi.fn(), produceBlock: vi.fn(), produceBlindedBlock: vi.fn(), getCanonicalBlockAtSlot: vi.fn(), diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh index dee850740370..fbf9dcaaf929 100755 --- a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh @@ -5,4 +5,4 @@ currentDir=$(pwd) . 
$scriptDir/common-setup.sh -docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --dataDir /data/ethereumjs --gethGenesis /data/genesis.json --rpc --rpcEngine --jwt-secret /data/jwtsecret --logLevel debug --isSingleNode +docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --dataDir /data/ethereumjs --gethGenesis /data/genesis.json --rpc --rpcEngineAddr 0.0.0.0 --rpcAddr 0.0.0.0 --rpcEngine --jwt-secret /data/jwtsecret --logLevel debug --isSingleNode diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index 0ab7b3b363b5..47d72c1226e1 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -1,7 +1,7 @@ import path from "node:path"; import {expect} from "chai"; import {toHexString} from "@chainsafe/ssz"; -import {BeaconStateAllForks, isExecutionStateType} from "@lodestar/state-transition"; +import {BeaconStateAllForks, isExecutionStateType, signedBlockToSignedHeader} from "@lodestar/state-transition"; import {InputType} from "@lodestar/spec-test-util"; import {CheckpointWithHex, ForkChoice} from "@lodestar/fork-choice"; import {phase0, allForks, bellatrix, ssz, RootHex, deneb} from "@lodestar/types"; @@ -10,6 +10,7 @@ import {createBeaconConfig} from "@lodestar/config"; import {ACTIVE_PRESET, ForkSeq, isForkBlobs} from "@lodestar/params"; import {BeaconChain} from "../../../src/chain/index.js"; import {ClockEvent} from "../../../src/util/clock.js"; +import {computeInclusionProof} from "../../../src/util/blobs.js"; import {createCachedBeaconStateTest} from "../../utils/cachedBeaconState.js"; import {testLogger} from "../../utils/logger.js"; import {getConfig} from "../../utils/config.js"; @@ -195,20 +196,14 @@ const forkChoiceTest = throw 
Error("Invalid blobs or proofs lengths"); } - const blockRoot = config - .getForkTypes(signedBlock.message.slot) - .BeaconBlock.hashTreeRoot(signedBlock.message); const blobSidecars: deneb.BlobSidecars = blobs.map((blob, index) => { return { - blockRoot, index, - slot, blob, - // proofs isn't undefined here but typescript(check types) can't figure it out - kzgProof: (proofs ?? [])[index], kzgCommitment: commitments[index], - blockParentRoot: signedBlock.message.parentRoot, - proposerIndex: signedBlock.message.proposerIndex, + kzgProof: (proofs ?? [])[index], + signedBlockHeader: signedBlockToSignedHeader(config, signedBlock), + kzgCommitmentInclusionProof: computeInclusionProof(fork, signedBlock.message.body, index), }; }); diff --git a/packages/beacon-node/test/spec/specTestVersioning.ts b/packages/beacon-node/test/spec/specTestVersioning.ts index 3f1aad878e65..20125520321d 100644 --- a/packages/beacon-node/test/spec/specTestVersioning.ts +++ b/packages/beacon-node/test/spec/specTestVersioning.ts @@ -15,7 +15,7 @@ import {DownloadTestsOptions} from "@lodestar/spec-test-util"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); export const ethereumConsensusSpecsTests: DownloadTestsOptions = { - specVersion: "v1.4.0-beta.2-hotfix", + specVersion: "v1.4.0-beta.5", // Target directory is the host package root: 'packages/*/spec-tests' outputDir: path.join(__dirname, "../../spec-tests"), specTestsRepoUrl: "https://github.com/ethereum/consensus-spec-tests", diff --git a/packages/beacon-node/test/spec/utils/specTestIterator.ts b/packages/beacon-node/test/spec/utils/specTestIterator.ts index a9310d53ac81..084d3d00fd48 100644 --- a/packages/beacon-node/test/spec/utils/specTestIterator.ts +++ b/packages/beacon-node/test/spec/utils/specTestIterator.ts @@ -64,6 +64,7 @@ export const defaultSkipOpts: SkipOpts = { "capella/light_client/single_merkle_proof/BeaconBlockBody", "deneb/light_client/single_merkle_proof/BeaconBlockBody", ], + skippedRunners: 
["merkle_proof"], }; /** diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts index 3e96f3b932c8..9ca426672efe 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts @@ -85,7 +85,11 @@ describe("api/validator - produceBlockV2", function () { const feeRecipient = "0xcccccccccccccccccccccccccccccccccccccccc"; const api = getValidatorApi(modules); - server.chainStub.produceBlock.mockResolvedValue({block: fullBlock, executionPayloadValue, consensusBlockValue}); + server.chainStub.produceBlock.mockResolvedValue({ + block: fullBlock, + executionPayloadValue, + consensusBlockValue, + }); // check if expectedFeeRecipient is passed to produceBlock await api.produceBlockV2(slot, randaoReveal, graffiti, {feeRecipient}); diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts index 83e1e7887510..f1aa2cb791df 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts @@ -9,6 +9,7 @@ import {getValidatorApi} from "../../../../../src/api/impl/validator/index.js"; import {testLogger} from "../../../../utils/logger.js"; import {ApiImplTestModules, setupApiImplTestServer} from "../../../../__mocks__/apiMocks.js"; import {ExecutionBuilderHttp} from "../../../../../src/execution/builder/http.js"; +import {CommonBlockBody} from "../../../../../src/chain/interface.js"; /* eslint-disable @typescript-eslint/naming-convention */ describe("api/validator - produceBlockV3", function () { @@ -41,23 +42,38 @@ describe("api/validator - produceBlockV3", function () { vi.clearAllMocks(); }); - const testCases: [routes.validator.BuilderSelection, number | null, number | null, 
number, string][] = [ - [routes.validator.BuilderSelection.MaxProfit, 1, 0, 0, "builder"], - [routes.validator.BuilderSelection.MaxProfit, 1, 2, 1, "engine"], - [routes.validator.BuilderSelection.MaxProfit, null, 0, 0, "engine"], - [routes.validator.BuilderSelection.MaxProfit, 0, null, 1, "builder"], - - [routes.validator.BuilderSelection.BuilderAlways, 1, 2, 0, "builder"], - [routes.validator.BuilderSelection.BuilderAlways, 1, 0, 1, "builder"], - [routes.validator.BuilderSelection.BuilderAlways, null, 0, 0, "engine"], - [routes.validator.BuilderSelection.BuilderAlways, 0, null, 1, "builder"], - - [routes.validator.BuilderSelection.BuilderOnly, 0, 2, 0, "builder"], - [routes.validator.BuilderSelection.ExecutionOnly, 2, 0, 1, "execution"], + const testCases: [routes.validator.BuilderSelection, number | null, number | null, number, boolean, string][] = [ + [routes.validator.BuilderSelection.MaxProfit, 1, 0, 0, false, "builder"], + [routes.validator.BuilderSelection.MaxProfit, 1, 2, 1, false, "engine"], + [routes.validator.BuilderSelection.MaxProfit, null, 0, 0, false, "engine"], + [routes.validator.BuilderSelection.MaxProfit, 0, null, 1, false, "builder"], + [routes.validator.BuilderSelection.MaxProfit, 0, null, 1, true, "builder"], + [routes.validator.BuilderSelection.MaxProfit, 1, 1, 1, true, "engine"], + [routes.validator.BuilderSelection.MaxProfit, 2, 1, 1, true, "engine"], + + [routes.validator.BuilderSelection.BuilderAlways, 1, 2, 0, false, "builder"], + [routes.validator.BuilderSelection.BuilderAlways, 1, 0, 1, false, "builder"], + [routes.validator.BuilderSelection.BuilderAlways, null, 0, 0, false, "engine"], + [routes.validator.BuilderSelection.BuilderAlways, 0, null, 1, false, "builder"], + [routes.validator.BuilderSelection.BuilderAlways, 0, 1, 1, true, "engine"], + [routes.validator.BuilderSelection.BuilderAlways, 1, 1, 1, true, "engine"], + [routes.validator.BuilderSelection.BuilderAlways, 1, null, 1, true, "builder"], + + 
[routes.validator.BuilderSelection.BuilderOnly, 0, 2, 0, false, "builder"], + [routes.validator.BuilderSelection.ExecutionOnly, 2, 0, 1, false, "engine"], + [routes.validator.BuilderSelection.BuilderOnly, 1, 1, 0, true, "builder"], + [routes.validator.BuilderSelection.ExecutionOnly, 1, 1, 1, true, "engine"], ]; testCases.forEach( - ([builderSelection, builderPayloadValue, enginePayloadValue, consensusBlockValue, finalSelection]) => { + ([ + builderSelection, + builderPayloadValue, + enginePayloadValue, + consensusBlockValue, + shouldOverrideBuilder, + finalSelection, + ]) => { it(`produceBlockV3 - ${finalSelection} produces block`, async () => { syncStub = server.syncStub; modules = { @@ -85,10 +101,26 @@ describe("api/validator - produceBlockV3", function () { const api = getValidatorApi(modules); if (enginePayloadValue !== null) { + const commonBlockBody: CommonBlockBody = { + attestations: fullBlock.body.attestations, + attesterSlashings: fullBlock.body.attesterSlashings, + deposits: fullBlock.body.deposits, + proposerSlashings: fullBlock.body.proposerSlashings, + eth1Data: fullBlock.body.eth1Data, + graffiti: fullBlock.body.graffiti, + randaoReveal: fullBlock.body.randaoReveal, + voluntaryExits: fullBlock.body.voluntaryExits, + blsToExecutionChanges: [], + syncAggregate: fullBlock.body.syncAggregate, + }; + + chainStub.produceCommonBlockBody.mockResolvedValue(commonBlockBody); + chainStub.produceBlock.mockResolvedValue({ block: fullBlock, executionPayloadValue: BigInt(enginePayloadValue), consensusBlockValue: BigInt(consensusBlockValue), + shouldOverrideBuilder, }); } else { chainStub.produceBlock.mockRejectedValue(Error("not produced")); diff --git a/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts b/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts new file mode 100644 index 000000000000..c389e1b81e70 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts @@ -0,0 
+1,165 @@ +import {describe, it, expect} from "vitest"; +import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {ssz} from "@lodestar/types"; + +import {SeenGossipBlockInput} from "../../../../src/chain/seenCache/seenGossipBlockInput.js"; +import {BlockInputType, GossipedInputType} from "../../../../src/chain/blocks/types.js"; + +/* eslint-disable @typescript-eslint/naming-convention */ +describe("SeenGossipBlockInput", () => { + const chainConfig = createChainForkConfig({ + ...defaultChainConfig, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + }); + const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); + const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); + const seenGossipBlockInput = new SeenGossipBlockInput(); + + // array of numBlobs, events where events are array of + // [block|blob11|blob2, pd | bp | null | error string reflecting the expected result] + const testCases: [string, number, [string, string | null][]][] = [ + ["no blobs", 0, [["block", "pd"]]], + [ + "1 blob, block first", + 1, + [ + ["block", "bp"], + ["blob0", "pd"], + ], + ], + [ + "1 blob, blob first", + 1, + [ + ["blob0", null], + ["block", "pd"], + ], + ], + [ + "6 blobs, block first", + 6, + [ + ["block", "bp"], + ["blob1", "bp"], + ["blob0", "bp"], + ["blob5", "bp"], + ["blob4", "bp"], + ["blob2", "bp"], + ["blob3", "pd"], + ], + ], + [ + "4 blobs, block in mid", + 4, + [ + ["blob1", null], + ["blob3", null], + ["block", "bp"], + ["blob0", "bp"], + ["blob2", "pd"], + ], + ], + [ + "3 blobs, block in end", + 3, + [ + ["blob1", null], + ["blob0", null], + ["blob2", null], + ["block", "pd"], + ], + ], + ]; + + // lets start from a random slot to build cases + let slot = 7456; + for (const testCase of testCases) { + const [testName, numBlobs, events] = testCase; + + it(`${testName}`, () => { + const signedBlock = ssz.deneb.SignedBeaconBlock.defaultValue(); + // assign slot and increment for the 
next block so as to keep these block testcases distinguished + // in the cache + signedBlock.message.slot = slot++; + signedBlock.message.body.blobKzgCommitments = Array.from({length: numBlobs}, () => + ssz.deneb.KZGCommitment.defaultValue() + ); + + // create a dummy signed block header with matching body root + const bodyRoot = ssz.deneb.BeaconBlockBody.hashTreeRoot(signedBlock.message.body); + const signedBlockHeader = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + signedBlockHeader.message.slot = signedBlock.message.slot; + signedBlockHeader.message.bodyRoot = bodyRoot; + + const blobSidecars = Array.from({length: numBlobs}, (_val, index) => { + const message = {...ssz.deneb.BlobSidecar.defaultValue(), signedBlockHeader, index}; + return message; + }); + + for (const testEvent of events) { + const [inputEvent, expectedRes] = testEvent; + const eventType = inputEvent.includes("block") ? GossipedInputType.block : GossipedInputType.blob; + const expectedResponseType = parseResponseType(expectedRes); + + try { + if (eventType === GossipedInputType.block) { + const blockInputRes = seenGossipBlockInput.getGossipBlockInput(config, { + type: GossipedInputType.block, + signedBlock, + blockBytes: null, + }); + + if (expectedResponseType instanceof Error) { + expect.fail(`expected to fail with error: ${expectedResponseType.message}`); + } else if (expectedResponseType === null) { + expect(blockInputRes).toBeNull; + } else { + expect(blockInputRes.blockInput?.type).to.be.equal(expectedResponseType); + } + } else { + const index = parseInt(inputEvent.split("blob")[1] ?? 
"0"); + const blobSidecar = blobSidecars[index]; + expect(blobSidecar).not.equal(undefined); + + const blockInputRes = seenGossipBlockInput.getGossipBlockInput(config, { + type: GossipedInputType.blob, + blobSidecar, + blobBytes: null, + }); + + if (expectedResponseType instanceof Error) { + expect.fail(`expected to fail with error: ${expectedResponseType.message}`); + } else if (expectedResponseType === null) { + expect(blockInputRes).toBeNull; + } else { + expect(blockInputRes.blockInput?.type).to.equal(expectedResponseType); + } + } + } catch (e) { + if (!(e as Error).message.includes("expected to fail with error")) { + if (!(expectedResponseType instanceof Error)) { + expect.fail( + `expected not to fail with respose=${expectedResponseType} but errored: ${(e as Error).message}` + ); + } + } + } + } + }); + } +}); + +function parseResponseType(expectedRes: string | null): BlockInputType | null | Error { + switch (expectedRes) { + case null: + return null; + case "pd": + return BlockInputType.postDeneb; + case "bp": + return BlockInputType.blobsPromise; + default: + return Error(expectedRes); + } +} diff --git a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts new file mode 100644 index 000000000000..62f2bff13d19 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts @@ -0,0 +1,120 @@ +import {describe, it, expect, beforeEach} from "vitest"; +import {toHexString} from "@chainsafe/ssz"; +import {EpochShuffling} from "@lodestar/state-transition"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition/src/types.js"; +import {FIFOBlockStateCache} from "../../../../src/chain/stateCache/index.js"; +import {generateCachedState} from "../../../utils/state.js"; + +describe("FIFOBlockStateCache", function () { + let cache: FIFOBlockStateCache; + const shuffling: 
EpochShuffling = { + epoch: 0, + activeIndices: [], + shuffling: [], + committees: [], + committeesPerSlot: 1, + }; + + const state1 = generateCachedState({slot: 0}); + const key1 = toHexString(state1.hashTreeRoot()); + state1.epochCtx.currentShuffling = {...shuffling, epoch: 0}; + + const state2 = generateCachedState({slot: 1 * SLOTS_PER_EPOCH}); + const key2 = toHexString(state2.hashTreeRoot()); + state2.epochCtx.currentShuffling = {...shuffling, epoch: 1}; + + const state3 = generateCachedState({slot: 2 * SLOTS_PER_EPOCH}); + const key3 = toHexString(state3.hashTreeRoot()); + state3.epochCtx.currentShuffling = {...shuffling, epoch: 2}; + + beforeEach(function () { + // max 2 items + cache = new FIFOBlockStateCache({maxBlockStates: 2}, {}); + cache.add(state1); + cache.add(state2); + }); + + const testCases: { + name: string; + headState: CachedBeaconStateAllForks; + addAsHeadArr: boolean[]; + keptStates: string[]; + prunedState: string; + }[] = [ + { + name: "add as head, prune key1", + headState: state2, + addAsHeadArr: [true], + keptStates: [key3, key2], + prunedState: key1, + }, + { + name: "add, prune key1", + headState: state2, + addAsHeadArr: [false], + keptStates: [key2, key3], + prunedState: key1, + }, + { + name: "add as head, prune key2", + headState: state1, + addAsHeadArr: [true], + keptStates: [key3, key1], + prunedState: key2, + }, + { + name: "add, prune key2", + headState: state1, + addAsHeadArr: [false], + keptStates: [key1, key3], + prunedState: key2, + }, + // same flow to importBlock + { + name: "add then set as head, prune key1", + headState: state2, + addAsHeadArr: [false, true], + keptStates: [key3, key2], + prunedState: key1, + }, + { + name: "add then set as head, prune key2", + headState: state1, + addAsHeadArr: [false, true], + keptStates: [key3, key1], + prunedState: key2, + }, + ]; + + for (const {name, headState, addAsHeadArr, keptStates, prunedState} of testCases) { + it(name, () => { + // move to head this state + 
cache.setHeadState(headState); + expect(cache.size).to.be.equal(2, "Size must be same as initial 2"); + for (const addAsHead of addAsHeadArr) { + cache.add(state3, addAsHead); + } + expect(cache.size).to.be.equal(2, "Size should reduce to initial 2 after prunning"); + expect(cache.dumpKeyOrder()).toEqual(keptStates); + expect(cache.get(prunedState)).toBeNull(); + for (const key of keptStates) { + expect(cache.get(key), `must have key ${key}`).to.be.not.null; + } + }); + } + + it("Should not prune newly added state", () => { + cache = new FIFOBlockStateCache({maxBlockStates: 1}, {}); + cache.setHeadState(state1); + // Size must be same as initial 1 + expect(cache.size).toEqual(1); + cache.add(state2); + // Should not deleted newly added state + expect(cache.size).toEqual(2); + cache.add(state3); + // Should delete 1 state + expect(cache.size).toEqual(2); + expect(cache.dumpKeyOrder()).toEqual([key1, key3]); + }); +}); diff --git a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts new file mode 100644 index 000000000000..83a2dddd65dd --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts @@ -0,0 +1,954 @@ +import {describe, it, expect, beforeAll, beforeEach} from "vitest"; +import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; +import {CachedBeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import {RootHex, phase0} from "@lodestar/types"; +import {mapValues, toHexString} from "@lodestar/utils"; +import {PersistentCheckpointStateCache} from "../../../../src/chain/stateCache/persistentCheckpointsCache.js"; +import {checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; +import {generateCachedState} from "../../../utils/state.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; 
+import {testLogger} from "../../../utils/logger.js"; +import {getTestDatastore} from "../../../utils/chain/stateCache/datastore.js"; +import {CheckpointHex} from "../../../../src/chain/stateCache/types.js"; +import {toCheckpointHex} from "../../../../src/chain/index.js"; + +describe("PersistentCheckpointStateCache", function () { + let root0a: Buffer, root0b: Buffer, root1: Buffer, root2: Buffer; + let cp0a: phase0.Checkpoint, cp0b: phase0.Checkpoint, cp1: phase0.Checkpoint, cp2: phase0.Checkpoint; + let cp0aHex: CheckpointHex, cp0bHex: CheckpointHex, cp1Hex: CheckpointHex, cp2Hex: CheckpointHex; + let persistent0bKey: RootHex; + const startSlotEpoch20 = computeStartSlotAtEpoch(20); + const startSlotEpoch21 = computeStartSlotAtEpoch(21); + const startSlotEpoch22 = computeStartSlotAtEpoch(22); + let cache: PersistentCheckpointStateCache; + let fileApisBuffer: Map; + let states: Record<"cp0a" | "cp0b" | "cp1" | "cp2", CachedBeaconStateAllForks>; + let stateBytes: Record<"cp0a" | "cp0b" | "cp1" | "cp2", Uint8Array>; + + beforeAll(() => { + root0a = Buffer.alloc(32); + root0b = Buffer.alloc(32, 1); + root1 = Buffer.alloc(32, 2); + root2 = Buffer.alloc(32, 3); + root0b[31] = 1; + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1--------root2 + // | + // 0a + // root0a is of the last slot of epoch 19 + cp0a = {epoch: 20, root: root0a}; + // root0b is of the first slot of epoch 20 + cp0b = {epoch: 20, root: root0b}; + cp1 = {epoch: 21, root: root1}; + cp2 = {epoch: 22, root: root2}; + [cp0aHex, cp0bHex, cp1Hex, cp2Hex] = [cp0a, cp0b, cp1, cp2].map((cp) => toCheckpointHex(cp)); + persistent0bKey = toHexString(checkpointToDatastoreKey(cp0b)); + const allStates = [cp0a, cp0b, cp1, cp2] + .map((cp) => generateCachedState({slot: cp.epoch * SLOTS_PER_EPOCH})) + .map((state, i) => { + const stateEpoch = computeEpochAtSlot(state.slot); + if (stateEpoch === 20 && i === 0) { + // cp0a + 
state.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + state.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0a); + return state; + } + + // other states based on cp0b + state.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + state.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0b); + + if (stateEpoch >= 21) { + state.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root1); + } + if (stateEpoch >= 22) { + state.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root2); + } + return state; + }); + + states = { + // Previous Root Checkpoint State of epoch 20 + cp0a: allStates[0], + // Current Root Checkpoint State of epoch 20 + cp0b: allStates[1], + // Current Root Checkpoint State of epoch 21 + cp1: allStates[2], + // Current Root Checkpoint State of epoch 22 + cp2: allStates[3], + }; + stateBytes = mapValues(states, (state) => state.serialize()); + }); + + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + it("getLatest", () => { + // cp0 + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)?.hashTreeRoot()).toEqual(states["cp0a"].hashTreeRoot()); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch + 1)?.hashTreeRoot()).toEqual(states["cp0a"].hashTreeRoot()); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch - 1)?.hashTreeRoot()).to.be.undefined; + + // cp1 + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch + 1)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch - 
1)?.hashTreeRoot()).to.be.undefined; + + // cp2 + expect(cache.getLatest(cp2Hex.rootHex, cp2.epoch)?.hashTreeRoot()).to.be.undefined; + }); + + it("getOrReloadLatest", async () => { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + + // cp0b is persisted + expect(fileApisBuffer.size).toEqual(1); + expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); + + // getLatest() does not reload from disk + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + expect(cache.getLatest(cp0bHex.rootHex, cp0b.epoch)).to.be.null; + + // cp0a has the root from previous epoch so we only prune it from db + expect(await cache.getOrReloadLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + // but getOrReloadLatest() does for cp0b + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch))?.serialize()).toEqual(stateBytes["cp0b"]); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch + 1))?.serialize()).toEqual(stateBytes["cp0b"]); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch - 1))?.serialize()).to.be.undefined; + }); + + it("pruneFinalized and getStateOrBytes", async function () { + cache.add(cp2, states["cp2"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + // cp0 is persisted + expect(fileApisBuffer.size).toEqual(1); + expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // cp1 is in memory + expect(cache.get(cp1Hex)).to.be.not.null; + // cp2 is in memory + expect(cache.get(cp2Hex)).to.be.not.null; + // finalize epoch cp2 + cache.pruneFinalized(cp2.epoch); + expect(fileApisBuffer.size).toEqual(0); + expect(cache.get(cp1Hex)).to.be.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(await cache.getStateOrBytes(cp0bHex)).to.be.null; 
+ }); + + describe("findSeedStateToReload", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1--------root2 + // | + // 0a + it("single state at lowest memory epoch", async function () { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // | | + // 0a------------------------------root3 + // ^ ^ + // cp1a={0a, 21} {0a, 22}=cp2a + it("multiple states at lowest memory epoch", async function () { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0a"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + cp1aState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.commit(); + cache.add(cp1a, cp1aState); + + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp2aState.commit(); + cache.add(cp2a, cp2aState); + + const root3 = Buffer.alloc(32, 100); + const state3 = cp2aState.clone(); 
+ state3.slot = 22 * SLOTS_PER_EPOCH + 3; + state3.commit(); + await cache.processState(toHexString(root3), state3); + + // state of {0a, 21} is choosen because it was built from cp0a + expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(cp1aState.hashTreeRoot()); + // cp1 is choosen for 0b because it was built from cp0b + expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + const randomRoot = Buffer.alloc(32, 101); + // for other random root it'll pick the first state of epoch 21 which is states["cp1"] + expect(cache.findSeedStateToReload({epoch: 20, rootHex: toHexString(randomRoot)})?.hashTreeRoot()).toEqual( + states["cp1"].hashTreeRoot() + ); + }); + }); + + describe("processState, maxEpochsInMemory = 2", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2-----root3 + // | + // 0a + it("no reorg", async function () { + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp2"].clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await 
assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 22 has 1 checkpoint state + expect(cache.get(cp2Hex)).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ ^ + // || | | | | + // |0b--------root1--------root2-root3 | + // | | + // 0a |---------root4 + it("reorg in same epoch", async function () { + // mostly the same to the above test + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp2"].clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot4 = states["cp2"].clone(); + blockStateRoot4.slot = 22 * SLOTS_PER_EPOCH + 4; + const root4 = Buffer.alloc(32, 101); + // process state of root4 + await cache.processState(toHexString(root4), blockStateRoot4); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 22 has 1 checkpoint state + expect(cache.get(cp2Hex)).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0bHex)).to.be.null; + // but 
cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^ ^ ^ ^ ^ + // | | | | | + // 0b---------root1-----|-root2 | + // | | + // |------root3 + // 1a ^ + // | + // {1a, 22}=cp2a + it("reorg 1 epoch", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // regen generates cp2a + const root1a = Buffer.alloc(32, 100); + const cp2a = {epoch: 22, root: root1a}; + const cp2aState = states["cp1"].clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + // assuming reorg block is at slot 5 of epoch 21 + cp2aState.blockRoots.set((startSlotEpoch21 + 5) % SLOTS_PER_HISTORICAL_ROOT, root1a); + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root1a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + + const root3 = Buffer.alloc(32, 101); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // epoch 22 has 2 checkpoint states + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^ ^ ^ ^ ^ + // | | | | | + // 0b--------|root1-------root2 | + // | | + // |-----------------root3 + // 
0a ^ ^ + // | | + // cp1a={0a, 21} {0a, 22}=cp2a + it("reorg 2 epochs", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // reload cp0b from disk + expect((await cache.getOrReload(toCheckpointHex(cp0b)))?.serialize()).toStrictEqual(stateBytes["cp0b"]); + + // regen generates cp1a + const root0a = Buffer.alloc(32, 100); + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0b"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + // assuming reorg block is at slot 5 of epoch 20 + cp1aState.blockRoots.set((startSlotEpoch20 + 5) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp1a, cp1aState); + + // regen generates cp2a + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + + const root3 = Buffer.alloc(32, 101); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // |/ | + // 0a---------------------------root3 
+ // ^ ^ + // | | + // cp1a={0a, 21} {0a, 22}=cp2a + it("reorg 3 epochs, persist cp 0a", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // cp0a was pruned from memory and not in disc + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // regen needs to regen cp0a + cache.add(cp0a, states["cp0a"]); + + // regen generates cp1a + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = generateCachedState({slot: 21 * SLOTS_PER_EPOCH}); + cp1aState.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp1a, cp1aState); + + // regen generates cp2a + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + blockStateRoot3.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + + // regen populates cache when producing blockStateRoot3 + + const root3 = Buffer.alloc(32, 100); + // process state of root3 + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + 
expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // || | + // ||---------------------------root3 + // 0a ^ ^ + // | | + // cp1b={0b, 21} {0b, 22}=cp2b + it("reorg 3 epochs, prune but no persist", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // cp0a was pruned from memory and not in disc + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // regen needs to reload cp0b + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + + // regen generates cp1b + const cp1b = {epoch: 21, root: root0b}; + const cp1bState = states["cp0b"].clone(); + cp1bState.slot = 21 * SLOTS_PER_EPOCH; + cp1bState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0b); + cache.add(cp1b, cp1bState); + + // regen generates cp2b + const cp2b = {epoch: 22, root: root0b}; + const cp2bState = cp1bState.clone(); + cp2bState.slot = 22 * SLOTS_PER_EPOCH; + cp2bState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0b); + cache.add(cp2b, cp2bState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2bState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state of root3, nothing is persisted + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(0); + // but state of cp0b is pruned from memory + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + 
expect(cache.get(toCheckpointHex(cp1b))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2b))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + }); + + describe("processState, maxEpochsInMemory = 1", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 1} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1--root2 + // | + // 0a + it("no reorg", async () => { + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot2 = states["cp1"].clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + await cache.processState(toHexString(root2), blockStateRoot2); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await 
cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--root2 | + // | |---------root3 + // 0a + it("reorg in same epoch", async () => { + // almost the same to "no reorg" test + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot2 = states["cp1"].clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + await cache.processState(toHexString(root2), blockStateRoot2); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp1"].clone(); + blockStateRoot3.slot = 21 * SLOTS_PER_EPOCH + 4; + const root3 = Buffer.alloc(32, 101); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b----1a--root1 | + // | |----|-------root2 + // 0a | + // cp1a={1a, 21} + it("reorg 1 epoch, no persist 1a", async () => { + // root 1a + expect(fileApisBuffer.size).toEqual(0); + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * 
SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(fileApisBuffer.size).toEqual(0); + await assertPersistedCheckpointState([], []); + + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // root2, regen cp1a + const cp1aState = state1a.clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root1a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // ||-----------|-------root2 + // 0a {21, 1b}=cp1b + it("reorg 1 epoch, no persist 0b", async () => { + expect(fileApisBuffer.size).toEqual(0); + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await 
assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // simulate regen + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + // root2, regen cp0b + const cp1bState = states["cp0b"].clone(); + cp1bState.slot = 21 * SLOTS_PER_EPOCH; + const cp1b = {epoch: 21, root: root0b}; + cache.add(cp1b, cp1bState); + const blockStateRoot2 = cp1bState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, nothing is persisted + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + + // but cp0b in-memory state is pruned + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1b))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b-----|--root1 | + // | | | | + // 0a-----1a----|-------root2 + // | + // cp1a={1a, 21} + it("reorg 1 epoch, persist one more checkpoint state", async () => { + // root 1a + expect(fileApisBuffer.size).toEqual(0); + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0a"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(fileApisBuffer.size).toEqual(0); + // at epoch 20, there should be 2 cps in memory + expect(cache.get(cp0aHex)).to.be.not.null; + expect(cache.get(cp0bHex)).to.be.not.null; + await assertPersistedCheckpointState([], []); + + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); 
+ expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // 2 checkpoint states at epoch 20 are pruned + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // only cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // root2, regen cp0a + cache.add(cp0a, states["cp0a"]); + const cp1aState = state1a.clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root1a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, persist cp0a + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // | | | + // 0a-----------|-------root2 + // | + // cp1a={0a, 21} + it("reorg 2 epochs", async () => { + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + 
expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // 2 checkpoint states at epoch 20 are pruned + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // only cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // root2, regen cp0a + cache.add(cp0a, states["cp0a"]); + const cp1aState = states["cp0a"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root0a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, persist cp0a + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + describe("processState, maxEpochsInMemory = 0", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 0} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ + // || | + // |0b --root1a + // | + // 0a + it("no reorg", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await 
cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b --root1a| + // | \ | + // 0a \------root1b + it("reorg in same epoch", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + // simulate reload cp1b + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + const root1b = Buffer.alloc(32, 101); + const state1b = states["cp0b"].clone(); + state1b.slot = state1a.slot + 1; + state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); + // but no need to persist cp1b + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(0); + // although states["cp0b"] is pruned + expect(await 
cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b --root1a| + // | | + // 0a---------root1b + it("reorg 1 epoch", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1b = Buffer.alloc(32, 101); + const state1b = states["cp0a"].clone(); + state1b.slot = state1a.slot + 1; + state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); + // regen should reload cp0a from disk + cache.add(cp0a, states["cp0a"]); + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + + // both cp0a and cp0b are persisted + expect(await cache.getStateOrBytes(cp0aHex)).toEqual(stateBytes["cp0a"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // | | + // 0a-----------------root2 + // ^ + // {0a, 21}=cp1a + it("reorg 2 epochs", async () => { + expect(await cache.processState(toHexString(root0b), 
states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp1], [stateBytes["cp0b"], stateBytes["cp1"]]); + + // regen should populate cp0a and cp1a checkpoint states + cache.add(cp0a, states["cp0a"]); + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0a"].clone(); + cp1aState.blockRoots.set((20 * SLOTS_PER_EPOCH) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.blockRoots.set((21 * SLOTS_PER_EPOCH) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + cache.add(cp1a, cp1aState); + + const root2 = Buffer.alloc(32, 100); + const state2 = cp1aState.clone(); + state2.slot = 21 * SLOTS_PER_EPOCH + 3; + state2.blockRoots.set(state2.slot % SLOTS_PER_HISTORICAL_ROOT, root2); + expect(await cache.processState(toHexString(root2), state2)).toEqual(2); + // expect 4 cp states are persisted + await assertPersistedCheckpointState( + [cp0b, cp1, cp0a, cp1a], + [stateBytes["cp0b"], stateBytes["cp1"], stateBytes["cp0a"], cp1aState.serialize()] + ); + }); + }); + }); + + async function assertPersistedCheckpointState(cps: phase0.Checkpoint[], stateBytesArr: Uint8Array[]): Promise { + const persistedKeys = cps.map((cp) => toHexString(checkpointToDatastoreKey(cp))); + expect(Array.from(fileApisBuffer.keys())).toStrictEqual(persistedKeys); + for (const [i, persistedKey] of persistedKeys.entries()) { + expect(fileApisBuffer.get(persistedKey)).toStrictEqual(stateBytesArr[i]); + } + for (const [i, cp] of cps.entries()) { + const cpHex = toCheckpointHex(cp); + expect(await cache.getStateOrBytes(cpHex)).toStrictEqual(stateBytesArr[i]); + // simple get() does not reload from disk + 
expect(cache.get(cpHex)).to.be.null; + } + } +}); diff --git a/packages/beacon-node/test/unit/monitoring/properties.test.ts b/packages/beacon-node/test/unit/monitoring/properties.test.ts index 639161eefc9e..1a2e2c58377a 100644 --- a/packages/beacon-node/test/unit/monitoring/properties.test.ts +++ b/packages/beacon-node/test/unit/monitoring/properties.test.ts @@ -91,7 +91,11 @@ describe("monitoring / properties", () => { const labelValue = "test_label_value"; const metricValue = 10; - const metric = metrics.register.gauge({name: metricName, help: "withLabel test", labelNames: [labelName]}); + const metric = metrics.register.gauge<{[labelName]: string}>({ + name: metricName, + help: "withLabel test", + labelNames: [labelName], + }); metric.set({[labelName]: "different_value"}, metricValue + 1); metric.set({[labelName]: labelValue}, metricValue); diff --git a/packages/beacon-node/test/unit/monitoring/service.test.ts b/packages/beacon-node/test/unit/monitoring/service.test.ts index 27b8ca35c307..068f35634f81 100644 --- a/packages/beacon-node/test/unit/monitoring/service.test.ts +++ b/packages/beacon-node/test/unit/monitoring/service.test.ts @@ -1,7 +1,7 @@ import {describe, it, expect, beforeEach, beforeAll, afterAll, vi, afterEach, MockInstance} from "vitest"; +import {Histogram} from "prom-client"; import {ErrorAborted, TimeoutError} from "@lodestar/utils"; import {RegistryMetricCreator} from "../../../src/index.js"; -import {HistogramExtra} from "../../../src/metrics/utils/histogram.js"; import {MonitoringService} from "../../../src/monitoring/service.js"; import {MonitoringOptions} from "../../../src/monitoring/options.js"; import {sleep} from "../../utils/sleep.js"; @@ -41,8 +41,8 @@ describe("monitoring / service", () => { it("should register metrics for collecting and sending data", () => { service = new MonitoringService("beacon", {endpoint}, {register, logger}); - 
expect(register.getSingleMetric("lodestar_monitoring_collect_data_seconds")).toBeInstanceOf(HistogramExtra); - expect(register.getSingleMetric("lodestar_monitoring_send_data_seconds")).toBeInstanceOf(HistogramExtra); + expect(register.getSingleMetric("lodestar_monitoring_collect_data_seconds")).toBeInstanceOf(Histogram); + expect(register.getSingleMetric("lodestar_monitoring_send_data_seconds")).toBeInstanceOf(Histogram); }); it("should log a warning message if insecure monitoring endpoint is provided ", () => { diff --git a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts index 189327a6a5ab..56fb64104744 100644 --- a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts +++ b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts @@ -30,16 +30,21 @@ describe("beaconBlocksMaybeBlobsByRange", () => { rangeRequest.count = 1; const block1 = ssz.deneb.SignedBeaconBlock.defaultValue(); + const blockheader1 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader1.message.slot = 1; block1.message.slot = 1; block1.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar1 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar1.slot = 1; + blobSidecar1.signedBlockHeader = blockheader1; const block2 = ssz.deneb.SignedBeaconBlock.defaultValue(); block2.message.slot = 2; + const blockheader2 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader2.message.slot = 2; + block2.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar2 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar2.slot = 2; + blobSidecar2.signedBlockHeader = blockheader2; const block3 = ssz.deneb.SignedBeaconBlock.defaultValue(); block3.message.slot = 3; @@ -47,13 +52,18 @@ describe("beaconBlocksMaybeBlobsByRange", () => { const block4 = 
ssz.deneb.SignedBeaconBlock.defaultValue(); block4.message.slot = 4; + const blockheader4 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader4.message.slot = 4; + // two blobsidecars block4.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); block4.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar41 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar41.slot = 4; + + blobSidecar41.signedBlockHeader = blockheader4; + const blobSidecar42 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar42.slot = 4; + blobSidecar42.signedBlockHeader = blockheader4; blobSidecar42.index = 1; // Array of testcases which are array of matched blocks with/without (if empty) sidecars diff --git a/packages/beacon-node/test/unit/util/array.test.ts b/packages/beacon-node/test/unit/util/array.test.ts index 5ca275d5a278..d505d27c2e9f 100644 --- a/packages/beacon-node/test/unit/util/array.test.ts +++ b/packages/beacon-node/test/unit/util/array.test.ts @@ -102,6 +102,72 @@ describe("LinkedList", () => { expect(list.last()).toBe(98); }); + describe("moveToHead", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("item is head", () => { + list.moveToHead(1); + expect(list.toArray()).toEqual([1, 2, 3]); + expect(list.first()).toBe(1); + }); + + it("item is middle", () => { + list.moveToHead(2); + expect(list.toArray()).toEqual([2, 1, 3]); + expect(list.first()).toBe(2); + }); + + it("item is tail", () => { + list.moveToHead(3); + expect(list.toArray()).toEqual([3, 1, 2]); + expect(list.first()).toBe(3); + }); + }); + + describe("moveToSecond", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + list.push(4); + }); + + it("item is head", () => { + list.moveToSecond(1); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + 
expect(list.first()).toBe(1); + }); + + it("item is second", () => { + list.moveToSecond(2); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + expect(list.first()).toBe(1); + }); + + it("item is third", () => { + list.moveToSecond(3); + expect(list.toArray()).toEqual([1, 3, 2, 4]); + expect(list.first()).toBe(1); + }); + + it("item is tail", () => { + list.moveToSecond(4); + expect(list.toArray()).toEqual([1, 4, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + }); + it("values", () => { expect(Array.from(list.values())).toEqual([]); const count = 100; @@ -165,6 +231,46 @@ describe("LinkedList", () => { }); }); + describe("insertAfter", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("insert after 0", () => { + // should do nothing + list.insertAfter(0, 4); + expect(list.toArray()).toEqual([1, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 1", () => { + list.insertAfter(1, 4); + expect(list.toArray()).toEqual([1, 4, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 2", () => { + list.insertAfter(2, 4); + expect(list.toArray()).toEqual([1, 2, 4, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 3", () => { + list.insertAfter(3, 4); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(4); + }); + }); + it("toArray", () => { expect(list.toArray()).toEqual([]); @@ -205,4 +311,22 @@ describe("LinkedList", () => { }); } }); + + describe("has", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("should return true if the item is in the list", () => { + expect(list.has(1)).toBe(true); + expect(list.has(2)).toBe(true); + expect(list.has(3)).toBe(true); + 
expect(list.has(4)).toBe(false); + }); + }); }); diff --git a/packages/beacon-node/test/unit/util/kzg.test.ts b/packages/beacon-node/test/unit/util/kzg.test.ts index 5bcaf1071cf6..cfe35e8fc76e 100644 --- a/packages/beacon-node/test/unit/util/kzg.test.ts +++ b/packages/beacon-node/test/unit/util/kzg.test.ts @@ -1,7 +1,8 @@ import {describe, it, expect, afterEach, beforeAll} from "vitest"; import {bellatrix, deneb, ssz} from "@lodestar/types"; import {BYTES_PER_FIELD_ELEMENT, BLOB_TX_TYPE} from "@lodestar/params"; -import {kzgCommitmentToVersionedHash} from "@lodestar/state-transition"; +import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {computeBlobSidecars, kzgCommitmentToVersionedHash} from "../../../src/util/blobs.js"; import {loadEthereumTrustedSetup, initCKZG, ckzg, FIELD_ELEMENTS_PER_BLOB_MAINNET} from "../../../src/util/kzg.js"; import {validateBlobSidecars, validateGossipBlobSidecar} from "../../../src/chain/validation/blobSidecar.js"; import {getMockedBeaconChain} from "../../__mocks__/mockedBeaconChain.js"; @@ -30,8 +31,18 @@ describe("C-KZG", async () => { expect(ckzg.verifyBlobKzgProofBatch(blobs, commitments, proofs)).toBe(true); }); + /* eslint-disable @typescript-eslint/naming-convention */ it("BlobSidecars", async () => { - const chain = getMockedBeaconChain(); + const chainConfig = createChainForkConfig({ + ...defaultChainConfig, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + }); + const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); + const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); + + const chain = getMockedBeaconChain({config}); afterEachCallbacks.push(() => chain.close()); const slot = 0; @@ -45,34 +56,17 @@ describe("C-KZG", async () => { signedBeaconBlock.message.body.blobKzgCommitments.push(kzgCommitment); } const blockRoot = ssz.deneb.BeaconBlock.hashTreeRoot(signedBeaconBlock.message); + const kzgProofs = blobs.map((blob, index) => 
ckzg.computeBlobKzgProof(blob, kzgCommitments[index])); + const blobSidecars: deneb.BlobSidecars = computeBlobSidecars(chain.config, signedBeaconBlock, {blobs, kzgProofs}); - const blobSidecars: deneb.BlobSidecars = blobs.map((blob, index) => { - return { - blockRoot, - index, - slot, - blob, - kzgProof: ckzg.computeBlobKzgProof(blob, kzgCommitments[index]), - kzgCommitment: kzgCommitments[index], - blockParentRoot: Buffer.alloc(32), - proposerIndex: 0, - }; - }); - - const signedBlobSidecars: deneb.SignedBlobSidecar[] = blobSidecars.map((blobSidecar) => { - const signedBlobSidecar = ssz.deneb.SignedBlobSidecar.defaultValue(); - signedBlobSidecar.message = blobSidecar; - return signedBlobSidecar; - }); - - expect(signedBlobSidecars.length).toBe(2); + expect(blobSidecars.length).toBe(2); // Full validation validateBlobSidecars(slot, blockRoot, kzgCommitments, blobSidecars); - signedBlobSidecars.forEach(async (signedBlobSidecar) => { + blobSidecars.forEach(async (blobSidecar) => { try { - await validateGossipBlobSidecar(chain.config, chain, signedBlobSidecar, signedBlobSidecar.message.index); + await validateGossipBlobSidecar(chain, blobSidecar, blobSidecar.index); } catch (error) { // We expect some error from here // console.log(error); diff --git a/packages/beacon-node/test/unit/util/sszBytes.test.ts b/packages/beacon-node/test/unit/util/sszBytes.test.ts index 2ffaa98e6cfe..bb5fc67a7ce6 100644 --- a/packages/beacon-node/test/unit/util/sszBytes.test.ts +++ b/packages/beacon-node/test/unit/util/sszBytes.test.ts @@ -11,7 +11,7 @@ import { getSlotFromSignedAggregateAndProofSerialized, getSignatureFromAttestationSerialized, getSlotFromSignedBeaconBlockSerialized, - getSlotFromSignedBlobSidecarSerialized, + getSlotFromBlobSidecarSerialized, } from "../../../src/util/sszBytes.js"; describe("attestation SSZ serialized picking", () => { @@ -146,20 +146,20 @@ describe("signedBeaconBlock SSZ serialized picking", () => { }); }); -describe("signedBlobSidecar SSZ serialized 
picking", () => { - const testCases = [ssz.deneb.SignedBlobSidecar.defaultValue(), signedBlobSidecarFromValues(1_000_000)]; +describe("BlobSidecar SSZ serialized picking", () => { + const testCases = [ssz.deneb.BlobSidecar.defaultValue(), blobSidecarFromValues(1_000_000)]; - for (const [i, signedBlobSidecar] of testCases.entries()) { - const bytes = ssz.deneb.SignedBlobSidecar.serialize(signedBlobSidecar); - it(`signedBlobSidecar ${i}`, () => { - expect(getSlotFromSignedBlobSidecarSerialized(bytes)).toBe(signedBlobSidecar.message.slot); + for (const [i, blobSidecar] of testCases.entries()) { + const bytes = ssz.deneb.BlobSidecar.serialize(blobSidecar); + it(`blobSidecar ${i}`, () => { + expect(getSlotFromBlobSidecarSerialized(bytes)).toBe(blobSidecar.signedBlockHeader.message.slot); }); } - it("signedBlobSidecar - invalid data", () => { + it("blobSidecar - invalid data", () => { const invalidSlotDataSizes = [0, 20, 38]; for (const size of invalidSlotDataSizes) { - expect(getSlotFromSignedBlobSidecarSerialized(Buffer.alloc(size))).toBeNull(); + expect(getSlotFromBlobSidecarSerialized(Buffer.alloc(size))).toBeNull(); } }); }); @@ -198,8 +198,8 @@ function signedBeaconBlockFromValues(slot: Slot): phase0.SignedBeaconBlock { return signedBeaconBlock; } -function signedBlobSidecarFromValues(slot: Slot): deneb.SignedBlobSidecar { - const signedBlobSidecar = ssz.deneb.SignedBlobSidecar.defaultValue(); - signedBlobSidecar.message.slot = slot; - return signedBlobSidecar; +function blobSidecarFromValues(slot: Slot): deneb.BlobSidecar { + const blobSidecar = ssz.deneb.BlobSidecar.defaultValue(); + blobSidecar.signedBlockHeader.message.slot = slot; + return blobSidecar; } diff --git a/packages/beacon-node/test/utils/chain/stateCache/datastore.ts b/packages/beacon-node/test/utils/chain/stateCache/datastore.ts new file mode 100644 index 000000000000..8a944f4c2d88 --- /dev/null +++ b/packages/beacon-node/test/utils/chain/stateCache/datastore.ts @@ -0,0 +1,26 @@ +import 
{fromHexString, toHexString} from "@chainsafe/ssz"; +import {CPStateDatastore, checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; + +export function getTestDatastore(fileApisBuffer: Map): CPStateDatastore { + const datastore: CPStateDatastore = { + write: (cp, state) => { + const persistentKey = checkpointToDatastoreKey(cp); + const stringKey = toHexString(persistentKey); + if (!fileApisBuffer.has(stringKey)) { + fileApisBuffer.set(stringKey, state.serialize()); + } + return Promise.resolve(persistentKey); + }, + remove: (persistentKey) => { + const stringKey = toHexString(persistentKey); + if (fileApisBuffer.has(stringKey)) { + fileApisBuffer.delete(stringKey); + } + return Promise.resolve(); + }, + read: (persistentKey) => Promise.resolve(fileApisBuffer.get(toHexString(persistentKey)) ?? null), + readKeys: () => Promise.resolve(Array.from(fileApisBuffer.keys()).map((key) => fromHexString(key))), + }; + + return datastore; +} diff --git a/packages/beacon-node/test/utils/mocks/db.ts b/packages/beacon-node/test/utils/mocks/db.ts index 731091bc8e6e..16d7b32a1bcc 100644 --- a/packages/beacon-node/test/utils/mocks/db.ts +++ b/packages/beacon-node/test/utils/mocks/db.ts @@ -1,4 +1,5 @@ import {IBeaconDb} from "../../../src/db/index.js"; +import {CheckpointStateRepository} from "../../../src/db/repositories/checkpointState.js"; import { AttesterSlashingRepository, BlockArchiveRepository, @@ -38,6 +39,7 @@ export function getStubbedBeaconDb(): IBeaconDb { // finalized states stateArchive: createStubInstance(StateArchiveRepository), + checkpointState: createStubInstance(CheckpointStateRepository), // op pool voluntaryExit: createStubInstance(VoluntaryExitRepository), diff --git a/packages/cli/.mocharc.yaml b/packages/cli/.mocharc.yaml deleted file mode 100644 index b923bc39eb4c..000000000000 --- a/packages/cli/.mocharc.yaml +++ /dev/null @@ -1,7 +0,0 @@ -exit: true -extension: ["ts"] -colors: true -require: - - ./test/setup.ts 
-node-option: - - "loader=ts-node/esm" diff --git a/packages/cli/.nycrc.json b/packages/cli/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/cli/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/cli/package.json b/packages/cli/package.json index a318eb6193e9..df83d79c083c 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@chainsafe/lodestar", - "version": "1.13.0", + "version": "1.14.0", "description": "Command line interface for lodestar", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -31,8 +31,8 @@ "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", - "test:e2e": "LODESTAR_PRESET=minimal mocha --timeout 30000 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:e2e": "vitest --run --poolOptions.threads.singleThread true --dir test/e2e/", "test:sim:multifork": "LODESTAR_PRESET=minimal node --loader ts-node/esm test/sim/multi_fork.test.ts", "test:sim:mixedclient": "LODESTAR_PRESET=minimal node --loader ts-node/esm test/sim/mixed_client.test.ts", "test:sim:endpoints": "LODESTAR_PRESET=minimal node --loader ts-node/esm test/sim/endpoints.test.ts", @@ -56,7 +56,7 @@ "@chainsafe/as-sha256": "^0.3.1", "@chainsafe/bls": "7.1.1", "@chainsafe/bls-keygen": "^0.3.0", - "@chainsafe/bls-keystore": "^2.0.0", + "@chainsafe/bls-keystore": "^3.0.0", "@chainsafe/blst": "^0.2.9", "@chainsafe/discv5": "^5.1.0", "@chainsafe/persistent-merkle-tree": "^0.6.1", @@ -65,17 +65,17 @@ "@libp2p/crypto": "^2.0.4", "@libp2p/peer-id": "^3.0.2", "@libp2p/peer-id-factory": "^3.0.4", - "@lodestar/api": "^1.13.0", - "@lodestar/beacon-node": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/db": "^1.13.0", - "@lodestar/light-client": "^1.13.0", - 
"@lodestar/logger": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", - "@lodestar/validator": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/beacon-node": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/db": "^1.14.0", + "@lodestar/light-client": "^1.14.0", + "@lodestar/logger": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", + "@lodestar/validator": "^1.14.0", "@multiformats/multiaddr": "^12.1.3", "@types/lockfile": "^1.0.2", "bip39": "^3.1.0", @@ -88,7 +88,7 @@ "js-yaml": "^4.1.0", "lockfile": "^1.0.4", "lodash": "^4.17.21", - "prom-client": "^14.2.0", + "prom-client": "^15.1.0", "rimraf": "^4.4.1", "source-map-support": "^0.5.21", "uint8arrays": "^4.0.3", @@ -96,7 +96,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.13.0", + "@lodestar/test-utils": "^1.14.0", "@types/debug": "^4.1.7", "@types/expand-tilde": "^2.0.0", "@types/got": "^9.6.12", diff --git a/packages/cli/src/cmds/bootnode/handler.ts b/packages/cli/src/cmds/bootnode/handler.ts index be639eb1bf4b..7bf9169cfdc6 100644 --- a/packages/cli/src/cmds/bootnode/handler.ts +++ b/packages/cli/src/cmds/bootnode/handler.ts @@ -1,6 +1,6 @@ import path from "node:path"; import {Multiaddr, multiaddr} from "@multiformats/multiaddr"; -import {Discv5, ENR} from "@chainsafe/discv5"; +import {Discv5, ENR, IDiscv5CreateOptions} from "@chainsafe/discv5"; import {ErrorAborted} from "@lodestar/utils"; import {HttpMetricsServer, RegistryMetricCreator, getHttpMetricsServer} from "@lodestar/beacon-node"; @@ -58,7 +58,7 @@ export async function bootnodeHandler(args: BootnodeArgs & GlobalArgs): Promise< ip6: bindAddrs.ip6 ? 
multiaddr(bindAddrs.ip6) : undefined, }, config: {enrUpdate: !enr.ip && !enr.ip6}, - metricsRegistry, + metricsRegistry: metricsRegistry as IDiscv5CreateOptions["metricsRegistry"], }); // If there are any bootnodes, add them to the routing table diff --git a/packages/cli/src/cmds/validator/handler.ts b/packages/cli/src/cmds/validator/handler.ts index fe14cedbcca1..5eca9bc741d9 100644 --- a/packages/cli/src/cmds/validator/handler.ts +++ b/packages/cli/src/cmds/validator/handler.ts @@ -9,7 +9,7 @@ import { defaultOptions, } from "@lodestar/validator"; import {routes} from "@lodestar/api"; -import {getMetrics, MetricsRegister} from "@lodestar/validator"; +import {getMetrics} from "@lodestar/validator"; import { RegistryMetricCreator, collectNodeJSMetrics, @@ -112,7 +112,7 @@ export async function validatorHandler(args: IValidatorCliArgs & GlobalArgs): Pr // Send version and network data for static registries const register = args["metrics"] || args["monitoring.endpoint"] ? new RegistryMetricCreator() : null; - const metrics = register && getMetrics(register as unknown as MetricsRegister, {version, commit, network}); + const metrics = register && getMetrics(register, {version, commit, network}); // Start metrics server if metrics are enabled. // Collect NodeJS metrics defined in the Lodestar repo @@ -170,6 +170,7 @@ export async function validatorHandler(args: IValidatorCliArgs & GlobalArgs): Pr distributed: args.distributed, useProduceBlockV3: args.useProduceBlockV3, broadcastValidation: parseBroadcastValidation(args.broadcastValidation), + blindedLocal: args.blindedLocal, }, metrics ); @@ -226,6 +227,7 @@ function getProposerConfigFromArgs( selection: parseBuilderSelection( args["builder.selection"] ?? (args["builder"] ? defaultOptions.builderAliasSelection : undefined) ), + boostFactor: args["builder.boostFactor"] !== undefined ? 
BigInt(args["builder.boostFactor"]) : undefined, }, }; diff --git a/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts b/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts index 90b502d79ac3..169ddb74ffda 100644 --- a/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts +++ b/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts @@ -1,7 +1,12 @@ +import path from "node:path"; import {spawn, Pool, Worker, ModuleThread, QueuedTask} from "@chainsafe/threads"; import {DecryptKeystoreArgs, DecryptKeystoreWorkerAPI} from "./types.js"; import {maxPoolSize} from "./poolSize.js"; +// Worker constructor consider the path relative to the current working directory +const workerDir = + process.env.NODE_ENV === "test" ? "../../../../../lib/cmds/validator/keymanager/decryptKeystores" : "./"; + /** * Thread pool to decrypt keystores */ @@ -16,7 +21,7 @@ export class DecryptKeystoresThreadPool { ) { this.pool = Pool( () => - spawn(new Worker("./worker.js"), { + spawn(new Worker(path.join(workerDir, "worker.js")), { // The number below is big enough to almost disable the timeout // which helps during tests run on unpredictably slow hosts timeout: 5 * 60 * 1000, diff --git a/packages/cli/src/cmds/validator/keymanager/impl.ts b/packages/cli/src/cmds/validator/keymanager/impl.ts index 2abda3c9642e..4628c96285df 100644 --- a/packages/cli/src/cmds/validator/keymanager/impl.ts +++ b/packages/cli/src/cmds/validator/keymanager/impl.ts @@ -390,6 +390,29 @@ export class KeymanagerApi implements Api { }; } + async getBuilderBoostFactor(pubkeyHex: string): ReturnType { + const builderBoostFactor = this.validator.validatorStore.getBuilderBoostFactor(pubkeyHex); + return {data: {pubkey: pubkeyHex, builderBoostFactor}}; + } + + async setBuilderBoostFactor(pubkeyHex: string, builderBoostFactor: bigint): Promise { + this.checkIfProposerWriteEnabled(); + 
this.validator.validatorStore.setBuilderBoostFactor(pubkeyHex, builderBoostFactor); + this.persistedKeysBackend.writeProposerConfig( + pubkeyHex, + this.validator.validatorStore.getProposerConfig(pubkeyHex) + ); + } + + async deleteBuilderBoostFactor(pubkeyHex: string): Promise { + this.checkIfProposerWriteEnabled(); + this.validator.validatorStore.deleteBuilderBoostFactor(pubkeyHex); + this.persistedKeysBackend.writeProposerConfig( + pubkeyHex, + this.validator.validatorStore.getProposerConfig(pubkeyHex) + ); + } + /** * Create and sign a voluntary exit message for an active validator */ diff --git a/packages/cli/src/cmds/validator/options.ts b/packages/cli/src/cmds/validator/options.ts index 4f0ec476f01c..cddb34981ce1 100644 --- a/packages/cli/src/cmds/validator/options.ts +++ b/packages/cli/src/cmds/validator/options.ts @@ -45,9 +45,11 @@ export type IValidatorCliArgs = AccountValidatorArgs & builder?: boolean; "builder.selection"?: string; + "builder.boostFactor"?: bigint; useProduceBlockV3?: boolean; broadcastValidation?: string; + blindedLocal?: boolean; importKeystores?: string[]; importKeystoresPassword?: string; @@ -245,10 +247,17 @@ export const validatorOptions: CliCommandOptions = { group: "builder", }, + "builder.boostFactor": { + type: "string", + description: + "Percentage multiplier the block producing beacon node must apply to boost (>100) or dampen (<100) builder block value for selection against execution block. 
The multiplier is ignored if `--builder.selection` is set to anything other than `maxprofit`", + defaultDescription: `${defaultOptions.builderBoostFactor}`, + group: "builder", + }, + useProduceBlockV3: { type: "boolean", - description: "Enable/disable usage of produceBlockV3 that might not be supported by all beacon clients yet", - defaultDescription: `${defaultOptions.useProduceBlockV3}`, + description: "Enable/disable usage of produceBlockV3 for block production, is auto enabled on deneb+ blocks", }, broadcastValidation: { @@ -257,6 +266,12 @@ export const validatorOptions: CliCommandOptions = { defaultDescription: `${defaultOptions.broadcastValidation}`, }, + blindedLocal: { + type: "string", + description: "Request fetching local block in blinded format for produceBlockV3", + defaultDescription: `${defaultOptions.blindedLocal}`, + }, + importKeystores: { alias: ["keystore"], // Backwards compatibility with old `validator import` cmdx description: "Path(s) to a directory or single file path to validator keystores, i.e. 
Launchpad validators", diff --git a/packages/cli/src/networks/goerli.ts b/packages/cli/src/networks/goerli.ts index b076562398a3..c58dcabae6a0 100644 --- a/packages/cli/src/networks/goerli.ts +++ b/packages/cli/src/networks/goerli.ts @@ -1,10 +1,8 @@ export {goerliChainConfig as chainConfig} from "@lodestar/config/networks"; export const depositContractDeployBlock = 4367322; -export const genesisFileUrl = - "https://raw.githubusercontent.com/eth-clients/eth2-networks/master/shared/prater/genesis.ssz"; -export const bootnodesFileUrl = - "https://raw.githubusercontent.com/eth-clients/eth2-networks/master/shared/prater/bootstrap_nodes.txt"; +export const genesisFileUrl = "https://raw.githubusercontent.com/eth-clients/goerli/main/prater/genesis.ssz"; +export const bootnodesFileUrl = "https://raw.githubusercontent.com/eth-clients/goerli/main/prater/bootstrap_nodes.txt"; export const bootEnrs = [ "enr:-LK4QH1xnjotgXwg25IDPjrqRGFnH1ScgNHA3dv1Z8xHCp4uP3N3Jjl_aYv_WIxQRdwZvSukzbwspXZ7JjpldyeVDzMCh2F0dG5ldHOIAAAAAAAAAACEZXRoMpB53wQoAAAQIP__________gmlkgnY0gmlwhIe1te-Jc2VjcDI1NmsxoQOkcGXqbCJYbcClZ3z5f6NWhX_1YPFRYRRWQpJjwSHpVIN0Y3CCIyiDdWRwgiMo", diff --git a/packages/cli/src/networks/mainnet.ts b/packages/cli/src/networks/mainnet.ts index 5fa2477242ed..0972b62f7458 100644 --- a/packages/cli/src/networks/mainnet.ts +++ b/packages/cli/src/networks/mainnet.ts @@ -8,7 +8,7 @@ export const bootnodesFileUrl = export const bootEnrs = [ // Teku team's bootnodes - "enr:-KG4QMOEswP62yzDjSwWS4YEjtTZ5PO6r65CPqYBkgTTkrpaedQ8uEUo1uMALtJIvb2w_WWEVmg5yt1UAuK1ftxUU7QDhGV0aDKQu6TalgMAAAD__________4JpZIJ2NIJpcIQEnfA2iXNlY3AyNTZrMaEDfol8oLr6XJ7FsdAYE7lpJhKMls4G_v6qQOGKJUWGb_uDdGNwgiMog3VkcIIjKA", + "enr:-KG4QNTx85fjxABbSq_Rta9wy56nQ1fHK0PewJbGjLm1M4bMGx5-3Qq4ZX2-iFJ0pys_O90sVXNNOxp2E7afBsGsBrgDhGV0aDKQu6TalgMAAAD__________4JpZIJ2NIJpcIQEnfA2iXNlY3AyNTZrMaECGXWQ-rQ2KZKRH1aOW4IlPDBkY4XDphxg9pxKytFCkayDdGNwgiMog3VkcIIjKA", 
"enr:-KG4QF4B5WrlFcRhUU6dZETwY5ZzAXnA0vGC__L1Kdw602nDZwXSTs5RFXFIFUnbQJmhNGVU6OIX7KVrCSTODsz1tK4DhGV0aDKQu6TalgMAAAD__________4JpZIJ2NIJpcIQExNYEiXNlY3AyNTZrMaECQmM9vp7KhaXhI-nqL_R0ovULLCFSFTa9CPPSdb1zPX6DdGNwgiMog3VkcIIjKA", // Prylab team's bootnodes "enr:-Ku4QImhMc1z8yCiNJ1TyUxdcfNucje3BGwEHzodEZUan8PherEo4sF7pPHPSIB1NNuSg5fZy7qFsjmUKs2ea1Whi0EBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpD1pf1CAAAAAP__________gmlkgnY0gmlwhBLf22SJc2VjcDI1NmsxoQOVphkDqal4QzPMksc5wnpuC3gvSC8AfbFOnZY_On34wIN1ZHCCIyg", diff --git a/packages/cli/test/e2e/blsToExecutionchange.test.ts b/packages/cli/test/e2e/blsToExecutionchange.test.ts index 9ea73e3b4afd..31b4d76d8f00 100644 --- a/packages/cli/test/e2e/blsToExecutionchange.test.ts +++ b/packages/cli/test/e2e/blsToExecutionchange.test.ts @@ -1,4 +1,5 @@ import path from "node:path"; +import {afterAll, describe, it, vi, beforeEach, afterEach} from "vitest"; import {toHexString} from "@chainsafe/ssz"; import {sleep, retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; @@ -8,7 +9,7 @@ import {execCliCommand, spawnCliCommand, stopChildProcess} from "@lodestar/test- import {testFilesDir} from "../utils.js"; describe("bLSToExecutionChange cmd", function () { - this.timeout("60s"); + vi.setConfig({testTimeout: 60_000}); it("Perform bLSToExecutionChange", async () => { const restPort = 9596; @@ -25,7 +26,7 @@ describe("bLSToExecutionChange cmd", function () { // Speed up test to make genesis happen faster "--params.SECONDS_PER_SLOT=2", ], - {pipeStdioToParent: false, logPrefix: "dev"} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/importFromFsDirect.test.ts b/packages/cli/test/e2e/importFromFsDirect.test.ts index 9d64421c97af..df53e0f973bb 100644 --- a/packages/cli/test/e2e/importFromFsDirect.test.ts +++ b/packages/cli/test/e2e/importFromFsDirect.test.ts @@ -1,21 +1,20 @@ import fs from "node:fs"; import path from 
"node:path"; +import {describe, it, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import from fs same cmd as validate", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-and-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); rimraf.sync(importFromDir); }); @@ -25,7 +24,7 @@ describe("import from fs same cmd as validate", function () { const pubkeys = cachedPubkeysHex.slice(0, keyCount); const secretKeys = cachedSeckeysHex.slice(0, keyCount); - before("write keystores to disk", async () => { + beforeAll(async () => { // Produce and encrypt keystores const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -41,7 +40,6 @@ describe("import from fs same cmd as validate", function () { const {keymanagerClient} = await startValidatorWithKeyManager([], { dataDir, logPrefix: "case-1", - testContext, }); await expectKeys(keymanagerClient, [], "Wrong listKeys response data"); @@ -51,7 +49,7 @@ describe("import from fs same cmd as validate", function () { it("run 'validator' check keys are loaded", async () => { const {keymanagerClient} = await startValidatorWithKeyManager( [`--importKeystores=${importFromDir}`, `--importKeystoresPassword=${passphraseFilepath}`], - {dataDir, logPrefix: "case-2", testContext} + {dataDir, logPrefix: "case-2"} ); await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); diff --git 
a/packages/cli/test/e2e/importFromFsPreStep.test.ts b/packages/cli/test/e2e/importFromFsPreStep.test.ts index efbe7a6b35e4..ae9ac3321a05 100644 --- a/packages/cli/test/e2e/importFromFsPreStep.test.ts +++ b/packages/cli/test/e2e/importFromFsPreStep.test.ts @@ -1,8 +1,7 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {execCliCommand} from "@lodestar/test-utils"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; @@ -10,14 +9,13 @@ import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import from fs then validate", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-then-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); rimraf.sync(importFromDir); }); @@ -45,7 +43,7 @@ describe("import from fs then validate", function () { ]); for (let i = 0; i < keyCount; i++) { - expect(stdout).includes(pubkeys[i], `stdout should include imported pubkey[${i}]`); + expect(stdout).toContain(pubkeys[i]); } }); @@ -56,12 +54,12 @@ describe("import from fs then validate", function () { const stdout = await execCliCommand("packages/cli/bin/lodestar.js", ["validator list", `--dataDir ${dataDir}`]); for (let i = 0; i < keyCount; i++) { - expect(stdout).includes(pubkeys[i], `stdout should include imported pubkey[${i}]`); + expect(stdout).toContain(pubkeys[i]); } }); it("run 'validator' check keys are loaded", async function () { - const 
{keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); }); diff --git a/packages/cli/test/e2e/importKeystoresFromApi.test.ts b/packages/cli/test/e2e/importKeystoresFromApi.test.ts index dcd0f38b2182..bb91d467b86a 100644 --- a/packages/cli/test/e2e/importKeystoresFromApi.test.ts +++ b/packages/cli/test/e2e/importKeystoresFromApi.test.ts @@ -1,12 +1,11 @@ import path from "node:path"; +import {describe, it, expect, beforeAll, vi, afterAll, beforeEach, afterEach} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {DeletionStatus, getClient, ImportStatus} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {Interchange} from "@lodestar/validator"; import {ApiError, HttpStatusCode} from "@lodestar/api"; import {bufferStderr, spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; @@ -14,12 +13,11 @@ import {expectDeepEquals} from "../utils/runUtils.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import keystores from api", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-keystores-test"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -57,7 +55,7 @@ describe("import keystores from api", function () { const slashingProtectionStr = JSON.stringify(slashingProtection); it("run 'validator' and import remote keys from API", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], 
{dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Produce and encrypt keystores const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -88,6 +86,7 @@ describe("import keystores from api", function () { // Attempt to run a second process and expect the keystore lock to throw const validator = await spawnCliCommand("packages/cli/bin/lodestar.js", ["validator", "--dataDir", dataDir], { logPrefix: "vc-2", + testContext: {beforeEach, afterEach, afterAll}, }); await new Promise((resolve, reject) => { @@ -112,7 +111,7 @@ describe("import keystores from api", function () { }); it("run 'validator' check keys are loaded + delete", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys before deleting"); @@ -131,17 +130,17 @@ describe("import keystores from api", function () { }); it("different process check no keys are loaded", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // After deleting there should be no keys await expectKeys(keymanagerClient, [], "Wrong listKeys"); }); it("reject calls without bearerToken", async function () { - await startValidatorWithKeyManager([], {dataDir, testContext}); + await startValidatorWithKeyManager([], {dataDir}); const keymanagerClientNoAuth = getClient({baseUrl: "http://localhost:38011", bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); - expect(res.ok).to.be.false; - expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); + expect(res.ok).toBe(false); + expect(res.error?.code).toEqual(HttpStatusCode.UNAUTHORIZED); 
}); }); diff --git a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts index 7f36a6876fd0..fd2193060ddd 100644 --- a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts +++ b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts @@ -1,10 +1,9 @@ import path from "node:path"; +import {describe, it, expect, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {Api, DeleteRemoteKeyStatus, getClient, ImportRemoteKeyStatus} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {ApiError, HttpStatusCode} from "@lodestar/api"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex} from "../utils/cachedKeys.js"; import {expectDeepEquals} from "../utils/runUtils.js"; @@ -23,12 +22,11 @@ async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], mess } describe("import remoteKeys from api", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-remoteKeys-test"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -36,7 +34,7 @@ describe("import remoteKeys from api", function () { const pubkeysToAdd = [cachedPubkeysHex[0], cachedPubkeysHex[1]]; it("run 'validator' and import remote keys from API", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Wrap in retry since the API may not be listening yet await expectKeys(keymanagerClient, [], "Wrong listRemoteKeys before importing"); @@ -64,7 +62,7 @@ describe("import remoteKeys from api", function () { }); it("run 'validator' check keys are loaded + delete", async function () { - const {keymanagerClient} = await 
startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeysToAdd, "Wrong listRemoteKeys before deleting"); @@ -82,11 +80,11 @@ describe("import remoteKeys from api", function () { }); it("reject calls without bearerToken", async function () { - await startValidatorWithKeyManager([], {dataDir, testContext}); + await startValidatorWithKeyManager([], {dataDir}); const keymanagerUrl = "http://localhost:38011"; const keymanagerClientNoAuth = getClient({baseUrl: keymanagerUrl, bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); - expect(res.ok).to.be.false; - expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); + expect(res.ok).toBe(false); + expect(res.error?.code).toEqual(HttpStatusCode.UNAUTHORIZED); }); }); diff --git a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts index 9d6eeafedfd3..eff3a488c898 100644 --- a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts +++ b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts @@ -1,8 +1,8 @@ import path from "node:path"; +import {describe, it, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; import {Interchange} from "@lodestar/validator"; import {ApiError} from "@lodestar/api"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; @@ -10,8 +10,8 @@ import {expectDeepEquals} from "../utils/runUtils.js"; import {startValidatorWithKeyManager} from "../utils/validator.js"; describe("import keystores from api, test DefaultProposerConfig", function () { - this.timeout("30s"); - const testContext = getMochaContext(this); + 
vi.setConfig({testTimeout: 30_000}); + const dataDir = path.join(testFilesDir, "proposer-config-test"); const defaultOptions = { @@ -26,7 +26,7 @@ describe("import keystores from api, test DefaultProposerConfig", function () { graffiti: "bbbb", }; - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -51,7 +51,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("1 . run 'validator' import keys from API, getdefaultfeeRecipient", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); // Produce and encrypt keystores // Import test keys @@ -122,7 +121,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("2 . run 'validator' Check last feeRecipient and gasLimit persists", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); // next time check edited feeRecipient persists @@ -185,7 +183,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("3 . 
run 'validator' FeeRecipient and GasLimit should be default after delete", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); const feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); diff --git a/packages/cli/test/e2e/runDevCmd.test.ts b/packages/cli/test/e2e/runDevCmd.test.ts index 69c8989f1788..c7f51b45045e 100644 --- a/packages/cli/test/e2e/runDevCmd.test.ts +++ b/packages/cli/test/e2e/runDevCmd.test.ts @@ -1,12 +1,11 @@ +import {describe, it, vi, beforeEach, afterEach, afterAll} from "vitest"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {retry} from "@lodestar/utils"; import {spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; describe("Run dev command", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); it("Run dev command with no --dataDir until beacon api is listening", async () => { const beaconPort = 39011; @@ -14,7 +13,7 @@ describe("Run dev command", function () { const devProc = await spawnCliCommand( "packages/cli/bin/lodestar.js", ["dev", "--reset", "--startValidators=0..7", `--rest.port=${beaconPort}`], - {pipeStdioToParent: true, logPrefix: "dev", testContext} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/validatorList.test.ts b/packages/cli/test/e2e/validatorList.test.ts index ba2102f07fee..b6fe4da5faeb 100644 --- a/packages/cli/test/e2e/validatorList.test.ts +++ b/packages/cli/test/e2e/validatorList.test.ts @@ -1,25 +1,35 @@ /* eslint-disable no-console */ import fs from "node:fs"; import path from "node:path"; +import {describe, it, beforeAll, vi, expect, afterEach, beforeEach} from "vitest"; import {rimraf} from "rimraf"; -import 
{expect} from "chai"; import {Keystore} from "@chainsafe/bls-keystore"; import {fromHex} from "@lodestar/utils"; import {runCliCommand} from "@lodestar/test-utils"; -import {stubLogger} from "@lodestar/test-utils/sinon"; import {testFilesDir} from "../utils.js"; import {getLodestarCli} from "../../src/cli.js"; describe("cmds / validator", function () { - this.timeout("30s"); - stubLogger(this, console); + vi.setConfig({testTimeout: 30_000}); + const lodestar = getLodestarCli(); const dataDir = testFilesDir; - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); + beforeEach(() => { + vi.spyOn(console, "log"); + vi.spyOn(console, "info"); + vi.spyOn(console, "error"); + vi.spyOn(console, "debug"); + }); + + afterEach(() => { + vi.resetAllMocks(); + }); + /** Generated from const sk = bls.SecretKey.fromKeygen(Buffer.alloc(32, 0xaa)); */ const skHex = "0x0e5bd52621b6a8956086dcf0ecc89f0cdca56cebb2a8516c2d4252a9867fc551"; const pkHex = "0x8be678633e927aa0435addad5dcd5283fef6110d91362519cd6d43e61f6c017d724fa579cc4b2972134e050b6ba120c0"; @@ -41,7 +51,7 @@ describe("cmds / validator", function () { `--passphraseFile ${passphraseFilepath}`, ]); - expect(console.log).be.calledWith(`Imported keystore ${pkHex} ${keystoreFilepath}`); + expect(console.log).toHaveBeenCalledWith(`Imported keystore ${pkHex} ${keystoreFilepath}`); }); it("should list validators", async function () { @@ -50,7 +60,7 @@ describe("cmds / validator", function () { await runCliCommand(lodestar, ["validator list", `--dataDir ${dataDir}`], {timeoutMs: 5000}); - expect(console.info).calledWith("1 local keystores"); - expect(console.info).calledWith(pkHex); + expect(console.info).toHaveBeenCalledWith("1 local keystores"); + expect(console.info).toHaveBeenCalledWith(pkHex); }); }); diff --git a/packages/cli/test/e2e/voluntaryExit.test.ts b/packages/cli/test/e2e/voluntaryExit.test.ts index b3a539473581..89841fb7c3e4 100644 --- a/packages/cli/test/e2e/voluntaryExit.test.ts +++ 
b/packages/cli/test/e2e/voluntaryExit.test.ts @@ -1,15 +1,14 @@ import path from "node:path"; +import {afterAll, describe, it, vi, beforeEach, afterEach} from "vitest"; import {retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; import {spawnCliCommand, execCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; describe("voluntaryExit cmd", function () { - const testContext = getMochaContext(this); - this.timeout("60s"); + vi.setConfig({testTimeout: 60_000}); it("Perform a voluntary exit", async () => { const restPort = 9596; @@ -29,7 +28,7 @@ describe("voluntaryExit cmd", function () { // Allow voluntary exists to be valid immediately "--params.SHARD_COMMITTEE_PERIOD=0", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/voluntaryExitFromApi.test.ts b/packages/cli/test/e2e/voluntaryExitFromApi.test.ts index a06cc2025af3..ed4439d36ab2 100644 --- a/packages/cli/test/e2e/voluntaryExitFromApi.test.ts +++ b/packages/cli/test/e2e/voluntaryExitFromApi.test.ts @@ -1,17 +1,15 @@ import path from "node:path"; -import {expect} from "chai"; +import {describe, it, vi, expect, afterAll, beforeEach, afterEach} from "vitest"; import {ApiError, getClient} from "@lodestar/api"; import {getClient as getKeymanagerClient} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; import {spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {retry} from "@lodestar/utils"; import {testFilesDir} from "../utils.js"; describe("voluntary exit from 
api", function () { - const testContext = getMochaContext(this); - this.timeout("60s"); + vi.setConfig({testTimeout: 60_000}); it("Perform a voluntary exit", async () => { // Start dev node with keymanager @@ -39,7 +37,7 @@ describe("voluntary exit from api", function () { // Disable bearer token auth to simplify testing "--keymanager.authEnabled=false", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: false, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits @@ -71,10 +69,10 @@ describe("voluntary exit from api", function () { ApiError.assert(res); const signedVoluntaryExit = res.response.data; - expect(signedVoluntaryExit.message.epoch).to.equal(exitEpoch); - expect(signedVoluntaryExit.message.validatorIndex).to.equal(indexToExit); + expect(signedVoluntaryExit.message.epoch).toBe(exitEpoch); + expect(signedVoluntaryExit.message.validatorIndex).toBe(indexToExit); // Signature will be verified when submitting to beacon node - expect(signedVoluntaryExit.signature).to.not.be.undefined; + expect(signedVoluntaryExit.signature).toBeDefined(); // 2. 
submit signed voluntary exit message to beacon node ApiError.assert(await beaconClient.submitPoolVoluntaryExit(signedVoluntaryExit)); diff --git a/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts b/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts index 4c6fdab11f2a..b2f902c0e6dd 100644 --- a/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts +++ b/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts @@ -1,4 +1,5 @@ import path from "node:path"; +import {describe, it, beforeAll, afterAll, beforeEach, afterEach, vi} from "vitest"; import {retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; @@ -10,15 +11,14 @@ import { StartedExternalSigner, getKeystoresStr, } from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; describe("voluntaryExit using remote signer", function () { - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); let externalSigner: StartedExternalSigner; - before("start external signer container", async () => { + beforeAll(async () => { const password = "password"; externalSigner = await startExternalSigner({ keystoreStrings: await getKeystoresStr( @@ -29,13 +29,11 @@ describe("voluntaryExit using remote signer", function () { }); }); - after("stop external signer container", async () => { + afterAll(async () => { await externalSigner.container.stop(); }); it("Perform a voluntary exit", async () => { - const testContext = getMochaContext(this); - const restPort = 9596; const devBnProc = await spawnCliCommand( "packages/cli/bin/lodestar.js", @@ -52,7 +50,7 @@ describe("voluntaryExit using remote signer", function () { // Allow voluntary exists to be valid immediately "--params.SHARD_COMMITTEE_PERIOD=0", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: false, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit 
early if process exits diff --git a/packages/cli/test/globalSetup.ts b/packages/cli/test/globalSetup.ts new file mode 100644 index 000000000000..02074bb24d11 --- /dev/null +++ b/packages/cli/test/globalSetup.ts @@ -0,0 +1,4 @@ +export async function setup(): Promise { + process.env.NODE_ENV = "test"; +} +export async function teardown(): Promise {} diff --git a/packages/cli/test/setup.ts b/packages/cli/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/cli/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/cli/test/sim/multi_fork.test.ts b/packages/cli/test/sim/multi_fork.test.ts index 71d6eb4a42ea..734ae5c5a380 100644 --- a/packages/cli/test/sim/multi_fork.test.ts +++ b/packages/cli/test/sim/multi_fork.test.ts @@ -65,10 +65,11 @@ const env = await SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this will cause race in beacon but since builder is not attached will + // return with engine full block and publish via publishBlockV2 clientOptions: { useProduceBlockV3: true, - // default builder selection will cause a race try in beacon even if builder is not set - // but not to worry, execution block will be selected as fallback anyway + "builder.selection": "maxprofit", }, }, }, @@ -82,12 +83,12 @@ const env = await SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this will make the beacon respond with blinded version of the local block as no + // builder is attached to beacon, and publish via publishBlindedBlockV2 clientOptions: { - useProduceBlockV3: false, - // default builder selection of max profit will make it use produceBlindedBlock - // but not to worry, execution block will be selected as fallback anyway - // but returned in blinded format for 
validator to use publish blinded block - // which assembles block beacon side from local cache before publishing + useProduceBlockV3: true, + "builder.selection": "maxprofit", + blindedLocal: true, }, }, }, @@ -101,9 +102,9 @@ const env = await SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this builder selection will make it use produceBlockV2 and respond with full block clientOptions: { useProduceBlockV3: false, - // this builder selection will make it use produceBlockV2 "builder.selection": "executiononly", }, }, @@ -111,7 +112,24 @@ const env = await SimulationEnvironment.initWithDefaults( execution: ExecutionClient.Nethermind, keysCount: 32, }, - {id: "node-4", beacon: BeaconClient.Lighthouse, execution: ExecutionClient.Geth, keysCount: 32}, + { + id: "node-4", + beacon: BeaconClient.Lodestar, + validator: { + type: ValidatorClient.Lodestar, + options: { + // this builder selection will make it use produceBlindedBlockV2 and respond with blinded version + // of local block and subsequent publishing via publishBlindedBlock + clientOptions: { + useProduceBlockV3: false, + "builder.selection": "maxprofit", + }, + }, + }, + execution: ExecutionClient.Nethermind, + keysCount: 32, + }, + {id: "node-5", beacon: BeaconClient.Lighthouse, execution: ExecutionClient.Geth, keysCount: 32}, ] ); diff --git a/packages/cli/test/unit/cmds/beacon.test.ts b/packages/cli/test/unit/cmds/beacon.test.ts index 08367ad01309..7d111b3362a8 100644 --- a/packages/cli/test/unit/cmds/beacon.test.ts +++ b/packages/cli/test/unit/cmds/beacon.test.ts @@ -1,6 +1,6 @@ import path from "node:path"; import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createFromJSON, createSecp256k1PeerId} from "@libp2p/peer-id-factory"; import {multiaddr} from "@multiformats/multiaddr"; import {createKeypairFromPeerId, ENR, SignableENR} from "@chainsafe/discv5"; @@ -33,8 +33,8 @@ describe("cmds / beacon / 
args handler", () => { }); const bootEnrs = options.network.discv5?.bootEnrs ?? []; - expect(bootEnrs.includes(enr1)).to.be.true; - expect(bootEnrs.includes(enr2)).to.be.true; + expect(bootEnrs.includes(enr1)).toBe(true); + expect(bootEnrs.includes(enr2)).toBe(true); }); it("Over-write ENR fields", async () => { @@ -50,15 +50,15 @@ describe("cmds / beacon / args handler", () => { const enr = ENR.decodeTxt(options.network.discv5?.enr as string); - expect(enr.ip).to.equal(enrIp, "wrong enr.ip"); - expect(enr.tcp).to.equal(enrTcp, "wrong enr.tcp"); + expect(enr.ip).toBe(enrIp); + expect(enr.tcp).toBe(enrTcp); }); it("Create different PeerId every run", async () => { const {peerId: peerId1} = await runBeaconHandlerInit({}); const {peerId: peerId2} = await runBeaconHandlerInit({}); - expect(peerId1.toString()).not.equal(peerId2.toString(), "peer ids must be different"); + expect(peerId1.toString()).not.toBe(peerId2.toString()); }); it("Re-use existing peer", async () => { @@ -74,7 +74,7 @@ describe("cmds / beacon / args handler", () => { persistNetworkIdentity: true, }); - expect(peerId.toString()).equal(prevPeerId.toString(), "peer must be equal to persisted"); + expect(peerId.toString()).toBe(prevPeerId.toString()); }); it("Set known deposit contract", async () => { @@ -83,7 +83,7 @@ describe("cmds / beacon / args handler", () => { }); // Okay to hardcode, since this value will never change - expect(options.eth1.depositContractDeployBlock).equal(11052984, "Wrong mainnet eth1.depositContractDeployBlock"); + expect(options.eth1.depositContractDeployBlock).toBe(11052984); }); it("Apply custom network name from config file", async () => { @@ -99,19 +99,19 @@ describe("cmds / beacon / args handler", () => { }); // Okay to hardcode, since this value will never change - expect(network).equal(networkName, "Wrong network name"); + expect(network).toBe(networkName); }); }); describe("Test isLocalMultiAddr", () => { it("should return true for 127.0.0.1", () => { const multi0 = 
multiaddr("/ip4/127.0.0.1/udp/30303"); - expect(isLocalMultiAddr(multi0)).to.equal(true); + expect(isLocalMultiAddr(multi0)).toBe(true); }); it("should return false for 0.0.0.0", () => { const multi0 = multiaddr("/ip4/0.0.0.0/udp/30303"); - expect(isLocalMultiAddr(multi0)).to.equal(false); + expect(isLocalMultiAddr(multi0)).toBe(false); }); }); @@ -128,7 +128,7 @@ describe("initPeerIdAndEnr", () => { testLogger() ); - expect(peerId1.toString()).not.equal(peerId2.toString(), "peer ids must be different"); + expect(peerId1.toString()).not.toBe(peerId2.toString()); }); it("should reuse peer id, persistNetworkIdentity=true", async () => { @@ -143,7 +143,7 @@ describe("initPeerIdAndEnr", () => { testLogger() ); - expect(peerId1.toString()).to.equal(peerId2.toString(), "peer ids must be equal"); + expect(peerId1.toString()).toBe(peerId2.toString()); }); it("should overwrite invalid peer id", async () => { @@ -157,8 +157,8 @@ describe("initPeerIdAndEnr", () => { ); const filePeerId = await createFromJSON(JSON.parse(fs.readFileSync(peerIdFile, "utf-8"))); - expect(peerId1Str).not.equal(peerId2.toString(), "peer ids must be different"); - expect(filePeerId.toString()).to.equal(peerId2.toString(), "peer ids must be equal"); + expect(peerId1Str).not.toBe(peerId2.toString()); + expect(filePeerId.toString()).toBe(peerId2.toString()); }); it("should overwrite invalid enr", async () => { @@ -170,7 +170,7 @@ describe("initPeerIdAndEnr", () => { const validEnr = fs.readFileSync(enrFilePath, "utf-8"); - expect(validEnr).not.equal(invalidEnr, "enrs must be different"); + expect(validEnr).not.toBe(invalidEnr); }); it("should overwrite enr that doesn't match peer id", async () => { @@ -182,7 +182,7 @@ describe("initPeerIdAndEnr", () => { const {enr} = await initPeerIdAndEnr({persistNetworkIdentity: true} as BeaconArgs, testFilesDir, testLogger()); - expect(enr.nodeId).not.equal(otherEnr, "enrs must be different"); + expect(enr.nodeId).not.toBe(otherEnr); }); }); diff --git 
a/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts b/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts index 4bdfedf64b95..a207e0c0f59d 100644 --- a/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts +++ b/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import tmp from "tmp"; -import {expect} from "chai"; import {initPeerIdAndEnr} from "../../../src/cmds/beacon/initPeerIdAndEnr.js"; import {BeaconArgs} from "../../../src/cmds/beacon/options.js"; import {testLogger} from "../../utils.js"; @@ -23,12 +23,11 @@ describe("initPeerIdAndEnr", () => { testLogger(), true ); - expect((await enr.peerId()).toString(), "enr peer id doesn't equal the returned peer id").to.equal( - peerId.toString() - ); - expect(enr.seq).to.equal(BigInt(1)); - expect(enr.tcp).to.equal(undefined); - expect(enr.tcp6).to.equal(undefined); + // "enr peer id doesn't equal the returned peer id" + expect((await enr.peerId()).toString()).toBe(peerId.toString()); + expect(enr.seq).toBe(BigInt(1)); + expect(enr.tcp).toBeUndefined(); + expect(enr.tcp6).toBeUndefined(); }); it("second time should use ths existing enr and peer id", async () => { @@ -46,7 +45,7 @@ describe("initPeerIdAndEnr", () => { true ); - expect(run1.peerId.toString()).to.equal(run2.peerId.toString()); - expect(run1.enr.encodeTxt()).to.equal(run2.enr.encodeTxt()); + expect(run1.peerId.toString()).toBe(run2.peerId.toString()); + expect(run1.enr.encodeTxt()).toBe(run2.enr.encodeTxt()); }); }); diff --git a/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts b/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts index 59113b273435..ee0fedf301b6 100644 --- a/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts +++ b/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts @@ -1,9 +1,7 @@ import fs from "node:fs"; import {randomBytes} from 
"node:crypto"; +import {describe, it, expect, beforeEach} from "vitest"; import tmp from "tmp"; -import {expect} from "chai"; -import chainAsPromised from "chai-as-promised"; -import chai from "chai"; import {Keystore} from "@chainsafe/bls-keystore"; import bls from "@chainsafe/bls"; import {interopSecretKey} from "@lodestar/state-transition"; @@ -11,8 +9,6 @@ import {SignerLocal, SignerType} from "@lodestar/validator"; import {loadKeystoreCache, writeKeystoreCache} from "../../../../../src/cmds/validator/keymanager/keystoreCache.js"; import {LocalKeystoreDefinition} from "../../../../../src/cmds/validator/keymanager/interface.js"; -chai.use(chainAsPromised); - const numberOfSigners = 10; describe("keystoreCache", () => { @@ -23,7 +19,6 @@ describe("keystoreCache", () => { let keystoreCacheFile: string; beforeEach(async function setup() { - this.timeout(50000); definitions = []; signers = []; secretKeys = []; @@ -55,16 +50,16 @@ describe("keystoreCache", () => { passwords.push(password); secretKeys.push(secretKey.toBytes()); } - }); + }, 50000); describe("writeKeystoreCache", () => { it("should write a valid keystore cache file", async () => { - await expect(writeKeystoreCache(keystoreCacheFile, signers, passwords)).to.fulfilled; - expect(fs.existsSync(keystoreCacheFile)).to.be.true; + await expect(writeKeystoreCache(keystoreCacheFile, signers, passwords)).resolves.toBeUndefined(); + expect(fs.existsSync(keystoreCacheFile)).toBe(true); }); it("should throw error if password length are not same as signers", async () => { - await expect(writeKeystoreCache(keystoreCacheFile, signers, [passwords[0]])).to.rejectedWith( + await expect(writeKeystoreCache(keystoreCacheFile, signers, [passwords[0]])).rejects.toThrow( `Number of signers and passwords must be equal. 
signers=${numberOfSigners}, passwords=1` ); }); @@ -75,14 +70,14 @@ describe("keystoreCache", () => { await writeKeystoreCache(keystoreCacheFile, signers, passwords); const result = await loadKeystoreCache(keystoreCacheFile, definitions); - expect(result.map((r) => r.secretKey.toBytes())).to.eql(secretKeys); + expect(result.map((r) => r.secretKey.toBytes())).toEqual(secretKeys); }); it("should raise error for mismatch public key", async () => { await writeKeystoreCache(keystoreCacheFile, signers, passwords); definitions[0].keystorePath = definitions[1].keystorePath; - await expect(loadKeystoreCache(keystoreCacheFile, definitions)).to.rejected; + await expect(loadKeystoreCache(keystoreCacheFile, definitions)).rejects.toBeDefined(); }); }); }); diff --git a/packages/cli/test/unit/config/beaconNodeOptions.test.ts b/packages/cli/test/unit/config/beaconNodeOptions.test.ts index 4e1a44102bf7..d35cf06ad1a4 100644 --- a/packages/cli/test/unit/config/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/config/beaconNodeOptions.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {defaultOptions} from "@lodestar/beacon-node"; import {BeaconNodeOptions} from "../../../src/config/index.js"; @@ -8,7 +8,7 @@ describe("config / beaconNodeOptions", () => { // Asserts only part of the data structure to avoid unnecesary duplicate code const optionsPartial = beaconNodeOptions.getWithDefaults(); - expect(optionsPartial?.api?.rest?.port).to.equal(defaultOptions.api.rest.port, "default api.rest.port not applied"); + expect(optionsPartial?.api?.rest?.port).toBe(defaultOptions.api.rest.port); }); it("Should return added partial options", () => { @@ -19,6 +19,6 @@ describe("config / beaconNodeOptions", () => { beaconNodeOptions.set(editedPartialOptions); const optionsPartial = beaconNodeOptions.get(); - expect(optionsPartial).to.deep.equal(editedPartialOptions); + expect(optionsPartial).toEqual(editedPartialOptions); }); }); diff 
--git a/packages/cli/test/unit/config/beaconParams.test.ts b/packages/cli/test/unit/config/beaconParams.test.ts index 6a953c584c73..4fedaf788601 100644 --- a/packages/cli/test/unit/config/beaconParams.test.ts +++ b/packages/cli/test/unit/config/beaconParams.test.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll} from "vitest"; import yaml from "js-yaml"; import {toHexString} from "@chainsafe/ssz"; import {getTestdirPath} from "../../utils.js"; @@ -59,19 +59,18 @@ describe("config / beaconParams", () => { }, ]; - before("Write config file", () => { + beforeAll(() => { // eslint-disable-next-line @typescript-eslint/naming-convention fs.writeFileSync(paramsFilepath, yaml.dump({GENESIS_FORK_VERSION: GENESIS_FORK_VERSION_FILE})); }); - after("Remove config file", () => { + afterAll(() => { if (fs.existsSync(paramsFilepath)) fs.unlinkSync(paramsFilepath); }); - for (const {id, kwargs, GENESIS_FORK_VERSION} of testCases) { - it(id, () => { - const params = getBeaconParams(kwargs); - expect(toHexString(params.GENESIS_FORK_VERSION)).to.equal(GENESIS_FORK_VERSION); - }); - } + // eslint-disable-next-line @typescript-eslint/naming-convention + it.each(testCases)("$id", ({kwargs, GENESIS_FORK_VERSION}) => { + const params = getBeaconParams(kwargs); + expect(toHexString(params.GENESIS_FORK_VERSION)).toBe(GENESIS_FORK_VERSION); + }); }); diff --git a/packages/cli/test/unit/config/peerId.test.ts b/packages/cli/test/unit/config/peerId.test.ts index 618c97a99062..c0cdc8cff1a9 100644 --- a/packages/cli/test/unit/config/peerId.test.ts +++ b/packages/cli/test/unit/config/peerId.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createSecp256k1PeerId} from "@libp2p/peer-id-factory"; import {getTestdirPath} from "../../utils.js"; import {writePeerId, readPeerId} from "../../../src/config/index.js"; @@ -11,6 +11,6 @@ describe("config / peerId", () => { 
writePeerId(peerIdFilepath, peerId); const peerIdRead = await readPeerId(peerIdFilepath); - expect(peerIdRead.toString()).to.equal(peerId.toString()); + expect(peerIdRead.toString()).toBe(peerId.toString()); }); }); diff --git a/packages/cli/test/unit/db.test.ts b/packages/cli/test/unit/db.test.ts index f951b3e6923b..1e19e514e9e5 100644 --- a/packages/cli/test/unit/db.test.ts +++ b/packages/cli/test/unit/db.test.ts @@ -1,3 +1,4 @@ +import {describe, it} from "vitest"; // eslint-disable-next-line import/no-relative-packages import {Bucket as BeaconBucket} from "../../../beacon-node/src/db/buckets.js"; // eslint-disable-next-line import/no-relative-packages diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index bc302bbc17d6..8a9ccff5a917 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {IBeaconNodeOptions} from "@lodestar/beacon-node"; import {RecursivePartial} from "@lodestar/utils"; import {parseBeaconNodeArgs, BeaconNodeArgs} from "../../../src/options/beaconNodeOptions/index.js"; @@ -216,7 +216,7 @@ describe("options / beaconNodeOptions", () => { }; const options = parseBeaconNodeArgs(beaconNodeArgsPartial); - expect(options).to.deep.equal(expectedOptions); + expect(options).toEqual(expectedOptions); }); it("Should use execution endpoint & jwt for eth1", () => { @@ -240,6 +240,6 @@ describe("options / beaconNodeOptions", () => { }; const options = parseBeaconNodeArgs(beaconNodeArgsPartial); - expect(options.eth1).to.deep.equal(expectedOptions.eth1); + expect(options.eth1).toEqual(expectedOptions.eth1); }); }); diff --git a/packages/cli/test/unit/options/paramsOptions.test.ts b/packages/cli/test/unit/options/paramsOptions.test.ts index d8a2982b1976..a08c70008612 100644 --- 
a/packages/cli/test/unit/options/paramsOptions.test.ts +++ b/packages/cli/test/unit/options/paramsOptions.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseBeaconParamsArgs} from "../../../src/options/paramsOptions.js"; import {IBeaconParamsUnparsed} from "../../../src/config/types.js"; @@ -18,6 +18,6 @@ describe("options / paramsOptions", () => { }; const beaconParams = parseBeaconParamsArgs(beaconParamsArgs); - expect(beaconParams).to.deep.equal(expectedBeaconParams); + expect(beaconParams).toEqual(expectedBeaconParams); }); }); diff --git a/packages/cli/test/unit/paths/globalPaths.test.ts b/packages/cli/test/unit/paths/globalPaths.test.ts index 0f394dc50b1a..98de8fd87a79 100644 --- a/packages/cli/test/unit/paths/globalPaths.test.ts +++ b/packages/cli/test/unit/paths/globalPaths.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {getGlobalPaths} from "../../../src/paths/global.js"; describe("paths / global", () => { @@ -36,7 +36,7 @@ describe("paths / global", () => { for (const {id, args, globalPaths} of testCases) { it(id, () => { - expect(getGlobalPaths(args, args.network ?? network)).to.deep.equal(globalPaths); + expect(getGlobalPaths(args, args.network ?? 
network)).toEqual(globalPaths); }); } }); diff --git a/packages/cli/test/unit/util/extractJwtHexSecret.test.ts b/packages/cli/test/unit/util/extractJwtHexSecret.test.ts index 37996c2b625f..bb7032390208 100644 --- a/packages/cli/test/unit/util/extractJwtHexSecret.test.ts +++ b/packages/cli/test/unit/util/extractJwtHexSecret.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {extractJwtHexSecret} from "../../../src/util/index.js"; describe("parseJwtHexSecret", () => { @@ -18,7 +18,7 @@ describe("parseJwtHexSecret", () => { ]; for (const {raw, parsed} of testCases) { it(`parse ${raw}`, () => { - expect(parsed).to.be.equal(extractJwtHexSecret(raw)); + expect(parsed).toBe(extractJwtHexSecret(raw)); }); } }); @@ -30,7 +30,7 @@ describe("invalid jwtHexSecret", () => { ]; for (const {raw, error} of testCases) { it(`should error on ${error}: ${raw}`, () => { - expect(() => extractJwtHexSecret(raw)).to.throw(); + expect(() => extractJwtHexSecret(raw)).toThrow(); }); } }); diff --git a/packages/cli/test/unit/util/format.test.ts b/packages/cli/test/unit/util/format.test.ts index 9c51a2af8a4c..c06259cc1842 100644 --- a/packages/cli/test/unit/util/format.test.ts +++ b/packages/cli/test/unit/util/format.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isValidatePubkeyHex, parseRange} from "../../../src/util/index.js"; describe("util / format / parseRange", () => { @@ -10,7 +10,7 @@ describe("util / format / parseRange", () => { for (const {range, res} of testCases) { it(range, () => { - expect(parseRange(range)).to.deep.equal(res); + expect(parseRange(range)).toEqual(res); }); } }); @@ -27,7 +27,7 @@ describe("util / format / isValidatePubkeyHex", () => { for (const [pubkeyHex, isValid] of Object.entries(testCases)) { it(pubkeyHex, () => { - expect(isValidatePubkeyHex(pubkeyHex)).equals(isValid); + expect(isValidatePubkeyHex(pubkeyHex)).toBe(isValid); }); } }); diff 
--git a/packages/cli/test/unit/util/gitData.test.ts b/packages/cli/test/unit/util/gitData.test.ts index 18bd55118884..206dd070b545 100644 --- a/packages/cli/test/unit/util/gitData.test.ts +++ b/packages/cli/test/unit/util/gitData.test.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {findUpSync} from "find-up"; import {gitDataPath, readGitDataFile} from "../../../src/util/gitData/gitDataPath.js"; import {getGitData} from "../../../src/util/index.js"; @@ -20,7 +20,7 @@ describe("util / gitData", function () { it("gitData file must exist", () => { const gitData = readGitDataFile(); - expect(gitData).to.deep.equal(getGitData(), "Wrong git-data.json contents"); + expect(gitData).toEqual(getGitData()); }); it("gitData path must be included in the package.json", () => { @@ -32,6 +32,6 @@ describe("util / gitData", function () { const pkgJson = JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")) as {files: string[]}; const gitDataPathFromPkgJson = path.relative(path.dirname(pkgJsonPath), gitDataPath); - expect(pkgJson.files).to.include(gitDataPathFromPkgJson, "package.json .files does not include gitData path"); + expect(pkgJson.files).toContain(gitDataPathFromPkgJson); }); }); diff --git a/packages/cli/test/unit/util/logger.test.ts b/packages/cli/test/unit/util/logger.test.ts index 5e792e9750d4..bddc86f2a483 100644 --- a/packages/cli/test/unit/util/logger.test.ts +++ b/packages/cli/test/unit/util/logger.test.ts @@ -1,18 +1,17 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {shouldDeleteLogFile} from "../../../src/util/logger.js"; describe("shouldDeleteLogFile", function () { const prefix = "beacon"; const extension = "log"; - const sandbox = sinon.createSandbox(); beforeEach(() => { - sandbox.useFakeTimers(new Date("2023-01-01")); + 
vi.useFakeTimers({now: new Date("2023-01-01")}); }); afterEach(() => { - sandbox.restore(); + vi.useRealTimers(); + vi.clearAllTimers(); }); const tcs: {logFile: string; maxFiles: number; now: number; result: boolean}[] = [ // missing .log @@ -55,7 +54,7 @@ describe("shouldDeleteLogFile", function () { it(`should ${ result ? "" : "not" } delete ${logFile}, maxFiles ${maxFiles}, today ${new Date().toUTCString()}`, () => { - expect(shouldDeleteLogFile(prefix, extension, logFile, maxFiles)).to.be.equal(result); + expect(shouldDeleteLogFile(prefix, extension, logFile, maxFiles)).toBe(result); }); } }); diff --git a/packages/cli/test/unit/util/parseBootnodesFile.test.ts b/packages/cli/test/unit/util/parseBootnodesFile.test.ts index db1a90fbf1c5..07338192bc68 100644 --- a/packages/cli/test/unit/util/parseBootnodesFile.test.ts +++ b/packages/cli/test/unit/util/parseBootnodesFile.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseBootnodesFile} from "../../../src/util/index.js"; describe("config / bootnodes / parsing", () => { @@ -18,7 +18,7 @@ describe("config / bootnodes / parsing", () => { { "enrs": [ - "enr:-cabfg", + "enr:-cabfg", "enr:-deadbeef" ] } @@ -124,9 +124,7 @@ enr:-LK4QKWrXTpV9T78hNG6s8AM6IO4XH9kFT91uZtFg1GcsJ6dKovDOr1jtAAFPnS2lvNltkOGA9k2 }, ]; - for (const {name, input, expected} of testCases) { - it(name, () => { - expect(parseBootnodesFile(input)).to.be.deep.equal(expected); - }); - } + it.each(testCases)("$name", ({input, expected}) => { + expect(parseBootnodesFile(input)).toEqual(expected); + }); }); diff --git a/packages/cli/test/unit/util/progress.test.ts b/packages/cli/test/unit/util/progress.test.ts index b62b54dbdd87..d04d959d7422 100644 --- a/packages/cli/test/unit/util/progress.test.ts +++ b/packages/cli/test/unit/util/progress.test.ts @@ -1,102 +1,100 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; 
import {showProgress} from "../../../src/util/progress.js"; describe("progress", () => { - const sandbox = sinon.createSandbox(); - describe("showProgress", () => { beforeEach(() => { - sandbox.useFakeTimers(); + vi.useFakeTimers(); }); afterEach(() => { - sandbox.restore(); + vi.useRealTimers(); + vi.clearAllTimers(); }); it("should call progress with correct frequency", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; showProgress({total: 10, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 4); + vi.advanceTimersByTime(frequencyMs * 4); - expect(progress.callCount).to.be.equal(4); + expect(progress).toBeCalledTimes(4); }); it("should call progress with correct values", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(1); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(3); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); - expect(progress).to.be.calledThrice; - expect(progress.firstCall.args[0]).to.eql({total, current: 0, ratePerSec: 0, percentage: 0}); - expect(progress.secondCall.args[0]).to.eql({total, current: 2, ratePerSec: 40, percentage: 25}); - expect(progress.thirdCall.args[0]).to.eql({total, current: 4, ratePerSec: 40, percentage: 50}); + expect(progress).toHaveBeenCalledTimes(3); + expect(progress).nthCalledWith(1, {total, current: 0, ratePerSec: 0, percentage: 0}); + expect(progress).nthCalledWith(2, {total, current: 2, ratePerSec: 40, percentage: 25}); + expect(progress).nthCalledWith(3, {total, current: 4, ratePerSec: 40, percentage: 50}); }); it("should call progress with correct values when reach total", () => { - const progress = sandbox.spy(); + const progress 
= vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); needle(1); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(7); // Once by timer and second time because of reaching total - expect(progress).to.be.calledTwice; + expect(progress).toHaveBeenCalledTimes(2); // ratePerSec is 0 (actually Infinity) because we reached total without moving the clock time - expect(progress.secondCall.args[0]).to.eql({total, current: total, ratePerSec: 0, percentage: 100}); + expect(progress).nthCalledWith(2, {total, current: total, ratePerSec: 0, percentage: 100}); }); it("should call progress with correct values directly reaches to total", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); needle(7); - expect(progress).to.be.calledOnce; - expect(progress.firstCall.args[0]).to.eql({total, current: total, ratePerSec: 0, percentage: 100}); + expect(progress).toHaveBeenCalledTimes(1); + expect(progress).nthCalledWith(1, {total, current: total, ratePerSec: 0, percentage: 100}); }); it("should not call progress when initiated with zero total", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 0; showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); - expect(progress).to.be.not.be.called; + expect(progress).not.toHaveBeenCalled(); }); it("should not call progress further when abort signal is called", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const controller = new AbortController(); showProgress({total: 10, signal: controller.signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); controller.abort(); - 
sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); - expect(progress.callCount).to.be.equal(2); + expect(progress).toBeCalledTimes(2); }); it("should not call progress further when total is reached", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const needle = showProgress({total: 10, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); needle(50); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); // 2 calls based on interval and 1 call based on reaching total - expect(progress.callCount).to.be.equal(2 + 1); + expect(progress).toBeCalledTimes(2 + 1); }); }); }); diff --git a/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts b/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts index a4c2cf9d05a4..d88f684902e0 100644 --- a/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts +++ b/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {pruneOldFilesInDir} from "../../../src/util/index.js"; import {testFilesDir} from "../../utils.js"; @@ -25,14 +25,14 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); const files = fs.readdirSync(dataDir); - expect(files).to.not.include(oldFile); + expect(files).toEqual(expect.not.arrayContaining([oldFile])); }); it("should not delete new files", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); const files = fs.readdirSync(dataDir); - expect(files).to.include(newFile); + expect(files).toEqual(expect.arrayContaining([newFile])); }); it("should delete old files in nested directories", () => { @@ -43,7 +43,7 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); - 
expect(fs.readdirSync(nestedDir)).to.be.empty; + expect(fs.readdirSync(nestedDir)).toHaveLength(0); }); it("should handle empty directories", () => { @@ -52,7 +52,7 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(emptyDir, DAYS_TO_MS); - expect(fs.readdirSync(emptyDir)).to.be.empty; + expect(fs.readdirSync(emptyDir)).toHaveLength(0); }); function createFileWithAge(path: string, ageInDays: number): void { diff --git a/packages/cli/test/unit/util/stripOffNewlines.test.ts b/packages/cli/test/unit/util/stripOffNewlines.test.ts index 839625836fb5..3a5a5a1f3523 100644 --- a/packages/cli/test/unit/util/stripOffNewlines.test.ts +++ b/packages/cli/test/unit/util/stripOffNewlines.test.ts @@ -1,34 +1,34 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {stripOffNewlines} from "../../../src/util/index.js"; describe("stripOffNewlines", () => { it("should remove trailing newlines from a string", () => { - expect(stripOffNewlines("1231321\n")).to.equal("1231321"); - expect(stripOffNewlines("1231321\r")).to.equal("1231321"); - expect(stripOffNewlines("1231321\r\n")).to.equal("1231321"); - expect(stripOffNewlines("1231321\n\n\r")).to.equal("1231321"); - expect(stripOffNewlines("1231321\n\r\n")).to.equal("1231321"); - expect(stripOffNewlines("\n\r\n")).to.equal(""); + expect(stripOffNewlines("1231321\n")).toBe("1231321"); + expect(stripOffNewlines("1231321\r")).toBe("1231321"); + expect(stripOffNewlines("1231321\r\n")).toBe("1231321"); + expect(stripOffNewlines("1231321\n\n\r")).toBe("1231321"); + expect(stripOffNewlines("1231321\n\r\n")).toBe("1231321"); + expect(stripOffNewlines("\n\r\n")).toBe(""); }); it("should not remove pipe character(s) at the end of a string", () => { - expect(stripOffNewlines("1231321|")).to.equal("1231321|"); - expect(stripOffNewlines("1231321||")).to.equal("1231321||"); - expect(stripOffNewlines("1231321|||")).to.equal("1231321|||"); + expect(stripOffNewlines("1231321|")).toBe("1231321|"); + 
expect(stripOffNewlines("1231321||")).toBe("1231321||"); + expect(stripOffNewlines("1231321|||")).toBe("1231321|||"); }); it("should not remove newlines in the middle of a string", () => { - expect(stripOffNewlines("123\n1321\n\n\n")).to.equal("123\n1321"); + expect(stripOffNewlines("123\n1321\n\n\n")).toBe("123\n1321"); }); it("should not modify the string if there are no new lines", () => { - expect(stripOffNewlines("1231321")).to.equal("1231321"); - expect(stripOffNewlines("")).to.equal(""); + expect(stripOffNewlines("1231321")).toBe("1231321"); + expect(stripOffNewlines("")).toBe(""); }); it("should not mutate the original string", () => { const originalString = "123\n1321\n\n\n"; stripOffNewlines(originalString); - expect(originalString).to.equal("123\n1321\n\n\n"); + expect(originalString).toBe("123\n1321\n\n\n"); }); }); diff --git a/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts b/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts index dc3ce5dff5ad..f24b83ae43a6 100644 --- a/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts +++ b/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts @@ -1,15 +1,15 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeEach, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {getKeystoresStr} from "@lodestar/test-utils"; import {cachedSeckeysHex} from "../../utils/cachedKeys.js"; import {testFilesDir} from "../../utils.js"; import {decryptKeystoreDefinitions} from "../../../src/cmds/validator/keymanager/decryptKeystoreDefinitions.js"; import {LocalKeystoreDefinition} from "../../../src/cmds/validator/keymanager/interface.js"; -describe("decryptKeystoreDefinitions", function () { - this.timeout(100_000); +describe("decryptKeystoreDefinitions", () => { + vi.setConfig({testTimeout: 100_000}); const signal = new AbortController().signal; const dataDir = path.join(testFilesDir, 
"decrypt-keystores-test"); @@ -19,11 +19,9 @@ describe("decryptKeystoreDefinitions", function () { const keyCount = 2; const secretKeys = cachedSeckeysHex.slice(0, keyCount); - // Produce and encrypt keystores let definitions: LocalKeystoreDefinition[] = []; - beforeEach("Prepare dataDir", async () => { - // wipe out data dir and existing keystores + beforeEach(async () => { rimraf.sync(dataDir); rimraf.sync(importFromDir); @@ -31,7 +29,7 @@ describe("decryptKeystoreDefinitions", function () { const keystoresStr = await getKeystoresStr(password, secretKeys); definitions = []; - // write keystores to disk + for (let i = 0; i < keyCount; i++) { const keystorePath = path.join(importFromDir, `keystore_${i}.json`); fs.writeFileSync(keystorePath, keystoresStr[i]); @@ -39,13 +37,13 @@ describe("decryptKeystoreDefinitions", function () { } }); - context("with keystore cache", () => { + describe("with keystore cache", () => { const cacheFilePath = path.join(dataDir, "cache", "keystores.cache"); beforeEach(async () => { // create cache file to ensure keystores are loaded from cache during tests await decryptKeystoreDefinitions(definitions, {logger: console, cacheFilePath, signal}); - expect(fs.existsSync(cacheFilePath)).to.be.true; + expect(fs.existsSync(cacheFilePath)).toBe(true); // remove lockfiles created during cache file preparation rimraf.sync(path.join(importFromDir, "*.lock"), {glob: true}); @@ -54,17 +52,18 @@ describe("decryptKeystoreDefinitions", function () { testDecryptKeystoreDefinitions(cacheFilePath); }); - context("without keystore cache", () => { + describe("without keystore cache", () => { testDecryptKeystoreDefinitions(); }); function testDecryptKeystoreDefinitions(cacheFilePath?: string): void { it("decrypt keystores", async () => { const signers = await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); - expect(signers.length).to.equal(secretKeys.length); + expect(signers.length).toBe(secretKeys.length); for (const 
signer of signers) { const hexSecret = signer.secretKey.toHex(); - expect(secretKeys.includes(hexSecret), `secretKeys doesn't include ${hexSecret}`).to.be.true; + + expect(secretKeys.includes(hexSecret)).toBe(true); } }); @@ -76,14 +75,14 @@ describe("decryptKeystoreDefinitions", function () { await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); expect.fail("Second decrypt should fail due to failure to get lockfile"); } catch (e) { - expect((e as Error).message.startsWith("EEXIST: file already exists"), "Wrong error is thrown").to.be.true; + expect((e as Error).message.startsWith("EEXIST: file already exists")).toBe(true); } }); it("decrypt keystores if lockfiles already exist if ignoreLockFile=true", async () => { await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); - // lockfiles should exist after the first run + // lockfiles should exist after the first run await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath, ignoreLockFile: true}); }); } diff --git a/packages/cli/test/unit/validator/keys.test.ts b/packages/cli/test/unit/validator/keys.test.ts index 686915e27db4..c977c2242c33 100644 --- a/packages/cli/test/unit/validator/keys.test.ts +++ b/packages/cli/test/unit/validator/keys.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import path from "node:path"; -import {expect} from "chai"; +import {describe, it, expect, afterEach} from "vitest"; import {importKeystoreDefinitionsFromExternalDir} from "../../../src/cmds/validator/signers/importExternalKeystores.js"; describe("validator / signers / importKeystoreDefinitionsFromExternalDir", () => { @@ -27,10 +27,7 @@ describe("validator / signers / importKeystoreDefinitionsFromExternalDir", () => const password = "12345678"; const definitions = importKeystoreDefinitionsFromExternalDir({keystoresPath: [tmpDir], password}); - expect(definitions.map((def) => def.keystorePath).sort()).to.deep.equal( - 
toReadFilepaths.sort(), - "Wrong read keystore paths" - ); + expect(definitions.map((def) => def.keystorePath).sort()).toEqual(toReadFilepaths.sort()); }); function inTmp(filepath: string): string { diff --git a/packages/cli/test/unit/validator/options.test.ts b/packages/cli/test/unit/validator/options.test.ts index dbe961ed0f33..627ee8f59818 100644 --- a/packages/cli/test/unit/validator/options.test.ts +++ b/packages/cli/test/unit/validator/options.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseFeeRecipient} from "../../../src/util/index.js"; const feeRecipient = Buffer.from(Array.from({length: 20}, () => Math.round(Math.random() * 255))); @@ -8,7 +8,7 @@ describe("validator / parseFeeRecipient", () => { const testCases: string[] = [`0x${feeRecipientString}`, `0X${feeRecipientString}`]; for (const testCase of testCases) { it(`parse ${testCase}`, () => { - expect(`0x${feeRecipientString}`).to.be.deep.equal(parseFeeRecipient(testCase)); + expect(`0x${feeRecipientString}`).toEqual(parseFeeRecipient(testCase)); }); } }); @@ -22,7 +22,7 @@ describe("validator / invalid feeRecipient", () => { ]; for (const testCase of testCases) { it(`should error on ${testCase}`, () => { - expect(() => parseFeeRecipient(testCase)).to.throw(); + expect(() => parseFeeRecipient(testCase)).toThrow(); }); } }); diff --git a/packages/cli/test/unit/validator/parseProposerConfig.test.ts b/packages/cli/test/unit/validator/parseProposerConfig.test.ts index da459cf84c6e..fcb6933f035b 100644 --- a/packages/cli/test/unit/validator/parseProposerConfig.test.ts +++ b/packages/cli/test/unit/validator/parseProposerConfig.test.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ import path from "node:path"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {routes} from "@lodestar/api"; import {parseProposerConfig} from 
"../../../src/util/index.js"; @@ -42,12 +42,12 @@ const testValue = { describe("validator / valid Proposer", () => { it("parse Valid proposer", () => { - expect(parseProposerConfig(path.join(__dirname, "./proposerConfigs/validData.yaml"))).to.be.deep.equal(testValue); + expect(parseProposerConfig(path.join(__dirname, "./proposerConfigs/validData.yaml"))).toEqual(testValue); }); }); describe("validator / invalid Proposer", () => { it("should throw error", () => { - expect(() => parseProposerConfig(path.join(__dirname, "./proposerConfigs/invalidData.yaml"))).to.throw(); + expect(() => parseProposerConfig(path.join(__dirname, "./proposerConfigs/invalidData.yaml"))).toThrow(); }); }); diff --git a/packages/cli/test/utils/runUtils.ts b/packages/cli/test/utils/runUtils.ts index f6a9c311946b..8d2846d6e664 100644 --- a/packages/cli/test/utils/runUtils.ts +++ b/packages/cli/test/utils/runUtils.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {expect} from "vitest"; import {apiTokenFileName} from "../../src/cmds/validator/keymanager/server.js"; import {recursiveLookup} from "../../src/util/index.js"; @@ -17,12 +17,14 @@ export function findApiToken(dirpath: string): string { } export function expectDeepEquals(a: T, b: T, message: string): void { - expect(a).deep.equals(b, message); + expect(a).toEqualWithMessage(b, message); } /** * Similar to `expectDeepEquals` but only checks presence of all elements in array, irrespective of their order. 
*/ export function expectDeepEqualsUnordered(a: T[], b: T[], message: string): void { - expect(a).to.have.deep.members(b, message); + expect(a).toEqualWithMessage(expect.arrayContaining(b), message); + expect(b).toEqualWithMessage(expect.arrayContaining(a), message); + expect(a).toHaveLength(b.length); } diff --git a/packages/cli/test/utils/simulation/validator_clients/lodestar.ts b/packages/cli/test/utils/simulation/validator_clients/lodestar.ts index a85347d780c5..f7a1e808a778 100644 --- a/packages/cli/test/utils/simulation/validator_clients/lodestar.ts +++ b/packages/cli/test/utils/simulation/validator_clients/lodestar.ts @@ -15,7 +15,7 @@ import {getNodePorts} from "../utils/ports.js"; export const generateLodestarValidatorNode: ValidatorNodeGenerator = (opts, runner) => { const {paths, id, keys, forkConfig, genesisTime, nodeIndex, beaconUrls, clientOptions} = opts; const {rootDir, keystoresDir, keystoresSecretFilePath, logFilePath} = paths; - const {useProduceBlockV3, "builder.selection": builderSelection} = clientOptions ?? {}; + const {useProduceBlockV3, "builder.selection": builderSelection, blindedLocal} = clientOptions ?? 
{}; const ports = getNodePorts(nodeIndex); const rcConfigPath = path.join(rootDir, "rc_config.json"); const paramsPath = path.join(rootDir, "params.json"); @@ -39,8 +39,9 @@ export const generateLodestarValidatorNode: ValidatorNodeGenerator {} +export async function teardown(): Promise {} diff --git a/packages/config/test/unit/index.test.ts b/packages/config/test/unit/index.test.ts index 35dabd5dda61..a6fca7ad643a 100644 --- a/packages/config/test/unit/index.test.ts +++ b/packages/config/test/unit/index.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {toHexString} from "@chainsafe/ssz"; import {ForkName} from "@lodestar/params"; import {config, chainConfig} from "../../src/default.js"; @@ -12,30 +12,27 @@ describe("forks", () => { const fork2 = forks[i + 1]; // Use less equal to be okay with both forks being at Infinity - expect(fork1.epoch).to.be.at.most( - fork2.epoch, - `Forks are not sorted ${fork1.name} ${fork1.epoch} -> ${fork2.name} ${fork2.epoch}` - ); + expect(fork1.epoch).toBeLessThanOrEqual(fork2.epoch); } }); it("Get phase0 fork for slot 0", () => { const fork = config.getForkName(0); - expect(fork).to.equal(ForkName.phase0); + expect(fork).toBe(ForkName.phase0); }); it("correct prev data", () => { for (let i = 1; i < config.forksAscendingEpochOrder.length; i++) { const fork = config.forksAscendingEpochOrder[i]; const prevFork = config.forksAscendingEpochOrder[i - 1]; - expect(toHexString(fork.prevVersion)).to.equal(toHexString(prevFork.version), `Wrong prevVersion ${fork.name}`); - expect(fork.prevForkName).to.equal(prevFork.name, `Wrong prevName ${fork.name}`); + expect(toHexString(fork.prevVersion)).toBe(toHexString(prevFork.version)); + expect(fork.prevForkName).toBe(prevFork.name); } }); it("correctly handle pre-genesis", () => { // eslint-disable-next-line @typescript-eslint/naming-convention const postMergeTestnet = createForkConfig({...chainConfig, ALTAIR_FORK_EPOCH: 0, 
BELLATRIX_FORK_EPOCH: 0}); - expect(postMergeTestnet.getForkName(-1)).to.equal(ForkName.bellatrix); + expect(postMergeTestnet.getForkName(-1)).toBe(ForkName.bellatrix); }); }); diff --git a/packages/config/test/unit/json.test.ts b/packages/config/test/unit/json.test.ts index e04b566cc091..96ffd050a088 100644 --- a/packages/config/test/unit/json.test.ts +++ b/packages/config/test/unit/json.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {chainConfigFromJson, chainConfigToJson} from "../../src/index.js"; import {chainConfig} from "../../src/default.js"; @@ -7,6 +7,6 @@ describe("chainConfig JSON", () => { const json = chainConfigToJson(chainConfig); const chainConfigRes = chainConfigFromJson(json); - expect(chainConfigRes).to.deep.equal(chainConfig); + expect(chainConfigRes).toEqual(chainConfig); }); }); diff --git a/packages/config/vitest.config.ts b/packages/config/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/config/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/db/package.json b/packages/db/package.json index 1f83c4e25f3c..1bccf368c55f 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/db", - "version": "1.13.0", + "version": "1.14.0", "description": "DB modules of Lodestar", "author": "ChainSafe Systems", "homepage": "https://github.com/ChainSafe/lodestar#readme", @@ -33,18 +33,18 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { 
"@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/config": "^1.14.0", + "@lodestar/utils": "^1.14.0", "@types/levelup": "^4.3.3", "it-all": "^3.0.2", "level": "^8.0.0" }, "devDependencies": { - "@lodestar/logger": "^1.13.0" + "@lodestar/logger": "^1.14.0" } } diff --git a/packages/db/src/controller/metrics.ts b/packages/db/src/controller/metrics.ts index b4b8a0bf0963..4827d6fb4515 100644 --- a/packages/db/src/controller/metrics.ts +++ b/packages/db/src/controller/metrics.ts @@ -1,26 +1,10 @@ +import {Counter, Gauge, Histogram} from "@lodestar/utils"; + export type LevelDbControllerMetrics = { - dbReadReq: Counter<"bucket">; - dbReadItems: Counter<"bucket">; - dbWriteReq: Counter<"bucket">; - dbWriteItems: Counter<"bucket">; + dbReadReq: Counter<{bucket: string}>; + dbReadItems: Counter<{bucket: string}>; + dbWriteReq: Counter<{bucket: string}>; + dbWriteItems: Counter<{bucket: string}>; dbSizeTotal: Gauge; dbApproximateSizeTime: Histogram; }; - -type Labels = Partial>; - -interface Counter { - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void; -} - -interface Gauge { - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void; -} - -interface Histogram { - startTimer(): () => number; -} diff --git a/packages/db/test/globalSetup.ts b/packages/db/test/globalSetup.ts new file mode 100644 index 000000000000..02074bb24d11 --- /dev/null +++ b/packages/db/test/globalSetup.ts @@ -0,0 +1,4 @@ +export async function setup(): Promise { + process.env.NODE_ENV = "test"; +} +export async function teardown(): Promise {} diff --git a/packages/db/test/unit/controller/level.test.ts b/packages/db/test/unit/controller/level.test.ts index 768ef3a39006..33d5b9a86c2a 100644 --- a/packages/db/test/unit/controller/level.test.ts +++ b/packages/db/test/unit/controller/level.test.ts @@ -1,6 +1,6 @@ 
import {execSync} from "node:child_process"; import os from "node:os"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll} from "vitest"; import leveldown from "leveldown"; import all from "it-all"; import {getEnvLogger} from "@lodestar/logger/env"; @@ -10,11 +10,11 @@ describe("LevelDB controller", () => { const dbLocation = "./.__testdb"; let db: LevelDbController; - before(async () => { + beforeAll(async () => { db = await LevelDbController.create({name: dbLocation}, {metrics: null, logger: getEnvLogger()}); }); - after(async () => { + afterAll(async () => { await db.close(); await new Promise((resolve, reject) => { leveldown.destroy(dbLocation, (err) => { @@ -26,16 +26,16 @@ describe("LevelDB controller", () => { it("test get not found", async () => { const key = Buffer.from("not-existing-key"); - expect(await db.get(key)).to.equal(null); + expect(await db.get(key)).toBe(null); }); it("test put/get/delete", async () => { const key = Buffer.from("test"); const value = Buffer.from("some value"); await db.put(key, value); - expect(await db.get(key)).to.be.deep.equal(value); + expect(await db.get(key)).toEqual(value); await db.delete(key); - expect(await db.get(key)).to.equal(null); + expect(await db.get(key)).toBe(null); }); it("test batchPut", async () => { @@ -51,8 +51,8 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect(await db.get(k1)).to.not.be.null; - expect(await db.get(k2)).to.not.be.null; + expect(await db.get(k1)).not.toBeNull(); + expect(await db.get(k2)).not.toBeNull(); }); it("test batch delete", async () => { @@ -69,9 +69,9 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect((await db.entries()).length).to.equal(2); + expect((await db.entries()).length).toBe(2); await db.batchDelete([k1, k2]); - expect((await db.entries()).length).to.equal(0); + expect((await db.entries()).length).toBe(0); }); it("test entries", async () => { @@ -91,7 +91,7 @@ 
describe("LevelDB controller", () => { gte: k1, lte: k2, }); - expect(result.length).to.be.equal(2); + expect(result.length).toBe(2); }); it("test entriesStream", async () => { @@ -112,7 +112,7 @@ describe("LevelDB controller", () => { lte: k2, }); const result = await all(resultStream); - expect(result.length).to.be.equal(2); + expect(result.length).toBe(2); }); it("test compactRange + approximateSize", async () => { @@ -129,11 +129,11 @@ describe("LevelDB controller", () => { await db.compactRange(minKey, maxKey); const sizeAfterCompact = getDbSize(); - expect(sizeAfterCompact).lt(sizeBeforeCompact, "Expected sizeAfterCompact < sizeBeforeCompact"); + expect(sizeAfterCompact).toBeLessThan(sizeBeforeCompact); // approximateSize is not exact, just test a number is positive const approxSize = await db.approximateSize(minKey, maxKey); - expect(approxSize).gt(0, "approximateSize return not > 0"); + expect(approxSize).toBeGreaterThan(0); }); function getDuCommand(): string { diff --git a/packages/db/test/unit/schema.test.ts b/packages/db/test/unit/schema.test.ts index 46cb3af23de6..c72611453e4f 100644 --- a/packages/db/test/unit/schema.test.ts +++ b/packages/db/test/unit/schema.test.ts @@ -1,4 +1,4 @@ -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {intToBytes} from "@lodestar/utils"; import {BUCKET_LENGTH, encodeKey} from "../../src/index.js"; @@ -25,7 +25,7 @@ describe("encodeKey", () => { expected = Buffer.concat([intToBytes(bucket, BUCKET_LENGTH, "le"), intToBytes(BigInt(key), 8, "be")]); } const actual = encodeKey(bucket, key); - assert.deepEqual(actual, expected); + expect(actual).toEqual(expected); }); } }); diff --git a/packages/db/vitest.config.ts b/packages/db/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/db/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default 
mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/flare/.mocharc.yaml b/packages/flare/.mocharc.yaml deleted file mode 100644 index 55ad4d0122e1..000000000000 --- a/packages/flare/.mocharc.yaml +++ /dev/null @@ -1,5 +0,0 @@ -exit: true -extension: ["ts"] -colors: true -node-option: - - "loader=ts-node/esm" diff --git a/packages/flare/package.json b/packages/flare/package.json index 159ff7d3c69f..d3849d1b5362 100644 --- a/packages/flare/package.json +++ b/packages/flare/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/flare", - "version": "1.13.0", + "version": "1.14.0", "description": "Beacon chain debugging tool", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -44,7 +44,7 @@ "lint": "eslint --color --ext .ts src/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "repository": { @@ -60,12 +60,12 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/bls-keygen": "^0.3.0", - "@lodestar/api": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "source-map-support": "^0.5.21", "yargs": "^17.7.1" }, diff --git a/packages/flare/test/globalSetup.ts b/packages/flare/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/flare/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git 
a/packages/flare/test/unit/utils/format.test.ts b/packages/flare/test/unit/utils/format.test.ts index 0eb898a9fd0f..c6ef8805fe7b 100644 --- a/packages/flare/test/unit/utils/format.test.ts +++ b/packages/flare/test/unit/utils/format.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseRange} from "../../../src/util/format.js"; describe("utils / format", () => { @@ -10,7 +10,7 @@ describe("utils / format", () => { for (const {range, indexes} of testCases) { it(range, () => { - expect(parseRange(range)).to.deep.equal(indexes); + expect(parseRange(range)).toEqual(indexes); }); } }); diff --git a/packages/flare/vitest.config.ts b/packages/flare/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/flare/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index dbbbcb6d3dbd..b19fa976bdf4 100644 --- a/packages/fork-choice/package.json +++ b/packages/fork-choice/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": "./lib/index.js", "types": "./lib/index.d.ts", @@ -34,16 +34,16 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "mocha --colors -r ts-node/register 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - "@lodestar/types": "^1.13.0", - 
"@lodestar/utils": "^1.13.0" + "@lodestar/config": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0" }, "keywords": [ "ethereum", diff --git a/packages/fork-choice/test/globalSetup.ts b/packages/fork-choice/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/fork-choice/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts index fe4f9a7afaad..fe11532dbb6f 100644 --- a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeEach, beforeAll} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {config} from "@lodestar/config/default"; import {RootHex, Slot} from "@lodestar/types"; @@ -119,12 +119,12 @@ describe("Forkchoice", function () { const forkchoice = new ForkChoice(config, fcStore, protoArr); const summaries = forkchoice.getAllAncestorBlocks(getBlockRoot(genesisSlot + 1)); // there are 2 blocks in protoArray but iterateAncestorBlocks should only return non-finalized blocks - expect(summaries.length).to.be.equals(1, "should not return the finalized block"); - expect(summaries[0]).to.be.deep.include(block, "the block summary is not correct"); + expect(summaries).toHaveLength(1); + expect(summaries[0]).toEqual({...block, bestChild: undefined, bestDescendant: undefined, parent: 0, weight: 0}); }); - before("Assert SLOTS_PER_EPOCH", () => { - expect(SLOTS_PER_EPOCH).equals(32, "Unexpected SLOTS_PER_EPOCH value"); + beforeAll(() => { + expect(SLOTS_PER_EPOCH).toBe(32); }); const dependentRootTestCases: {atSlot: Slot; pivotSlot: Slot; epoch: 
EpochDifference; skipped: Slot[]}[] = [ @@ -162,10 +162,7 @@ describe("Forkchoice", function () { const expectedDependentRoot = getBlockRoot(pivotSlot); - expect(forkchoice.getDependentRoot(block, epoch)).to.be.equal( - expectedDependentRoot, - "incorrect attester dependent root" - ); + expect(forkchoice.getDependentRoot(block, epoch)).toBe(expectedDependentRoot); }); } diff --git a/packages/fork-choice/test/unit/forkChoice/utils.test.ts b/packages/fork-choice/test/unit/forkChoice/utils.test.ts index 3cf497ac38a1..3f315d079842 100644 --- a/packages/fork-choice/test/unit/forkChoice/utils.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/utils.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createChainForkConfig} from "@lodestar/config"; import {ssz} from "@lodestar/types"; import {assertValidTerminalPowBlock, ExecutionStatus} from "../../../src/index.js"; @@ -17,7 +17,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).to.not.throw(); + ).not.toThrow(); }); it("should require powBlockParent if powBlock not genesis", function () { @@ -29,7 +29,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).to.throw(); + ).toThrow(); }); it("should require powBlock >= ttd", function () { @@ -41,7 +41,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).to.throw(); + ).toThrow(); }); it("should require powBlockParent < ttd", function () { @@ -53,7 +53,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).to.throw(); + ).toThrow(); }); 
it("should accept powBlockParent < ttd and powBlock >= ttd", function () { @@ -67,8 +67,6 @@ describe("assertValidTerminalPowBlock", function () { ...powBlock, totalDifficulty: BigInt(9), }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent, powBlock}) - ).to.not.throw(); + expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent, powBlock})).not.toThrow(); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts index 3981ef84ff4c..54b8a900d05c 100644 --- a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts +++ b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {getEffectiveBalanceIncrementsZeroed} from "@lodestar/state-transition"; import {computeDeltas} from "../../../src/protoArray/computeDeltas.js"; @@ -25,11 +25,11 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); - expect(deltas).to.deep.equal(Array.from({length: validatorCount}, () => 0)); + expect(deltas.length).toEqual(validatorCount); + expect(deltas).toEqual(Array.from({length: validatorCount}, () => 0)); for (const vote of votes) { - expect(vote.currentIndex).to.eql(vote.nextIndex); + expect(vote.currentIndex).toEqual(vote.nextIndex); } }); @@ -55,13 +55,13 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((balance * validatorCount).toString()); + expect(delta.toString()).toBe((balance * validatorCount).toString()); } else { - 
expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -88,10 +88,10 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const delta of deltas) { - expect(delta.toString()).to.equal(balance.toString()); + expect(delta.toString()).toBe(balance.toString()); } }); @@ -117,17 +117,17 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); const totalDelta = balance * validatorCount; for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((0 - totalDelta).toString()); + expect(delta.toString()).toBe((0 - totalDelta).toString()); } else if (i === 1) { - expect(delta.toString()).to.equal(totalDelta.toString()); + expect(delta.toString()).toBe(totalDelta.toString()); } else { - expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -201,15 +201,15 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((0 - oldBalance * validatorCount).toString()); + expect(delta.toString()).toBe((0 - oldBalance * validatorCount).toString()); } else if (i === 1) { - expect(delta.toString()).to.equal((newBalance * validatorCount).toString()); + expect(delta.toString()).toBe((newBalance * validatorCount).toString()); } else { - expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -239,13 +239,13 @@ describe("computeDeltas", () => { const deltas = 
computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(2); + expect(deltas.length).toEqual(2); - expect(deltas[0].toString()).to.eql((0 - balance).toString()); - expect(deltas[1].toString()).to.eql((balance * 2).toString()); + expect(deltas[0].toString()).toEqual((0 - balance).toString()); + expect(deltas[1].toString()).toEqual((balance * 2).toString()); for (const vote of votes) { - expect(vote.currentIndex).to.equal(vote.nextIndex); + expect(vote.currentIndex).toBe(vote.nextIndex); } }); @@ -273,13 +273,13 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(2); + expect(deltas.length).toEqual(2); - expect(deltas[0].toString()).to.eql((0 - balance * 2).toString()); - expect(deltas[1].toString()).to.eql(balance.toString()); + expect(deltas[0].toString()).toEqual((0 - balance * 2).toString()); + expect(deltas[1].toString()).toEqual(balance.toString()); for (const vote of votes) { - expect(vote.currentIndex).to.equal(vote.nextIndex); + expect(vote.currentIndex).toBe(vote.nextIndex); } }); @@ -303,12 +303,12 @@ describe("computeDeltas", () => { // 1st validator is part of an attester slashing const equivocatingIndices = new Set([0]); let deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); - expect(deltas[0]).to.be.equals( + expect(deltas[0]).toBeWithMessage( -1 * (firstBalance + secondBalance), "should disregard the 1st validator due to attester slashing" ); - expect(deltas[1]).to.be.equals(secondBalance, "should move 2nd balance from 1st root to 2nd root"); + expect(deltas[1]).toBeWithMessage(secondBalance, "should move 2nd balance from 1st root to 2nd root"); deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); - expect(deltas).to.be.deep.equals([0, 0], "calling computeDeltas again should not have any affect on the weight"); + 
expect(deltas).toEqualWithMessage([0, 0], "calling computeDeltas again should not have any affect on the weight"); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts b/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts index e1dda450aa46..94e5cd3ac9a0 100644 --- a/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts +++ b/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import { ProtoBlock, ProtoArray, @@ -135,7 +135,7 @@ describe("executionStatus / normal updates", () => { */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -156,7 +156,7 @@ describe("executionStatus / normal updates", () => { const invalidate3CValidate2CForkChoice = collectProtoarrayValidationStatus(fc); it("correcly invalidate 3C and validate 2C only", () => { - expect(invalidate3CValidate2CForkChoice).to.be.deep.equal( + expect(invalidate3CValidate2CForkChoice).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Syncing], @@ -186,7 +186,7 @@ describe("executionStatus / normal updates", () => { ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Validate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -218,7 +218,7 @@ describe("executionStatus / normal updates", () => { ); const invalidate3A2A = collectProtoarrayValidationStatus(fc); it("Invalidate 3A, 2A with 2A loosing its bestChild, bestDescendant", () => { - expect(invalidate3A2A).to.be.deep.equal( + 
expect(invalidate3A2A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -245,7 +245,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -265,7 +265,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { ); const postMergeInvalidated = collectProtoarrayValidationStatus(fc); it("all post merge blocks should be invalidated except Cs", () => { - expect(postMergeInvalidated).to.be.deep.equal( + expect(postMergeInvalidated).toEqual( toFcTestCase([ ["0", undefined, undefined, ExecutionStatus.PreMerge], ["1A", undefined, undefined, ExecutionStatus.Invalid], @@ -281,7 +281,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { const fcHead = fc.findHead("0", 3); it("pre merge block should be the FC head", () => { - expect(fcHead).to.be.equal("0"); + expect(fcHead).toBe("0"); }); }); @@ -297,7 +297,7 @@ describe("executionStatus / poision forkchoice if we invalidate previous valid", */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -316,7 +316,7 @@ describe("executionStatus / poision forkchoice if we invalidate previous valid", ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Validate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -340,10 +340,10 @@ describe("executionStatus / poision forkchoice if we 
invalidate previous valid", }, 3 ) - ).to.throw(Error); + ).toThrow(Error); - expect(fc.lvhError).to.be.deep.equal({lvhCode: LVHExecErrorCode.ValidToInvalid, blockRoot: "1A", execHash: "1A"}); - expect(() => fc.findHead("0", 3)).to.throw(Error); + expect(fc.lvhError).toEqual({lvhCode: LVHExecErrorCode.ValidToInvalid, blockRoot: "1A", execHash: "1A"}); + expect(() => fc.findHead("0", 3)).toThrow(Error); }); }); @@ -359,7 +359,7 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -379,7 +379,7 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Inalidate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", undefined, undefined, ExecutionStatus.PreMerge], ["1A", undefined, undefined, ExecutionStatus.Invalid], @@ -402,10 +402,10 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", }, 3 ) - ).to.throw(Error); + ).toThrow(Error); - expect(fc.lvhError).to.be.deep.equal({lvhCode: LVHExecErrorCode.InvalidToValid, blockRoot: "2A", execHash: "2A"}); - expect(() => fc.findHead("0", 3)).to.throw(Error); + expect(fc.lvhError).toEqual({lvhCode: LVHExecErrorCode.InvalidToValid, blockRoot: "2A", execHash: "2A"}); + expect(() => fc.findHead("0", 3)).toThrow(Error); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts b/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts index 3d47d906f74a..766c02a15a23 100644 --- a/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts +++ 
b/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {ProtoArray, ExecutionStatus} from "../../../src/index.js"; describe("getCommonAncestor", () => { @@ -73,7 +73,7 @@ describe("getCommonAncestor", () => { it(`${nodeA} & ${nodeB} -> ${ancestor}`, () => { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const ancestorNode = fc.getCommonAncestor(fc.getNode(nodeA)!, fc.getNode(nodeB)!); - expect(ancestorNode && ancestorNode.blockRoot).to.equal(ancestor); + expect(ancestorNode && ancestorNode.blockRoot).toBe(ancestor); }); } @@ -104,5 +104,5 @@ describe("getCommonAncestor", () => { // multiple calls to applyScoreChanges don't keep on adding boosts to weight over // and over again, and applyScoreChanges can be safely called after onAttestations - expect(weightsAfterCall1).to.deep.equal(weightsAfterCall2); + expect(weightsAfterCall1).toEqual(weightsAfterCall2); }); diff --git a/packages/fork-choice/test/unit/protoArray/protoArray.test.ts b/packages/fork-choice/test/unit/protoArray/protoArray.test.ts index 88d6453e6204..c3bf8a0f439a 100644 --- a/packages/fork-choice/test/unit/protoArray/protoArray.test.ts +++ b/packages/fork-choice/test/unit/protoArray/protoArray.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {RootHex} from "@lodestar/types"; import {ProtoArray, ExecutionStatus} from "../../../src/index.js"; @@ -107,7 +107,7 @@ describe("ProtoArray", () => { ]; for (const [ancestorRoot, descendantRoot, isDescendant] of assertions) { - expect(fc.isDescendant(ancestorRoot, descendantRoot)).to.equal( + expect(fc.isDescendant(ancestorRoot, descendantRoot)).toBeWithMessage( isDescendant, `${descendantRoot} must be ${isDescendant ? 
"descendant" : "not descendant"} of ${ancestorRoot}` ); diff --git a/packages/fork-choice/vitest.config.ts b/packages/fork-choice/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/fork-choice/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/light-client/.mocharc.yaml b/packages/light-client/.mocharc.yaml deleted file mode 100644 index f28ebdf663a0..000000000000 --- a/packages/light-client/.mocharc.yaml +++ /dev/null @@ -1,6 +0,0 @@ -colors: true -timeout: 5000 -exit: true -extension: ["ts"] -node-option: - - "loader=ts-node/esm" diff --git a/packages/light-client/.nycrc.json b/packages/light-client/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/light-client/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/light-client/README.md b/packages/light-client/README.md index 7afd871b3f2e..cac2f71c6880 100644 --- a/packages/light-client/README.md +++ b/packages/light-client/README.md @@ -12,7 +12,7 @@ The evolution of light clients is emblematic of the broader trajectory of Ethere [![Eth Consensus Spec v1.1.10](https://img.shields.io/badge/ETH%20consensus--spec-1.1.10-blue)](https://github.com/ethereum/consensus-specs/releases/tag/v1.1.10) ![ES Version](https://img.shields.io/badge/ES-2021-yellow) ![Node Version](https://img.shields.io/badge/node-16.x-green) -[Yarn](https://yarnpkg.com/) +![Yarn](https://img.shields.io/badge/yarn-%232C8EBB.svg?style=for-the-badge&logo=yarn&logoColor=white) > This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project @@ -26,14 +26,14 @@ Access to an beacon node that supports the light client specification is necessa - 
`/eth/v1/beacon/light_client/bootstrap/{block_root}` - `/eth/v0/beacon/light_client/committee_root` -System requirements are quite low so its possible to run a light client in the browser as part of a website. There are a few examples of this on github that you can use as reference, our [prover](https://chainsafe.github.io/lodestar/lightclient-prover/prover.md) being one of them. +System requirements are quite low so its possible to run a light client in the browser as part of a website. There are a few examples of this on github that you can use as reference, our [prover](https://chainsafe.github.io/lodestar/lightclient-prover/prover) being one of them. You can find more information about the light-client protocol in the [specification](https://github.com/ethereum/consensus-specs). ## Getting started -- Follow the [installation guide](https://chainsafe.github.io/lodestar/) to install Lodestar. -- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/usage/local). +- Follow the [installation guide](https://chainsafe.github.io/lodestar/getting-started/installation) or [Docker install](https://chainsafe.github.io/lodestar/getting-started/installation/#docker-installation) to install Lodestar. +- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/advanced-topics/setting-up-a-testnet). ## Light-Client CLI Example @@ -126,7 +126,7 @@ lightclient.emitter.on(LightclientEvent.lightClientOptimisticHeader, async (opti ## Contributors -Read our [contributors document](/CONTRIBUTING.md), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! +Read our [contribution documentation](https://chainsafe.github.io/lodestar/contribution/getting-started), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! 
## License diff --git a/packages/light-client/karma.config.cjs b/packages/light-client/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/light-client/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 468e82e0ed0f..3270dc14f088 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -59,20 +59,23 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit && yarn test:e2e", - "test:browsers": "yarn karma start karma.config.cjs", - "test:unit": "LODESTAR_PRESET=minimal nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/api": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - 
"@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "mitt": "^3.0.0", "strict-event-emitter-types": "^2.0.0" }, diff --git a/packages/light-client/src/spec/validateLightClientUpdate.ts b/packages/light-client/src/spec/validateLightClientUpdate.ts index 256be6a99c2c..2629986e85f2 100644 --- a/packages/light-client/src/spec/validateLightClientUpdate.ts +++ b/packages/light-client/src/spec/validateLightClientUpdate.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; import {Root, ssz, allForks} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; diff --git a/packages/light-client/src/utils/utils.ts b/packages/light-client/src/utils/utils.ts index c6be99bac8ac..9960921eee90 100644 --- a/packages/light-client/src/utils/utils.ts +++ b/packages/light-client/src/utils/utils.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey} from "@chainsafe/bls/types"; import {BitArray} from "@chainsafe/ssz"; import {altair, Root, ssz} from "@lodestar/types"; diff --git a/packages/light-client/src/validation.ts b/packages/light-client/src/validation.ts index a0d6f83d8d02..85c5c35a2cea 100644 --- a/packages/light-client/src/validation.ts +++ b/packages/light-client/src/validation.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; import {altair, Root, Slot, ssz, allForks} from "@lodestar/types"; import { diff --git a/packages/light-client/test/globalSetup.ts b/packages/light-client/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null 
+++ b/packages/light-client/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/light-client/test/unit/isValidLightClientHeader.test.ts b/packages/light-client/test/unit/isValidLightClientHeader.test.ts index a28ac65ff618..40efa1293231 100644 --- a/packages/light-client/test/unit/isValidLightClientHeader.test.ts +++ b/packages/light-client/test/unit/isValidLightClientHeader.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {ssz, allForks} from "@lodestar/types"; import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; @@ -91,7 +91,7 @@ describe("isValidLightClientHeader", function () { testCases.forEach(([name, header]: [string, allForks.LightClientHeader]) => { it(name, function () { const isValid = isValidLightClientHeader(config, header); - expect(isValid).to.be.true; + expect(isValid).toBe(true); }); }); }); diff --git a/packages/light-client/test/unit/sync.node.test.ts b/packages/light-client/test/unit/sync.node.test.ts index 27c924e37462..168bfeceb5f9 100644 --- a/packages/light-client/test/unit/sync.node.test.ts +++ b/packages/light-client/test/unit/sync.node.test.ts @@ -1,5 +1,4 @@ -import {expect} from "chai"; -import {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, afterEach, vi} from "vitest"; import {JsonPath, toHexString} from "@chainsafe/ssz"; import {computeDescriptor, TreeOffsetProof} from "@chainsafe/persistent-merkle-tree"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SLOTS_PER_EPOCH} from "@lodestar/params"; @@ -21,21 +20,15 @@ import { lastInMap, } from "../utils/utils.js"; import {startServer, ServerOpts} from "../utils/server.js"; -import {isNode} from "../../src/utils/utils.js"; import {computeSyncPeriodAtSlot} from "../../src/utils/clock.js"; import {LightClientRestTransport} from 
"../../src/transport/rest.js"; const SOME_HASH = Buffer.alloc(32, 0xff); describe("sync", () => { + vi.setConfig({testTimeout: 30_000}); const afterEachCbs: (() => Promise | unknown)[] = []; - before("init bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? "blst-native" : "herumi"); - }); - afterEach(async () => { await Promise.all(afterEachCbs); afterEachCbs.length = 0; @@ -168,16 +161,13 @@ describe("sync", () => { }); // Ensure that the lightclient head is correct - expect(lightclient.getHead().beacon.slot).to.equal(targetSlot, "lightclient.head is not the targetSlot head"); + expect(lightclient.getHead().beacon.slot).toBe(targetSlot); // Fetch proof of "latestExecutionPayloadHeader.stateRoot" const {proof, header} = await getHeadStateProof(lightclient, api, [["latestExecutionPayloadHeader", "stateRoot"]]); const recoveredState = ssz.bellatrix.BeaconState.createFromProof(proof, header.beacon.stateRoot); - expect(toHexString(recoveredState.latestExecutionPayloadHeader.stateRoot)).to.equal( - toHexString(executionStateRoot), - "Recovered executionStateRoot from getHeadStateProof() not correct" - ); + expect(toHexString(recoveredState.latestExecutionPayloadHeader.stateRoot)).toBe(toHexString(executionStateRoot)); }); }); diff --git a/packages/light-client/test/unit/syncInMemory.test.ts b/packages/light-client/test/unit/syncInMemory.test.ts index df9f5dcd57da..770827e86655 100644 --- a/packages/light-client/test/unit/syncInMemory.test.ts +++ b/packages/light-client/test/unit/syncInMemory.test.ts @@ -1,5 +1,5 @@ -import {expect} from "chai"; -import bls, {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, beforeAll, vi} from "vitest"; +import bls from "@chainsafe/bls"; import {createBeaconConfig} from "@lodestar/config"; import {chainConfig} from "@lodestar/config/default"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, 
SLOTS_PER_EPOCH} from "@lodestar/params"; @@ -9,7 +9,6 @@ import {BeaconChainLcMock} from "../mocks/BeaconChainLcMock.js"; import {processLightClientUpdate} from "../utils/naive/update.js"; import {IBeaconChainLc, prepareUpdateNaive} from "../utils/prepareUpdateNaive.js"; import {getInteropSyncCommittee, getSyncAggregateSigningRoot, SyncCommitteeKeys} from "../utils/utils.js"; -import {isNode} from "../../src/utils/utils.js"; function getSyncCommittee( syncCommitteesKeys: Map, @@ -25,7 +24,7 @@ function getSyncCommittee( describe("syncInMemory", function () { // In browser test this process is taking more time than default 2000ms - this.timeout(10000); + vi.setConfig({testTimeout: 10000}); // Fixed params const genValiRoot = Buffer.alloc(32, 9); @@ -35,20 +34,14 @@ describe("syncInMemory", function () { let updateData: {chain: IBeaconChainLc; blockWithSyncAggregate: altair.BeaconBlock}; let update: altair.LightClientUpdate; - before("init bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? 
"blst-native" : "herumi"); - }); - - before("BLS sanity check", () => { + beforeAll(() => { const sk = bls.SecretKey.fromBytes(Buffer.alloc(32, 1)); - expect(sk.toPublicKey().toHex()).to.equal( + expect(sk.toPublicKey().toHex()).toBe( "0xaa1a1c26055a329817a5759d877a2795f9499b97d6056edde0eea39512f24e8bc874b4471f0501127abb1ea0d9f68ac1" ); }); - before("Generate data for prepareUpdate", () => { + beforeAll(() => { // Create a state that has as nextSyncCommittee the committee 2 const finalizedBlockSlot = SLOTS_PER_EPOCH * EPOCHS_PER_SYNC_COMMITTEE_PERIOD + 1; const headerBlockSlot = finalizedBlockSlot + 1; @@ -107,6 +100,6 @@ describe("syncInMemory", function () { }, }; - expect(() => processLightClientUpdate(config, store, update, currentSlot)).to.not.throw(); + expect(() => processLightClientUpdate(config, store, update, currentSlot)).not.toThrow(); }); }); diff --git a/packages/light-client/test/unit/utils.test.ts b/packages/light-client/test/unit/utils.test.ts index 90a97e05db28..91bfab113431 100644 --- a/packages/light-client/test/unit/utils.test.ts +++ b/packages/light-client/test/unit/utils.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isValidMerkleBranch} from "../../src/utils/verifyMerkleBranch.js"; import {computeMerkleBranch} from "../utils/utils.js"; @@ -9,6 +9,6 @@ describe("utils", () => { const index = 22; const {root, proof} = computeMerkleBranch(leaf, depth, index); - expect(isValidMerkleBranch(leaf, proof, depth, index, root)).to.equal(true); + expect(isValidMerkleBranch(leaf, proof, depth, index, root)).toBe(true); }); }); diff --git a/packages/light-client/test/unit/utils/chunkify.test.ts b/packages/light-client/test/unit/utils/chunkify.test.ts index 78fc567513da..297637fd70b0 100644 --- a/packages/light-client/test/unit/utils/chunkify.test.ts +++ b/packages/light-client/test/unit/utils/chunkify.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from 
"vitest"; import {chunkifyInclusiveRange} from "../../../src/utils/chunkify.js"; describe("utils / chunkifyInclusiveRange", () => { @@ -20,7 +20,7 @@ describe("utils / chunkifyInclusiveRange", () => { for (const {id, from, to, max, result} of testCases) { it(id, () => { - expect(chunkifyInclusiveRange(from, to, max)).to.deep.equal(result); + expect(chunkifyInclusiveRange(from, to, max)).toEqual(result); }); } }); diff --git a/packages/light-client/test/unit/validation.test.ts b/packages/light-client/test/unit/validation.test.ts index 9bda4c86a91f..61442fb4bf8c 100644 --- a/packages/light-client/test/unit/validation.test.ts +++ b/packages/light-client/test/unit/validation.test.ts @@ -1,5 +1,5 @@ -import {expect} from "chai"; -import bls, {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, beforeAll, vi} from "vitest"; +import bls from "@chainsafe/bls"; import {Tree} from "@chainsafe/persistent-merkle-tree"; import {altair, ssz} from "@lodestar/types"; import {chainConfig} from "@lodestar/config/default"; @@ -14,12 +14,11 @@ import { import {assertValidLightClientUpdate} from "../../src/validation.js"; import {LightClientSnapshotFast, SyncCommitteeFast} from "../../src/types.js"; import {defaultBeaconBlockHeader, getSyncAggregateSigningRoot, signAndAggregate} from "../utils/utils.js"; -import {isNode} from "../../src/utils/utils.js"; describe("validation", function () { // In browser test this process is taking more time than default 2000ms // specially on the CI - this.timeout(15000); + vi.setConfig({testTimeout: 15000}); const genValiRoot = Buffer.alloc(32, 9); const config = createBeaconConfig(chainConfig, genValiRoot); @@ -27,13 +26,7 @@ describe("validation", function () { let update: altair.LightClientUpdate; let snapshot: LightClientSnapshotFast; - before("prepare bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? 
"blst-native" : "herumi"); - }); - - before("prepare data", function () { + beforeAll(function () { // Update slot must > snapshot slot // attestedHeaderSlot must == updateHeaderSlot + 1 const snapshotHeaderSlot = 1; @@ -106,6 +99,6 @@ describe("validation", function () { }); it("should validate valid update", () => { - expect(() => assertValidLightClientUpdate(config, snapshot.nextSyncCommittee, update)).to.not.throw(); + expect(() => assertValidLightClientUpdate(config, snapshot.nextSyncCommittee, update)).not.toThrow(); }); }); diff --git a/packages/light-client/test/utils/utils.ts b/packages/light-client/test/utils/utils.ts index df9bd4170dcc..455d6ef9997b 100644 --- a/packages/light-client/test/utils/utils.ts +++ b/packages/light-client/test/utils/utils.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import {PointFormat, PublicKey, SecretKey} from "@chainsafe/bls/types"; import {hasher, Tree} from "@chainsafe/persistent-merkle-tree"; import {BitArray, fromHexString} from "@chainsafe/ssz"; diff --git a/packages/light-client/tsconfig.e2e.json b/packages/light-client/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/light-client/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/light-client/vitest.browser.config.ts b/packages/light-client/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/light-client/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git 
a/packages/light-client/vitest.config.ts b/packages/light-client/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/light-client/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/light-client/webpack.test.config.cjs b/packages/light-client/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/light-client/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/logger/.mocharc.yml b/packages/logger/.mocharc.yml deleted file mode 100644 index a70609535d4f..000000000000 --- a/packages/logger/.mocharc.yml +++ /dev/null @@ -1,5 +0,0 @@ -colors: true -node-option: - - "loader=ts-node/esm" -require: - - ./test/setup.ts \ No newline at end of file diff --git a/packages/logger/karma.config.cjs b/packages/logger/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/logger/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/logger/package.json b/packages/logger/package.json index 8a2f40a176a7..e8cc7d5f9622 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -56,20 +56,24 @@ "lint": "eslint --color --ext .ts src/ test/", 
"lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "mocha 'test/unit/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", - "test:e2e": "LODESTAR_PRESET=minimal mocha 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --dir test/e2e", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", "dependencies": { - "@lodestar/utils": "^1.13.0", + "@lodestar/utils": "^1.14.0", "winston": "^3.8.2", "winston-daily-rotate-file": "^4.7.1", "winston-transport": "^4.5.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.13.0", + "@chainsafe/threads": "^1.11.1", + "@lodestar/test-utils": "^1.14.0", "@types/triple-beam": "^1.3.2", "rimraf": "^4.4.1", "triple-beam": "^1.3.0" diff --git a/packages/logger/test/e2e/logger/workerLogger.ts b/packages/logger/test/e2e/logger/workerLogger.js similarity index 87% rename from packages/logger/test/e2e/logger/workerLogger.ts rename to packages/logger/test/e2e/logger/workerLogger.js index 0a4f1dd9207b..9608336c433f 100644 --- a/packages/logger/test/e2e/logger/workerLogger.ts +++ b/packages/logger/test/e2e/logger/workerLogger.js @@ -3,7 +3,7 @@ import worker from "node:worker_threads"; import {expose} from "@chainsafe/threads/worker"; const parentPort = worker.parentPort; -const workerData = worker.workerData as {logFilepath: string}; +const workerData = worker.workerData; if (!parentPort) throw Error("parentPort must be defined"); const 
file = fs.createWriteStream(workerData.logFilepath, {flags: "a"}); diff --git a/packages/logger/test/e2e/logger/workerLoggerHandler.ts b/packages/logger/test/e2e/logger/workerLoggerHandler.ts index 3ff095fc4f89..b166ef15ff00 100644 --- a/packages/logger/test/e2e/logger/workerLoggerHandler.ts +++ b/packages/logger/test/e2e/logger/workerLoggerHandler.ts @@ -9,7 +9,9 @@ export type LoggerWorker = { type WorkerData = {logFilepath: string}; export async function getLoggerWorker(opts: WorkerData): Promise { - const workerThreadjs = new Worker("./workerLogger.js", {workerData: opts}); + const workerThreadjs = new Worker("./workerLogger.js", { + workerData: opts, + }); const worker = workerThreadjs as unknown as worker_threads.Worker; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/packages/logger/test/e2e/logger/workerLogs.test.ts b/packages/logger/test/e2e/logger/workerLogs.test.ts index 52b8b5efa4b1..3c81cbf92c57 100644 --- a/packages/logger/test/e2e/logger/workerLogs.test.ts +++ b/packages/logger/test/e2e/logger/workerLogs.test.ts @@ -1,7 +1,7 @@ import path from "node:path"; import fs from "node:fs"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {sleep} from "@lodestar/utils"; import {LoggerWorker, getLoggerWorker} from "./workerLoggerHandler.js"; @@ -11,7 +11,7 @@ import {LoggerWorker, getLoggerWorker} from "./workerLoggerHandler.js"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("worker logs", function () { - this.timeout(60_000); + vi.setConfig({testTimeout: 60_000}); const logFilepath = path.join(__dirname, "../../../test-logs/test_worker_logs.log"); let loggerWorker: LoggerWorker; @@ -36,7 +36,7 @@ describe("worker logs", function () { fs.createWriteStream(logFilepath, {flags: "a"}).write(logTextMainThread); const data = await waitForFileSize(logFilepath, logTextMainThread.length); - 
expect(data).includes(logTextMainThread); + expect(data).toContain(logTextMainThread); }); it("worker writes to file", async () => { @@ -44,7 +44,7 @@ describe("worker logs", function () { loggerWorker.log(logTextWorker); const data = await waitForFileSize(logFilepath, logTextWorker.length); - expect(data).includes(logTextWorker); + expect(data).toContain(logTextWorker); }); it("concurrent write from two write streams in different threads", async () => { @@ -57,8 +57,8 @@ describe("worker logs", function () { file.write(logTextMainThread + "\n"); const data = await waitForFileSize(logFilepath, logTextWorker.length + logTextMainThread.length); - expect(data).includes(logTextWorker); - expect(data).includes(logTextMainThread); + expect(data).toContain(logTextWorker); + expect(data).toContain(logTextMainThread); }); }); diff --git a/packages/logger/test/globalSetup.ts b/packages/logger/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/logger/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/logger/test/setup.ts b/packages/logger/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/logger/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/logger/test/unit/browser.test.ts b/packages/logger/test/unit/browser.test.ts index c1dd70b6bebd..e2160418663a 100644 --- a/packages/logger/test/unit/browser.test.ts +++ b/packages/logger/test/unit/browser.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {LogLevel} from "@lodestar/utils"; import {stubLoggerForConsole} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; @@ 
-22,7 +22,7 @@ describe("browser logger", () => { logger.warn(message, context, error); logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + expect(logger.getLogs()).toEqual([output[format]]); }); } } diff --git a/packages/logger/test/unit/env.node.test.ts b/packages/logger/test/unit/env.node.test.ts index 547f891b7ea1..4d2b914ca7f4 100644 --- a/packages/logger/test/unit/env.node.test.ts +++ b/packages/logger/test/unit/env.node.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {LogLevel} from "@lodestar/utils"; import {stubLoggerForConsole} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; @@ -20,7 +20,7 @@ describe("env logger", () => { logger.warn(message, context, error); logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + expect(logger.getLogs()).toEqual([output[format]]); }); } } diff --git a/packages/logger/test/unit/node.node.test.ts b/packages/logger/test/unit/node.node.test.ts index 6342ae9e4ccb..12782fa49af8 100644 --- a/packages/logger/test/unit/node.node.test.ts +++ b/packages/logger/test/unit/node.node.test.ts @@ -1,27 +1,35 @@ -import {expect} from "chai"; +import {describe, it, expect, vi, afterEach, Mock} from "vitest"; import {LogLevel} from "@lodestar/utils"; -import {stubLoggerForProcessStd} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; import {getNodeLogger} from "../../src/node.js"; import {formatsTestCases} from "../fixtures/loggerFormats.js"; +// Node.js maps `process.stdout` to `console._stdout`. +// spy does not work on `process.stdout` directly. 
+// eslint-disable-next-line @typescript-eslint/naming-convention +type TestConsole = typeof console & {_stdout: {write: Mock}}; + describe("node logger", () => { + afterEach(() => { + vi.resetAllMocks(); + }); + describe("format and options", () => { for (const testCase of formatsTestCases) { const {id, opts, message, context, error, output} = typeof testCase === "function" ? testCase() : testCase; for (const format of logFormats) { it(`${id} ${format} output`, async () => { - const logger = stubLoggerForProcessStd( - getNodeLogger({ - level: LogLevel.info, - format, - module: opts?.module, - timestampFormat: {format: TimestampFormatCode.Hidden}, - }) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const logger = getNodeLogger({ + level: LogLevel.info, + format, + module: opts?.module, + timestampFormat: {format: TimestampFormatCode.Hidden}, + }); logger.warn(message, context, error); - logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith(1, `${output[format]}\n`); }); } } diff --git a/packages/logger/test/unit/utils/json.test.ts b/packages/logger/test/unit/utils/json.test.ts index 06352fc5f171..02a6c95e1ed9 100644 --- a/packages/logger/test/unit/utils/json.test.ts +++ b/packages/logger/test/unit/utils/json.test.ts @@ -1,6 +1,4 @@ -/* eslint-disable @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-assignment */ -import "../../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromHexString, toHexString} from "@chainsafe/ssz"; import {LodestarError} from "@lodestar/utils"; import {logCtxToJson, logCtxToString} from "../../../src/utils/json.js"; @@ -13,7 +11,7 @@ describe("Json helper", () => { type TestCase = { id: string; arg: unknown; - json: any; + json: unknown; }; const testCases: (TestCase | (() => TestCase))[] = [ // Basic types @@ -27,13 +25,13 @@ describe("Json helper", () 
=> { // Functions // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "function", arg: function () {}, json: "function () { }"}, + {id: "function", arg: function () {}, json: "function() {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "arrow function", arg: () => {}, json: "() => { }"}, + {id: "arrow function", arg: () => {}, json: "() => {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "async function", arg: async function () {}, json: "async function () { }"}, + {id: "async function", arg: async function () {}, json: "async function() {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "async arrow function", arg: async () => {}, json: "async () => { }"}, + {id: "async arrow function", arg: async () => {}, json: "async () => {\n }"}, // Arrays {id: "array of basic types", arg: [1, 2, 3], json: [1, 2, 3]}, @@ -119,6 +117,7 @@ describe("Json helper", () => { // Circular references () => { const circularReference: any = {}; + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access circularReference.myself = circularReference; return { id: "circular reference", @@ -131,7 +130,7 @@ describe("Json helper", () => { for (const testCase of testCases) { const {id, arg, json} = typeof testCase === "function" ? testCase() : testCase; it(id, () => { - expect(logCtxToJson(arg)).to.deep.equal(json); + expect(logCtxToJson(arg)).toEqual(json); }); } }); @@ -180,6 +179,7 @@ describe("Json helper", () => { // Circular references () => { const circularReference: any = {}; + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access circularReference.myself = circularReference; return { id: "circular reference", @@ -192,7 +192,7 @@ describe("Json helper", () => { for (const testCase of testCases) { const {id, json, output} = typeof testCase === "function" ? 
testCase() : testCase; it(id, () => { - expect(logCtxToString(json)).to.equal(output); + expect(logCtxToString(json)).toBe(output); }); } }); diff --git a/packages/logger/test/unit/utils/timeFormat.test.ts b/packages/logger/test/unit/utils/timeFormat.test.ts index 62640ff48c2c..fc374a0f6c7f 100644 --- a/packages/logger/test/unit/utils/timeFormat.test.ts +++ b/packages/logger/test/unit/utils/timeFormat.test.ts @@ -1,5 +1,4 @@ -import "../../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {formatEpochSlotTime} from "../../../src/utils/timeFormat.js"; describe("logger / util / formatEpochSlotTime", () => { @@ -17,7 +16,7 @@ describe("logger / util / formatEpochSlotTime", () => { const expectLog = `Eph ${epoch}/${slot} ${sec}`; // "Eph 3/6 11.423"; it(expectLog, () => { const genesisTime = nowSec - epoch * slotsPerEpoch * secondsPerSlot - slot * secondsPerSlot - sec; - expect(formatEpochSlotTime({genesisTime, secondsPerSlot, slotsPerEpoch}, nowSec * 1000)).to.equal(expectLog); + expect(formatEpochSlotTime({genesisTime, secondsPerSlot, slotsPerEpoch}, nowSec * 1000)).toBe(expectLog); }); } }); diff --git a/packages/logger/test/unit/winston.node.test.ts b/packages/logger/test/unit/winston.node.test.ts index cdf7e17ddeb1..8ef49da4e02d 100644 --- a/packages/logger/test/unit/winston.node.test.ts +++ b/packages/logger/test/unit/winston.node.test.ts @@ -1,26 +1,34 @@ import fs from "node:fs"; import path from "node:path"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll, afterEach, vi, Mock} from "vitest"; import {LogLevel} from "@lodestar/utils"; -import {stubLoggerForProcessStd} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode} from "../../src/index.js"; import {getNodeLogger} from "../../src/node.js"; import {readFileWhenExists} from "../utils/files.js"; +// Node.js maps `process.stdout` to `console._stdout`. +// spy does not work on `process.stdout` directly. 
+// eslint-disable-next-line @typescript-eslint/naming-convention +type TestConsole = typeof console & {_stdout: {write: Mock}}; + describe("winston logger", () => { + afterEach(() => { + vi.resetAllMocks(); + }); + describe("winston dynamic level by module", () => { it("should log to child at a lower log level", async () => { - const loggerA = stubLoggerForProcessStd( - getNodeLogger({ - level: LogLevel.info, - module: "a", - format: "human", - levelModule: { - "a/b": LogLevel.debug, - }, - timestampFormat: {format: TimestampFormatCode.Hidden}, - }) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const loggerA = getNodeLogger({ + level: LogLevel.info, + module: "a", + format: "human", + levelModule: { + "a/b": LogLevel.debug, + }, + timestampFormat: {format: TimestampFormatCode.Hidden}, + }); const loggerAB = loggerA.child({module: "b"}); @@ -29,24 +37,31 @@ describe("winston logger", () => { loggerAB.info("test a/b info"); // show loggerAB.debug("test a/b debug"); // show - loggerA.restoreStubs(); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 1, + "[a] \u001b[32minfo\u001b[39m: test a info\n" + ); + + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 2, + "[a/b] \u001b[32minfo\u001b[39m: test a/b info\n" + ); - expect(loggerA.getLogs()).deep.equals([ - "[a] \u001b[32minfo\u001b[39m: test a info", - "[a/b] \u001b[32minfo\u001b[39m: test a/b info", - "[a/b] \u001b[34mdebug\u001b[39m: test a/b debug", - ]); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 3, + "[a/b] \u001b[34mdebug\u001b[39m: test a/b debug\n" + ); }); }); describe("winston transport log to file", () => { let tmpDir: string; - before(() => { + beforeAll(() => { tmpDir = fs.mkdtempSync("test-lodestar-winston-test"); }); - after(() => { + afterAll(() => { fs.rmSync(tmpDir, {recursive: true}); }); @@ -71,15 +86,20 @@ describe("winston logger", () => { const expectedOut = "[a] \u001b[33mwarn\u001b[39m: 
test"; - expect(await readFileWhenExists(tmpDir, filenameRx)).to.equal(expectedOut); + expect(await readFileWhenExists(tmpDir, filenameRx)).toBe(expectedOut); }); }); describe("child logger", () => { it("should parse child module", async () => { - const loggerA = stubLoggerForProcessStd( - getNodeLogger({level: LogLevel.info, timestampFormat: {format: TimestampFormatCode.Hidden}, module: "a"}) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const loggerA = getNodeLogger({ + level: LogLevel.info, + timestampFormat: {format: TimestampFormatCode.Hidden}, + module: "a", + }); + const loggerAB = loggerA.child({module: "b"}); const loggerABC = loggerAB.child({module: "c"}); @@ -87,13 +107,18 @@ describe("winston logger", () => { loggerAB.warn("test a/b"); loggerABC.warn("test a/b/c"); - loggerA.restoreStubs(); - - expect(loggerA.getLogs()).deep.equals([ - "[a] \u001b[33mwarn\u001b[39m: test a", - "[a/b] \u001b[33mwarn\u001b[39m: test a/b", - "[a/b/c] \u001b[33mwarn\u001b[39m: test a/b/c", - ]); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 1, + "[a] \u001b[33mwarn\u001b[39m: test a\n" + ); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 2, + "[a/b] \u001b[33mwarn\u001b[39m: test a/b\n" + ); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 3, + "[a/b/c] \u001b[33mwarn\u001b[39m: test a/b/c\n" + ); }); }); }); diff --git a/packages/logger/test/utils/chai.ts b/packages/logger/test/utils/chai.ts deleted file mode 100644 index 3c1e855021be..000000000000 --- a/packages/logger/test/utils/chai.ts +++ /dev/null @@ -1,9 +0,0 @@ -import {expect} from "chai"; - -export function expectDeepEquals(a: T, b: T, message?: string): void { - expect(a).deep.equals(b, message); -} - -export function expectEquals(a: T, b: T, message?: string): void { - expect(a).equals(b, message); -} diff --git a/packages/logger/tsconfig.e2e.json b/packages/logger/tsconfig.e2e.json deleted file mode 100644 
index cedf626f4124..000000000000 --- a/packages/logger/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/logger/vitest.browser.config.ts b/packages/logger/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/logger/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/logger/vitest.config.ts b/packages/logger/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/logger/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/logger/webpack.test.config.cjs b/packages/logger/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/logger/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/params/karma.config.cjs b/packages/params/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/params/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff 
--git a/packages/params/package.json b/packages/params/package.json index 11606f1d3b07..7861da84424d 100644 --- a/packages/params/package.json +++ b/packages/params/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/params", - "version": "1.13.0", + "version": "1.14.0", "description": "Chain parameters required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -53,9 +53,12 @@ "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", "test": "yarn run check-types", - "test:unit": "mocha 'test/unit/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", - "test:e2e": "LODESTAR_PRESET=minimal mocha 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --dir test/e2e/", "check-readme": "typescript-docs-verifier" }, "repository": { diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 3d784356ae0f..e0623537d7f0 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -90,6 +90,7 @@ export const { FIELD_ELEMENTS_PER_BLOB, MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, } = activePreset; //////////// @@ -235,3 +236,10 @@ export const INTERVALS_PER_SLOT = 3; export const BYTES_PER_FIELD_ELEMENT = 32; export const BLOB_TX_TYPE = 0x03; export const VERSIONED_HASH_VERSION_KZG = 0x01; + +// ssz.deneb.BeaconBlockBody.getPathInfo(['blobKzgCommitments',0]).gindex +export const 
KZG_COMMITMENT_GINDEX0 = ACTIVE_PRESET === PresetName.minimal ? 864 : 221184; +export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_COMMITMENT_INCLUSION_PROOF_DEPTH; + +// ssz.deneb.BlobSidecars.elementType.fixedSize +export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ? 131672 : 131928; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index f29b1668ac44..9b591103edf5 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -115,4 +115,5 @@ export const mainnetPreset: BeaconPreset = { FIELD_ELEMENTS_PER_BLOB: 4096, MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index 34d690045117..ad86cbf89e61 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,7 +119,8 @@ export const minimalPreset: BeaconPreset = { // DENEB /////////// // https://github.com/ethereum/consensus-specs/blob/dev/presets/minimal/eip4844.yaml - FIELD_ELEMENTS_PER_BLOB: 4, + FIELD_ELEMENTS_PER_BLOB: 4096, MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 67d258bdd0c9..3c5ba6381131 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -81,6 +81,7 @@ export type BeaconPreset = { FIELD_ELEMENTS_PER_BLOB: number; MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; }; /** @@ -165,6 +166,7 @@ export const beaconPresetTypes: BeaconPresetTypes = { FIELD_ELEMENTS_PER_BLOB: "number", MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", }; type BeaconPresetTypes = { diff --git 
a/packages/params/test/e2e/ensure-config-is-synced.test.ts b/packages/params/test/e2e/ensure-config-is-synced.test.ts index 6be3e6e15db1..06fb4bae000c 100644 --- a/packages/params/test/e2e/ensure-config-is-synced.test.ts +++ b/packages/params/test/e2e/ensure-config-is-synced.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, vi} from "vitest"; import axios from "axios"; import {mainnetPreset} from "../../src/presets/mainnet.js"; import {minimalPreset} from "../../src/presets/minimal.js"; @@ -8,10 +8,10 @@ import {loadConfigYaml} from "../yaml.js"; // Not e2e, but slow. Run with e2e tests /** https://github.com/ethereum/consensus-specs/releases */ -const specConfigCommit = "v1.4.0-beta.2"; +const specConfigCommit = "v1.4.0-beta.5"; describe("Ensure config is synced", function () { - this.timeout(60 * 1000); + vi.setConfig({testTimeout: 60 * 1000}); it("mainnet", async function () { const remotePreset = await downloadRemoteConfig("mainnet", specConfigCommit); @@ -27,10 +27,10 @@ describe("Ensure config is synced", function () { function assertCorrectPreset(localPreset: BeaconPreset, remotePreset: BeaconPreset): void { // Check each key for better debuggability for (const key of Object.keys(remotePreset) as (keyof BeaconPreset)[]) { - expect(localPreset[key]).to.equal(remotePreset[key], `Wrong ${key} value`); + expect(localPreset[key]).toBe(remotePreset[key]); } - expect(localPreset).to.deep.equal(remotePreset); + expect(localPreset).toEqual(remotePreset); } async function downloadRemoteConfig(preset: "mainnet" | "minimal", commit: string): Promise { diff --git a/packages/params/test/e2e/overridePreset.test.ts b/packages/params/test/e2e/overridePreset.test.ts index c03e54a480da..16f29f3c5c84 100644 --- a/packages/params/test/e2e/overridePreset.test.ts +++ b/packages/params/test/e2e/overridePreset.test.ts @@ -2,16 +2,13 @@ import path from "node:path"; import util from "node:util"; import child from "node:child_process"; import 
{fileURLToPath} from "node:url"; -import {expect, use} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect, vi} from "vitest"; const scriptNames = { ok: "overridePresetOk.ts", error: "overridePresetError.ts", }; -use(chaiAsPromised); - const exec = util.promisify(child.exec); // Global variable __dirname no longer available in ES6 modules. @@ -21,7 +18,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("Override preset", function () { // Allow time for ts-node to compile Typescript source - this.timeout(30_000); + vi.setConfig({testTimeout: 30_000}); it("Should correctly override preset", async () => { // These commands can not run with minimal preset @@ -31,7 +28,7 @@ describe("Override preset", function () { }); it("Should throw trying to override preset in the wrong order", async () => { - await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).to.be.rejectedWith( + await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).rejects.toThrow( "Lodestar preset is already frozen" ); }); diff --git a/packages/params/test/e2e/setPreset.test.ts b/packages/params/test/e2e/setPreset.test.ts index 38942d2ee514..aa1371fa2eea 100644 --- a/packages/params/test/e2e/setPreset.test.ts +++ b/packages/params/test/e2e/setPreset.test.ts @@ -2,16 +2,13 @@ import path from "node:path"; import util from "node:util"; import child from "node:child_process"; import {fileURLToPath} from "node:url"; -import {expect, use} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect, vi} from "vitest"; const scriptNames = { ok: "setPresetOk.ts", error: "setPresetError.ts", }; -use(chaiAsPromised); - const exec = util.promisify(child.exec); // Global variable __dirname no longer available in ES6 modules. 
@@ -21,7 +18,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("setPreset", function () { // Allow time for ts-node to compile Typescript source - this.timeout(30_000); + vi.setConfig({testTimeout: 30_000}); it("Should correctly set preset", async () => { // These commands can not run with minimal preset @@ -31,7 +28,7 @@ describe("setPreset", function () { }); it("Should throw trying to set preset in the wrong order", async () => { - await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).to.be.rejectedWith( + await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).rejects.toThrow( "Lodestar preset is already frozen" ); }); diff --git a/packages/params/test/globalSetup.ts b/packages/params/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/params/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/params/test/unit/activePreset.test.ts b/packages/params/test/unit/activePreset.test.ts index 1d3c7b7a888d..eceda7eaac92 100644 --- a/packages/params/test/unit/activePreset.test.ts +++ b/packages/params/test/unit/activePreset.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {mainnetPreset} from "../../src/presets/mainnet.js"; import {minimalPreset} from "../../src/presets/minimal.js"; import {gnosisPreset as gnosisParams} from "../../src/presets/gnosis.js"; @@ -16,18 +16,15 @@ describe("active preset", async () => { it("Active preset should be set to the correct value", () => { if (process.env.LODESTAR_PRESET) { - expect(ACTIVE_PRESET).to.equal( - process.env.LODESTAR_PRESET, - "process.env.LODESTAR_PRESET must equal ACTIVE_PRESET" - ); + expect(ACTIVE_PRESET).toBe(process.env.LODESTAR_PRESET); } else { - expect(ACTIVE_PRESET).to.equal(PresetName.mainnet, "Default preset 
must be mainnet"); + expect(ACTIVE_PRESET).toBe(PresetName.mainnet); } }); it("Constants should be set to the correct value", () => { for (const [k, v] of Object.entries(params[ACTIVE_PRESET])) { - expect(exports[k]).to.deep.equal(v); + expect(exports[k]).toEqual(v); } }); @@ -37,6 +34,6 @@ describe("active preset", async () => { // To ensure this throws, call setActivePreset on both the src and lib file. setActivePreset(PresetName.minimal); setActivePresetLib(PresetName.minimal); - }).to.throw(); + }).toThrow(); }); }); diff --git a/packages/params/test/unit/applicationDomains.test.ts b/packages/params/test/unit/applicationDomains.test.ts index b3c0bb35ad9f..294ceb83ce44 100644 --- a/packages/params/test/unit/applicationDomains.test.ts +++ b/packages/params/test/unit/applicationDomains.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {DOMAIN_APPLICATION_MASK, DOMAIN_APPLICATION_BUILDER} from "../../src/index.js"; describe("validate application domains", () => { @@ -8,7 +8,8 @@ describe("validate application domains", () => { for (let i = 0; i < DOMAIN_APPLICATION_MASK.length; i++) { r += DOMAIN_APPLICATION_MASK[i] & domain[i]; } - expect(r).to.be.above(0, `${name} mask application should be valid`); + // eslint-disable-next-line chai-expect/no-inner-compare + expect(r > 0).toBeWithMessage(true, `${name} mask application should be valid`); }); }); }); diff --git a/packages/params/tsconfig.e2e.json b/packages/params/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/params/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/params/vitest.browser.config.ts b/packages/params/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/params/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import 
{defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/params/vitest.config.ts b/packages/params/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/params/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/params/webpack.test.config.cjs b/packages/params/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/params/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/prover/README.md b/packages/prover/README.md index e9cf389a3cc2..290766219e79 100644 --- a/packages/prover/README.md +++ b/packages/prover/README.md @@ -123,12 +123,12 @@ You will need to go over the [specification](https://github.com/ethereum/beacon- ## Getting started -- Follow the [installation guide](https://chainsafe.github.io/lodestar/) to install Lodestar. -- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/usage/local). +- Follow the [installation guide](https://chainsafe.github.io/lodestar/getting-started/installation) to install Lodestar. +- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/advanced-topics/setting-up-a-testnet). 
## Contributors -Read our [contributors document](/CONTRIBUTING.md), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! +Read our [contributors document](https://chainsafe.github.io/lodestar/contribution/getting-started), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! ## License diff --git a/packages/prover/package.json b/packages/prover/package.json index 8ce95d5e79d7..80a992118202 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -58,7 +58,7 @@ "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", - "test:e2e": "LODESTAR_PRESET=minimal vitest --run --poolOptions.threads.singleThread --dir test/e2e", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --poolOptions.threads.singleThread true --dir test/e2e", "check-readme": "typescript-docs-verifier", "generate-fixtures": "node --loader ts-node/esm scripts/generate_fixtures.ts" }, @@ -72,13 +72,13 @@ "@ethereumjs/tx": "^4.1.2", "@ethereumjs/util": "^8.0.6", "@ethereumjs/vm": "^6.4.2", - "@lodestar/api": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/light-client": "^1.13.0", - "@lodestar/logger": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/light-client": "^1.14.0", + "@lodestar/logger": "^1.14.0", + "@lodestar/params": 
"^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "ethereum-cryptography": "^1.2.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", @@ -87,7 +87,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.13.0", + "@lodestar/test-utils": "^1.14.0", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/prover/src/utils/rpc.ts b/packages/prover/src/utils/rpc.ts index 5feee3332c4a..fcd933675cab 100644 --- a/packages/prover/src/utils/rpc.ts +++ b/packages/prover/src/utils/rpc.ts @@ -100,6 +100,6 @@ export class ELRpc { getRequestId(): string { // TODO: Find better way to generate random id - return (Math.random() * 10000).toFixed(0); + return (Math.random() * 100000000000000000).toFixed(0); } } diff --git a/packages/reqresp/.nycrc.json b/packages/reqresp/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/reqresp/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/reqresp/package.json b/packages/reqresp/package.json index 793134dbe84f..9d81d082d087 100644 --- a/packages/reqresp/package.json +++ b/packages/reqresp/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -50,15 +50,15 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/fast-crc32c": "^4.1.1", "@libp2p/interface": "^0.1.2", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/config": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/utils": "^1.14.0", "it-all": "^3.0.2", 
"it-pipe": "^3.0.1", "snappy": "^7.2.2", @@ -67,8 +67,8 @@ "uint8arraylist": "^2.4.3" }, "devDependencies": { - "@lodestar/logger": "^1.13.0", - "@lodestar/types": "^1.13.0", + "@lodestar/logger": "^1.14.0", + "@lodestar/types": "^1.14.0", "libp2p": "0.46.12" }, "peerDependencies": { diff --git a/packages/reqresp/src/ReqResp.ts b/packages/reqresp/src/ReqResp.ts index e79b5737bc91..671df3c83662 100644 --- a/packages/reqresp/src/ReqResp.ts +++ b/packages/reqresp/src/ReqResp.ts @@ -2,8 +2,8 @@ import {setMaxListeners} from "node:events"; import {Connection, Stream} from "@libp2p/interface/connection"; import {PeerId} from "@libp2p/interface/peer-id"; import type {Libp2p} from "libp2p"; -import {Logger} from "@lodestar/utils"; -import {getMetrics, Metrics, MetricsRegister} from "./metrics.js"; +import {Logger, MetricsRegister} from "@lodestar/utils"; +import {getMetrics, Metrics} from "./metrics.js"; import {RequestError, RequestErrorCode, sendRequest, SendRequestOpts} from "./request/index.js"; import {handleRequest} from "./response/index.js"; import { diff --git a/packages/reqresp/src/index.ts b/packages/reqresp/src/index.ts index 9bb07c1a4fce..d31960fdcd89 100644 --- a/packages/reqresp/src/index.ts +++ b/packages/reqresp/src/index.ts @@ -1,7 +1,7 @@ export {ReqResp} from "./ReqResp.js"; export type {ReqRespOpts} from "./ReqResp.js"; export {getMetrics} from "./metrics.js"; -export type {Metrics, MetricsRegister} from "./metrics.js"; +export type {Metrics} from "./metrics.js"; export {Encoding as ReqRespEncoding} from "./types.js"; // Expose enums renamed export * from "./types.js"; export * from "./interface.js"; diff --git a/packages/reqresp/src/metrics.ts b/packages/reqresp/src/metrics.ts index c4474d0d61b7..4af18a782322 100644 --- a/packages/reqresp/src/metrics.ts +++ b/packages/reqresp/src/metrics.ts @@ -1,62 +1,7 @@ -type LabelValues = Partial>; - -interface Gauge { - // Sorry for this mess, `prom-client` API choices are not great - // If the function 
signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: LabelValues, value?: number): void; - inc(arg1?: LabelValues | number, arg2?: number): void; - - dec(value?: number): void; - dec(labels: LabelValues, value?: number): void; - dec(arg1?: LabelValues | number, arg2?: number): void; - - set(value: number): void; - set(labels: LabelValues, value: number): void; - set(arg1?: LabelValues | number, arg2?: number): void; - - addCollect: (collectFn: () => void) => void; -} - -interface Histogram { - startTimer(arg1?: LabelValues): (labels?: LabelValues) => number; - - observe(value: number): void; - observe(labels: LabelValues, values: number): void; - observe(arg1: LabelValues | number, arg2?: number): void; - - reset(): void; -} - -type GaugeConfig = { - name: string; - help: string; - labelNames?: T[]; -}; - -type HistogramConfig = { - name: string; - help: string; - labelNames?: T[]; - buckets?: number[]; -}; - -export interface MetricsRegister { - gauge(config: GaugeConfig): Gauge; - histogram(config: HistogramConfig): Histogram; -} +import {MetricsRegister} from "@lodestar/utils"; export type Metrics = ReturnType; -export type LodestarGitData = { - /** "0.16.0 developer/feature-1 ac99f2b5" */ - version: string; - /** "4f816b16dfde718e2d74f95f2c8292596138c248" */ - commit: string; - /** "goerli" */ - network: string; -}; - /** * A collection of metrics used throughout the Gossipsub behaviour. */ @@ -65,48 +10,48 @@ export function getMetrics(register: MetricsRegister) { // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types. 
return { - outgoingRequests: register.gauge<"method">({ + outgoingRequests: register.gauge<{method: string}>({ name: "beacon_reqresp_outgoing_requests_total", help: "Counts total requests done per method", labelNames: ["method"], }), - outgoingRequestRoundtripTime: register.histogram<"method">({ + outgoingRequestRoundtripTime: register.histogram<{method: string}>({ name: "beacon_reqresp_outgoing_request_roundtrip_time_seconds", help: "Histogram of outgoing requests round-trip time", labelNames: ["method"], // Spec sets RESP_TIMEOUT = 10 sec buckets: [0.1, 0.2, 0.5, 1, 5, 10, 15, 60], }), - outgoingErrors: register.gauge<"method">({ + outgoingErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_outgoing_requests_error_total", help: "Counts total failed requests done per method", labelNames: ["method"], }), - incomingRequests: register.gauge<"method">({ + incomingRequests: register.gauge<{method: string}>({ name: "beacon_reqresp_incoming_requests_total", help: "Counts total responses handled per method", labelNames: ["method"], }), - incomingRequestHandlerTime: register.histogram<"method">({ + incomingRequestHandlerTime: register.histogram<{method: string}>({ name: "beacon_reqresp_incoming_request_handler_time_seconds", help: "Histogram of incoming requests internal handling time", labelNames: ["method"], // Spec sets RESP_TIMEOUT = 10 sec buckets: [0.1, 0.2, 0.5, 1, 5, 10], }), - incomingErrors: register.gauge<"method">({ + incomingErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_incoming_requests_error_total", help: "Counts total failed responses handled per method", labelNames: ["method"], }), - outgoingResponseTTFB: register.histogram<"method">({ + outgoingResponseTTFB: register.histogram<{method: string}>({ name: "beacon_reqresp_outgoing_response_ttfb_seconds", help: "Time to first byte (TTFB) for outgoing responses", labelNames: ["method"], // Spec sets TTFB_TIMEOUT = 5 sec buckets: [0.1, 1, 5], }), - incomingResponseTTFB: 
register.histogram<"method">({ + incomingResponseTTFB: register.histogram<{method: string}>({ name: "beacon_reqresp_incoming_response_ttfb_seconds", help: "Time to first byte (TTFB) for incoming responses", labelNames: ["method"], diff --git a/packages/reqresp/test/fixtures/messages.ts b/packages/reqresp/test/fixtures/messages.ts index da71e70500ed..7c5eedaeb3d4 100644 --- a/packages/reqresp/test/fixtures/messages.ts +++ b/packages/reqresp/test/fixtures/messages.ts @@ -10,7 +10,7 @@ type MessageFixture = { type: TypeSizes; binaryPayload: ResponseIncoming; chunks: Uint8Array[]; - asyncChunks: Buffer[]; + asyncChunks: Uint8Array[]; }; const phase0Metadata = ssz.phase0.Metadata.fromJson({ @@ -26,14 +26,14 @@ export const sszSnappyPhase0Metadata: MessageFixture = { fork: ForkName.phase0, protocolVersion: 1, }, - chunks: ["0x10", "0xff060000734e61507059011400000b5ee91209000000000000000000000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) + chunks: ["0x10", "0xff060000734e61507059011400000b5ee91209000000000000000000000000000000"].map((s) => + fromHexString(s) ), asyncChunks: [ "0x10", // length prefix "0xff060000734e61507059", // snappy frames header "0x011400000b5ee91209000000000000000000000000000000", // snappy frames content - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), }; const altairMetadata = ssz.altair.Metadata.fromJson({ @@ -68,14 +68,12 @@ export const sszSnappyPing: MessageFixture = { fork: ForkName.phase0, protocolVersion: 1, }, - chunks: ["0x08", "0xff060000734e61507059010c00000175de410100000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) - ), + chunks: ["0x08", "0xff060000734e61507059010c00000175de410100000000000000"].map((s) => fromHexString(s)), asyncChunks: [ "0x08", // length prefix "0xff060000734e61507059", // snappy frames header "0x010c00000175de410100000000000000", // snappy frames content - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), }; const statusData = { 
@@ -96,9 +94,9 @@ export const sszSnappyStatus: MessageFixture = { "0x54", // length prefix "0xff060000734e61507059", // snappy frames header "0x001b0000097802c15400da8a010004090009017e2b001c0900000000000000", - ].map((d) => Buffer.from(fromHexString(d))), - chunks: ["0x54", "0xff060000734e61507059001b0000097802c15400da8a010004090009017e2b001c0900000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) + ].map((d) => fromHexString(d)), + chunks: ["0x54", "0xff060000734e61507059001b0000097802c15400da8a010004090009017e2b001c0900000000000000"].map((s) => + fromHexString(s) ), }; @@ -137,11 +135,11 @@ export const sszSnappySignedBeaconBlockPhase0: MessageFixture = { "0x9403", "0xff060000734e61507059", "0x00340000fff3b3f594031064000000dafe01007a010004090009011108fe6f000054feb4008ab4007e0100fecc0011cc0cdc0000003e0400", - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), chunks: [ "0x9403", "0xff060000734e6150705900340000fff3b3f594031064000000dafe01007a010004090009011108fe6f000054feb4008ab4007e0100fecc0011cc0cdc0000003e0400", - ].map((s) => new Uint8Array(fromHexString(s))), + ].map((s) => fromHexString(s)), }; const signedBeaconBlockAltairData = { @@ -166,11 +164,11 @@ export const sszSnappySignedBeaconBlockAltair: MessageFixture = { "0xf803", // length prefix "0xff060000734e61507059", // snappy frames header "0x003f0000ee14ab0df8031064000000dafe01007a01000c995f0100010100090105ee70000d700054ee44000d44fe0100fecc0011cc0c400100003e0400fe01008e0100", - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), chunks: [ "0xb404", "0xff060000734e6150705900420000bab7f8feb4041064000000dafe01007a01000c995f0100010100090105ee70000d700054ee44000d44fe0100fecc0011cc0c7c0100003e0400fe0100fe01007e0100", - ].map((s) => new Uint8Array(fromHexString(s))), + ].map((s) => fromHexString(s)), }; // Set the altair fork to happen between the two precomputed SSZ snappy blocks diff --git a/packages/reqresp/test/globalSetup.ts 
b/packages/reqresp/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/reqresp/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/reqresp/test/setup.ts b/packages/reqresp/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/reqresp/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/reqresp/test/unit/ReqResp.test.ts b/packages/reqresp/test/unit/ReqResp.test.ts index 26a68ce02d25..b62b1883cce1 100644 --- a/packages/reqresp/test/unit/ReqResp.test.ts +++ b/packages/reqresp/test/unit/ReqResp.test.ts @@ -1,6 +1,5 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeEach, afterEach, vi} from "vitest"; import {Libp2p} from "libp2p"; -import sinon from "sinon"; import {Logger} from "@lodestar/utils"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {RespStatus} from "../../src/interface.js"; @@ -18,7 +17,7 @@ describe("ResResp", () => { beforeEach(() => { libp2p = { - dialProtocol: sinon.stub().resolves( + dialProtocol: vi.fn().mockResolvedValue( new MockLibP2pStream( responseEncode( [ @@ -32,7 +31,7 @@ describe("ResResp", () => { ping.method ) ), - handle: sinon.spy(), + handle: vi.fn(), } as unknown as Libp2p; logger = getEmptyLogger(); @@ -44,12 +43,16 @@ describe("ResResp", () => { }); }); + afterEach(() => { + vi.restoreAllMocks(); + }); + describe("dial only protocol", () => { it("should register protocol and dial", async () => { reqresp.registerDialOnlyProtocol(numberToStringProtocolDialOnly); - expect(reqresp.getRegisteredProtocols()).to.eql(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); - expect((libp2p.handle as sinon.SinonSpy).calledOnce).to.be.false; + 
expect(reqresp.getRegisteredProtocols()).toEqual(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); + expect(libp2p.handle).not.toHaveBeenCalledOnce(); }); }); @@ -57,8 +60,8 @@ describe("ResResp", () => { it("should register protocol and dial", async () => { await reqresp.registerProtocol(numberToStringProtocol); - expect(reqresp.getRegisteredProtocols()).to.eql(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); - expect((libp2p.handle as sinon.SinonSpy).calledOnce).to.be.true; + expect(reqresp.getRegisteredProtocols()).toEqual(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); + expect(libp2p.handle).toHaveBeenCalledOnce(); }); }); }); diff --git a/packages/reqresp/test/unit/encoders/reqestEncode.test.ts b/packages/reqresp/test/unit/encoders/reqestEncode.test.ts index f642151f4609..221dc8237e19 100644 --- a/packages/reqresp/test/unit/encoders/reqestEncode.test.ts +++ b/packages/reqresp/test/unit/encoders/reqestEncode.test.ts @@ -1,3 +1,4 @@ +import {describe, it} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {requestEncode} from "../../../src/encoders/requestEncode.js"; @@ -6,14 +7,12 @@ import {expectEqualByteChunks} from "../../utils/index.js"; describe("encoders / requestEncode", () => { describe("valid cases", () => { - for (const {id, protocol, requestBody, chunks} of requestEncodersCases) { - it(`${id}`, async () => { - const encodedChunks = await pipe(requestEncode(protocol, requestBody), all); - expectEqualByteChunks( - encodedChunks as Uint8Array[], - chunks.map((c) => c.subarray()) - ); - }); - } + it.each(requestEncodersCases)("$id", async ({protocol, requestBody, chunks}) => { + const encodedChunks = await pipe(requestEncode(protocol, requestBody), all); + expectEqualByteChunks( + encodedChunks as Uint8Array[], + chunks.map((c) => c.subarray()) + ); + }); }); }); diff --git a/packages/reqresp/test/unit/encoders/requestDecode.test.ts b/packages/reqresp/test/unit/encoders/requestDecode.test.ts 
index f306a621b6e6..60ccab1eecf1 100644 --- a/packages/reqresp/test/unit/encoders/requestDecode.test.ts +++ b/packages/reqresp/test/unit/encoders/requestDecode.test.ts @@ -1,30 +1,21 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import {pipe} from "it-pipe"; import {requestDecode} from "../../../src/encoders/requestDecode.js"; import {requestEncodersCases, requestEncodersErrorCases} from "../../fixtures/encoders.js"; import {expectRejectedWithLodestarError} from "../../utils/errors.js"; import {arrToSource} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / requestDecode", () => { describe("valid cases", () => { - for (const {id, protocol, requestBody, chunks} of requestEncodersCases) { - it(`${id}`, async () => { - // TODO: Debug this type error - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const decodedBody = await pipe(arrToSource(chunks), requestDecode(protocol)); - expect(decodedBody).to.deep.equal(requestBody); - }); - } + it.each(requestEncodersCases)("$id", async ({protocol, requestBody, chunks}) => { + const decodedBody = await pipe(arrToSource(chunks), requestDecode(protocol)); + expect(decodedBody).to.deep.equal(requestBody); + }); }); describe("error cases", () => { - for (const {id, protocol, errorDecode, chunks} of requestEncodersErrorCases.filter((r) => r.errorDecode)) { - it(`${id}`, async () => { - await expectRejectedWithLodestarError(pipe(arrToSource(chunks), requestDecode(protocol)), errorDecode); - }); - } + it.each(requestEncodersErrorCases.filter((r) => r.errorDecode))("$id", async ({protocol, errorDecode, chunks}) => { + await expectRejectedWithLodestarError(pipe(arrToSource(chunks), requestDecode(protocol)), errorDecode); + }); }); }); diff --git a/packages/reqresp/test/unit/encoders/responseDecode.test.ts b/packages/reqresp/test/unit/encoders/responseDecode.test.ts index 777bda9bd371..a6dfe092b169 
100644 --- a/packages/reqresp/test/unit/encoders/responseDecode.test.ts +++ b/packages/reqresp/test/unit/encoders/responseDecode.test.ts @@ -1,5 +1,4 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {LodestarError} from "@lodestar/utils"; @@ -8,30 +7,25 @@ import {responseEncodersErrorTestCases, responseEncodersTestCases} from "../../f import {expectRejectedWithLodestarError} from "../../utils/errors.js"; import {arrToSource, onlySuccessResp} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / responseDecode", () => { describe("valid cases", () => { - for (const {id, protocol, responseChunks, chunks} of responseEncodersTestCases) { - it(`${id}`, async () => { - const responses = await pipe( - arrToSource(chunks), - // eslint-disable-next-line @typescript-eslint/no-empty-function - responseDecode(protocol, {onFirstHeader: () => {}, onFirstResponseChunk: () => {}}), - all - ); + it.each(responseEncodersTestCases)("$id", async ({protocol, responseChunks, chunks}) => { + const responses = await pipe( + arrToSource(chunks), + // eslint-disable-next-line @typescript-eslint/no-empty-function + responseDecode(protocol, {onFirstHeader: () => {}, onFirstResponseChunk: () => {}}), + all + ); - const expectedResponses = responseChunks.filter(onlySuccessResp).map((r) => r.payload); - expect(responses).to.deep.equal(expectedResponses); - }); - } + const expectedResponses = responseChunks.filter(onlySuccessResp).map((r) => r.payload); + expect(responses).to.deep.equal(expectedResponses); + }); }); describe("error cases", () => { - for (const {id, protocol, chunks, decodeError} of responseEncodersErrorTestCases.filter( - (r) => r.decodeError !== undefined - )) { - it(`${id}`, async () => { + it.each(responseEncodersErrorTestCases.filter((r) => r.decodeError !== undefined))( + "$id", + async ({protocol, chunks, 
decodeError}) => { await expectRejectedWithLodestarError( pipe( arrToSource(chunks as Uint8Array[]), @@ -41,7 +35,7 @@ describe("encoders / responseDecode", () => { ), decodeError as LodestarError ); - }); - } + } + ); }); }); diff --git a/packages/reqresp/test/unit/encoders/responseEncode.test.ts b/packages/reqresp/test/unit/encoders/responseEncode.test.ts index f8617c27ff43..b9b5f3f8ee11 100644 --- a/packages/reqresp/test/unit/encoders/responseEncode.test.ts +++ b/packages/reqresp/test/unit/encoders/responseEncode.test.ts @@ -1,5 +1,4 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {Protocol} from "../../../src/types.js"; @@ -7,19 +6,18 @@ import {responseEncodersTestCases} from "../../fixtures/encoders.js"; import {responseEncode} from "../../utils/response.js"; import {expectEqualByteChunks} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / responseEncode", () => { describe("valid cases", () => { - for (const {id, protocol, responseChunks, chunks} of responseEncodersTestCases.filter((f) => !f.skipEncoding)) { - it(`${id}`, async () => { + it.each(responseEncodersTestCases.filter((f) => !f.skipEncoding))( + "$id", + async ({protocol, responseChunks, chunks}) => { const encodedChunks = await pipe(responseEncode(responseChunks, protocol as Protocol), all); expectEqualByteChunks( encodedChunks as Uint8Array[], chunks.map((c) => c.subarray()) ); - }); - } + } + ); }); }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts index 6ad5954dafa2..bfa597e42519 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts @@ -1,5 +1,4 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from 
"chai-as-promised"; +import {describe, it, expect} from "vitest"; import {Uint8ArrayList} from "uint8arraylist"; import {encode as varintEncode} from "uint8-varint"; import {readSszSnappyPayload} from "../../../../src/encodingStrategies/sszSnappy/index.js"; @@ -11,16 +10,12 @@ import { } from "../../../fixtures/index.js"; import {arrToSource} from "../../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encodingStrategies / sszSnappy / decode", () => { - for (const {id, type, binaryPayload, chunks} of encodingStrategiesTestCases) { - it(id, async () => { - const bufferedSource = new BufferedSource(arrToSource(chunks)); - const bodyResult = await readSszSnappyPayload(bufferedSource, type); - expect(bodyResult).to.deep.equal(binaryPayload.data, "Wrong decoded body"); - }); - } + it.each(encodingStrategiesTestCases)("$id", async ({type, binaryPayload, chunks}) => { + const bufferedSource = new BufferedSource(arrToSource(chunks)); + const bodyResult = await readSszSnappyPayload(bufferedSource, type); + expect(bodyResult).toEqual(binaryPayload.data); + }); describe("mainnet cases", () => { for (const {id, payload, type: serializer, streamedBody} of encodingStrategiesMainnetTestCases) { @@ -31,7 +26,7 @@ describe("encodingStrategies / sszSnappy / decode", () => { const bufferedSource = new BufferedSource(arrToSource([streamedBytes])); const bodyResult = await readSszSnappyPayload(bufferedSource, serializer); - expect(bodyResult).to.deep.equal(payload.data, "Wrong decoded body"); + expect(bodyResult).toEqual(new Uint8Array(payload.data)); }); } }); @@ -40,7 +35,7 @@ describe("encodingStrategies / sszSnappy / decode", () => { for (const {id, type, error, chunks} of encodingStrategiesDecodingErrorCases) { it(id, async () => { const bufferedSource = new BufferedSource(arrToSource([new Uint8ArrayList(...chunks)])); - await expect(readSszSnappyPayload(bufferedSource, type)).to.be.rejectedWith(error); + await expect(readSszSnappyPayload(bufferedSource, 
type)).rejects.toThrow(error); }); } }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts index 6e94596930e6..6ec27d1e6b16 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {encode as varintEncode} from "uint8-varint"; @@ -7,26 +7,22 @@ import {encodingStrategiesMainnetTestCases, encodingStrategiesTestCases} from ". import {expectEqualByteChunks} from "../../../utils/index.js"; describe("encodingStrategies / sszSnappy / encode", () => { - for (const {id, binaryPayload, chunks} of encodingStrategiesTestCases) { - it(id, async () => { - const encodedChunks = await pipe(writeSszSnappyPayload(binaryPayload.data), all); - expectEqualByteChunks( - encodedChunks as Uint8Array[], - chunks.map((c) => c.subarray()) - ); - }); - } + it.each(encodingStrategiesTestCases)("$id", async ({binaryPayload, chunks}) => { + const encodedChunks = await pipe(writeSszSnappyPayload(Buffer.from(binaryPayload.data)), all); + expectEqualByteChunks( + encodedChunks as Uint8Array[], + chunks.map((c) => c.subarray()) + ); + }); describe("mainnet cases", () => { - for (const {id, payload, streamedBody} of encodingStrategiesMainnetTestCases) { - it(id, async () => { - const bodySize = payload.data.length; + it.each(encodingStrategiesMainnetTestCases)("$id", async ({payload, streamedBody}) => { + const bodySize = payload.data.length; - const encodedChunks = await pipe(writeSszSnappyPayload(payload.data), all); - const encodedStream = Buffer.concat(encodedChunks as Uint8Array[]); - const expectedStreamed = Buffer.concat([Buffer.from(varintEncode(bodySize)), streamedBody]); - expect(encodedStream).to.be.deep.equal(expectedStreamed); 
- }); - } + const encodedChunks = await pipe(writeSszSnappyPayload(Buffer.from(payload.data)), all); + const encodedStream = Buffer.concat(encodedChunks as Uint8Array[]); + const expectedStreamed = Buffer.concat([Buffer.from(varintEncode(bodySize)), streamedBody]); + expect(encodedStream).toEqual(expectedStreamed); + }); }); }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts index 2abb99e35d54..b47621082a65 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts @@ -1,57 +1,59 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Uint8ArrayList} from "uint8arraylist"; import {pipe} from "it-pipe"; import {SnappyFramesUncompress} from "../../../../../src/encodingStrategies/sszSnappy/snappyFrames/uncompress.js"; import {encodeSnappy} from "../../../../../src/encodingStrategies/sszSnappy/snappyFrames/compress.js"; describe("encodingStrategies / sszSnappy / snappy frames / uncompress", function () { - it("should work with short input", function (done) { - const testData = "Small test data"; - const compressIterable = encodeSnappy(Buffer.from(testData)); - - const decompress = new SnappyFramesUncompress(); - - void pipe(compressIterable, async function (source) { - for await (const data of source) { - const result = decompress.uncompress(new Uint8ArrayList(data)); - if (result) { - expect(result.subarray().toString()).to.be.equal(testData); - done(); + it("should work with short input", () => + new Promise((done) => { + const testData = "Small test data"; + const compressIterable = encodeSnappy(Buffer.from(testData)); + + const decompress = new SnappyFramesUncompress(); + + void pipe(compressIterable, async function (source) { + for await (const data of 
source) { + const result = decompress.uncompress(new Uint8ArrayList(data)); + if (result) { + expect(result.subarray().toString()).toBe(testData); + done(); + } } - } - }); - }); - - it("should work with huge input", function (done) { - const testData = Buffer.alloc(100000, 4).toString(); - const compressIterable = encodeSnappy(Buffer.from(testData)); - let result = Buffer.alloc(0); - const decompress = new SnappyFramesUncompress(); - - void pipe(compressIterable, async function (source) { - for await (const data of source) { - // testData will come compressed as two or more chunks - result = Buffer.concat([ - result, - decompress.uncompress(new Uint8ArrayList(data))?.subarray() ?? Buffer.alloc(0), - ]); - if (result.length === testData.length) { - expect(result.toString()).to.be.equal(testData); - done(); + }); + })); + + it("should work with huge input", () => + new Promise((done) => { + const testData = Buffer.alloc(100000, 4).toString(); + const compressIterable = encodeSnappy(Buffer.from(testData)); + let result = Buffer.alloc(0); + const decompress = new SnappyFramesUncompress(); + + void pipe(compressIterable, async function (source) { + for await (const data of source) { + // testData will come compressed as two or more chunks + result = Buffer.concat([ + result, + decompress.uncompress(new Uint8ArrayList(data))?.subarray() ?? 
Buffer.alloc(0), + ]); + if (result.length === testData.length) { + expect(result.toString()).toBe(testData); + done(); + } } - } - }); - }); + }); + })); it("should detect malformed input", function () { const decompress = new SnappyFramesUncompress(); - expect(() => decompress.uncompress(new Uint8ArrayList(Buffer.alloc(32, 5)))).to.throw(); + expect(() => decompress.uncompress(new Uint8ArrayList(Buffer.alloc(32, 5)))).toThrow(); }); it("should return null if not enough data", function () { const decompress = new SnappyFramesUncompress(); - expect(decompress.uncompress(new Uint8ArrayList(Buffer.alloc(3, 1)))).to.equal(null); + expect(decompress.uncompress(new Uint8ArrayList(Buffer.alloc(3, 1)))).toBe(null); }); }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts index eae2da2e57f8..a494b4acab9a 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts @@ -1,10 +1,10 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {maxEncodedLen} from "../../../../src/encodingStrategies/sszSnappy/utils.js"; describe("encodingStrategies / sszSnappy / utils", () => { describe("maxEncodedLen", () => { it("should calculate correct maxEncodedLen", () => { - expect(maxEncodedLen(6)).to.be.equal(39); + expect(maxEncodedLen(6)).toBe(39); }); }); }); diff --git a/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts b/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts index d8ea38b539dd..b3393c609a76 100644 --- a/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts +++ b/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts @@ -1,59 +1,58 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {RateLimiterGRCA} from 
"../../../src/rate_limiter/rateLimiterGRCA.js"; describe("rateLimiterGRCA", () => { let rateLimiter: RateLimiterGRCA; const limit = 500; const limitTimeMs = 60 * 1000; // 1 min - const sandbox = sinon.createSandbox(); beforeEach(() => { - sandbox.useFakeTimers(); + vi.useFakeTimers(); rateLimiter = RateLimiterGRCA.fromQuota({quotaTimeMs: limitTimeMs, quota: limit}); }); afterEach(() => { - sandbox.restore(); + vi.restoreAllMocks(); + vi.useRealTimers(); }); describe("allows()", () => { it("should throw error if requested for a zero value", () => { - expect(() => rateLimiter.allows(null, 0)).to.throw("Token value should always be positive. Given: 0"); + expect(() => rateLimiter.allows(null, 0)).toThrow("Token value should always be positive. Given: 0"); }); it("should throw error if requested for a negative value", () => { - expect(() => rateLimiter.allows(null, -1)).to.throw("Token value should always be positive. Given: -1"); + expect(() => rateLimiter.allows(null, -1)).toThrow("Token value should always be positive. 
Given: -1"); }); it("should return valid number of requests within request window", () => { - expect(rateLimiter.allows(null, 10)).to.be.true; - expect(rateLimiter.allows(null, 50)).to.be.true; + expect(rateLimiter.allows(null, 10)).toBe(true); + expect(rateLimiter.allows(null, 50)).toBe(true); }); it("should return valid number of requests within request window for maximum requests", () => { - expect(rateLimiter.allows(null, limit)).to.be.true; + expect(rateLimiter.allows(null, limit)).toBe(true); }); it("should return zero within request window for higher number of requests", () => { - expect(rateLimiter.allows(null, limit + 1)).to.be.false; + expect(rateLimiter.allows(null, limit + 1)).toBe(false); }); it("should return zero once the tracker limit reached", () => { rateLimiter.allows(null, limit); - expect(rateLimiter.allows(null, 10)).to.be.false; + expect(rateLimiter.allows(null, 10)).toBe(false); }); it("should return over limit values before limit reached", () => { rateLimiter.allows(null, limit - 10); - expect(rateLimiter.allows(null, 15)).to.be.false; + expect(rateLimiter.allows(null, 15)).toBe(false); }); it("should reset the rate after the time limit", () => { rateLimiter.allows(null, limit); - expect(rateLimiter.allows(null, 10)).to.be.false; - sandbox.clock.tick(limitTimeMs); - expect(rateLimiter.allows(null, 10)).to.be.true; + expect(rateLimiter.allows(null, 10)).toBe(false); + vi.advanceTimersByTime(limitTimeMs); + expect(rateLimiter.allows(null, 10)).toBe(true); }); }); diff --git a/packages/reqresp/test/unit/request/index.test.ts b/packages/reqresp/test/unit/request/index.test.ts index a056d8055668..b3241a8cc44a 100644 --- a/packages/reqresp/test/unit/request/index.test.ts +++ b/packages/reqresp/test/unit/request/index.test.ts @@ -1,9 +1,8 @@ +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {PeerId} from "@libp2p/interface/peer-id"; import all from "it-all"; import {pipe} from "it-pipe"; -import {expect} from "chai"; 
import {Libp2p} from "libp2p"; -import sinon from "sinon"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {LodestarError, sleep} from "@lodestar/utils"; import {RequestError, RequestErrorCode, sendRequest, SendRequestOpts} from "../../../src/request/index.js"; @@ -21,7 +20,6 @@ describe("request / sendRequest", () => { let controller: AbortController; let peerId: PeerId; let libp2p: Libp2p; - const sandbox = sinon.createSandbox(); const emptyProtocol = pingProtocol(getEmptyHandler()); const EMPTY_REQUEST = new Uint8Array(); @@ -36,9 +34,9 @@ describe("request / sendRequest", () => { id: "Return first chunk only for a single-chunk method", protocols: [emptyProtocol], requestBody: sszSnappyPing.binaryPayload, - expectedReturn: [sszSnappyPing.binaryPayload], + expectedReturn: [{...sszSnappyPing.binaryPayload, data: Buffer.from(sszSnappyPing.binaryPayload.data)}], }, - // limit to max responses is no longer the responsability of this package + // limit to max responses is no longer the responsibility of this package // { // id: "Return up to maxResponses for a multi-chunk method", // protocols: [customProtocol({})], @@ -53,16 +51,16 @@ describe("request / sendRequest", () => { }); afterEach(() => { - sandbox.restore(); + vi.restoreAllMocks(); controller.abort(); }); for (const {id, protocols, expectedReturn, requestBody} of testCases) { it(id, async () => { libp2p = { - dialProtocol: sinon - .stub() - .resolves( + dialProtocol: vi + .fn() + .mockResolvedValue( new MockLibP2pStream( responseEncode([{status: RespStatus.SUCCESS, payload: requestBody}], protocols[0] as Protocol), protocols[0].method @@ -81,7 +79,7 @@ describe("request / sendRequest", () => { ), all ); - expect(responses).to.deep.equal(expectedReturn); + expect(responses).toEqual(expectedReturn); }); } @@ -138,7 +136,7 @@ describe("request / sendRequest", () => { for (const {id, source, opts, error} of timeoutTestCases) { it(id, async () => { libp2p = { - dialProtocol: 
sinon.stub().resolves(new MockLibP2pStream(source(), testMethod)), + dialProtocol: vi.fn().mockResolvedValue(new MockLibP2pStream(source(), testMethod)), } as unknown as Libp2p; await expectRejectedWithLodestarError( diff --git a/packages/reqresp/test/unit/response/index.test.ts b/packages/reqresp/test/unit/response/index.test.ts index 7298a21dccbd..5ab299b586ab 100644 --- a/packages/reqresp/test/unit/response/index.test.ts +++ b/packages/reqresp/test/unit/response/index.test.ts @@ -1,5 +1,5 @@ +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import {PeerId} from "@libp2p/interface/peer-id"; -import {expect} from "chai"; import {LodestarError, fromHex} from "@lodestar/utils"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {Protocol, RespStatus} from "../../../src/index.js"; @@ -54,30 +54,28 @@ describe("response / handleRequest", () => { afterEach(() => controller.abort()); - for (const {id, requestChunks, protocol, expectedResponseChunks, expectedError} of testCases) { - it(id, async () => { - const stream = new MockLibP2pStream(requestChunks as any); - const rateLimiter = new ReqRespRateLimiter({rateLimitMultiplier: 0}); + it.each(testCases)("$id", async ({requestChunks, protocol, expectedResponseChunks, expectedError}) => { + const stream = new MockLibP2pStream(requestChunks as any); + const rateLimiter = new ReqRespRateLimiter({rateLimitMultiplier: 0}); - const resultPromise = handleRequest({ - logger, - metrics: null, - protocol, - protocolID: protocol.method, - stream, - peerId, - signal: controller.signal, - rateLimiter, - }); + const resultPromise = handleRequest({ + logger, + metrics: null, + protocol, + protocolID: protocol.method, + stream, + peerId, + signal: controller.signal, + rateLimiter, + }); - // Make sure the test error-ed with expected error, otherwise it's hard to debug with responseChunks - if (expectedError) { - await expectRejectedWithLodestarError(resultPromise, expectedError); - } else { - await 
expect(resultPromise).to.not.rejectedWith(); - } + // Make sure the test error-ed with expected error, otherwise it's hard to debug with responseChunks + if (expectedError) { + await expectRejectedWithLodestarError(resultPromise, expectedError); + } else { + await expect(resultPromise).resolves.toBeUndefined(); + } - expectEqualByteChunks(stream.resultChunks, expectedResponseChunks, "Wrong response chunks"); - }); - } + expectEqualByteChunks(stream.resultChunks, expectedResponseChunks, "Wrong response chunks"); + }); }); diff --git a/packages/reqresp/test/unit/utils/protocolId.test.ts b/packages/reqresp/test/unit/utils/protocolId.test.ts index 7d16669c1421..04cd93222045 100644 --- a/packages/reqresp/test/unit/utils/protocolId.test.ts +++ b/packages/reqresp/test/unit/utils/protocolId.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Encoding, ProtocolAttributes} from "../../../src/index.js"; import {formatProtocolID, parseProtocolID as reqrespParseProtocolID} from "../../../src/utils/index.js"; @@ -36,11 +36,11 @@ describe("ReqResp protocolID parse / render", () => { for (const {method, encoding, version, protocolId} of testCases) { it(`Should render ${protocolId}`, () => { - expect(formatProtocolID(protocolPrefix, method, version, encoding)).to.equal(protocolId); + expect(formatProtocolID(protocolPrefix, method, version, encoding)).toBe(protocolId); }); it(`Should parse ${protocolId}`, () => { - expect(parseProtocolId(protocolId)).to.deep.equal({protocolPrefix, method, version, encoding}); + expect(parseProtocolId(protocolId)).toEqual({protocolPrefix, method, version, encoding}); }); } }); diff --git a/packages/reqresp/test/utils/index.ts b/packages/reqresp/test/utils/index.ts index 8ad11bbd30b2..924218c73cd4 100644 --- a/packages/reqresp/test/utils/index.ts +++ b/packages/reqresp/test/utils/index.ts @@ -74,7 +74,7 @@ export function fromHexBuf(hex: string): Buffer { return Buffer.from(fromHex(hex)); } 
-export const ZERO_HASH = Buffer.alloc(32, 0); +export const ZERO_HASH = new Uint8Array(32); export const onlySuccessResp = (resp: ResponseChunk): resp is {status: RespStatus.SUCCESS; payload: ResponseIncoming} => resp.status === RespStatus.SUCCESS; diff --git a/packages/reqresp/vitest.config.ts b/packages/reqresp/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/reqresp/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/spec-test-util/package.json b/packages/spec-test-util/package.json index 5fc59cd76e12..c267e65133c0 100644 --- a/packages/spec-test-util/package.json +++ b/packages/spec-test-util/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/spec-test-util", - "version": "1.13.0", + "version": "1.14.0", "description": "Spec test suite generator from yaml test files", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -45,7 +45,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.13.0", + "@lodestar/utils": "^1.14.0", "async-retry": "^1.3.3", "axios": "^1.3.4", "chai": "^4.3.7", diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index f0f2f150f673..2b49b94179ca 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -63,10 +63,10 @@ "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/persistent-ts": "^0.19.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/config": 
"^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "bigint-buffer": "^1.1.5", "buffer-xor": "^2.0.2" }, diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 8b63b0285098..78cccacf1d00 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -309,8 +309,10 @@ export class EpochCache { if (cachedPreviousShuffling == null && isActiveValidator(validator, previousEpoch)) { previousActiveIndices.push(i); } - if (cachedCurrentShuffling == null && isActiveValidator(validator, currentEpoch)) { - currentActiveIndices.push(i); + if (isActiveValidator(validator, currentEpoch)) { + if (cachedCurrentShuffling == null) { + currentActiveIndices.push(i); + } // We track totalActiveBalanceIncrements as ETH to fit total network balance in a JS number (53 bits) totalActiveBalanceIncrements += effectiveBalanceIncrements[i]; } diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index 140e3d04c155..b01ca0c409b2 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -159,9 +159,9 @@ export function createCachedBeaconState( * Create a CachedBeaconState given a cached seed state and state bytes * This guarantees that the returned state shares the same tree with the seed state * Check loadState() api for more details - * TODO: after EIP-6110 need to provide a pivotValidatorIndex to decide which comes to finalized validators cache, which comes to unfinalized cache + * // TODO: rename to loadUnfinalizedCachedBeaconState() due to EIP-6110 */ -export function loadUnfinalizedCachedBeaconState( +export function loadCachedBeaconState( cachedSeedState: T, stateBytes: Uint8Array, opts?: EpochCacheOpts diff --git a/packages/state-transition/src/epoch/index.ts 
b/packages/state-transition/src/epoch/index.ts index 05c8b55d0435..b55ebe291fb9 100644 --- a/packages/state-transition/src/epoch/index.ts +++ b/packages/state-transition/src/epoch/index.ts @@ -51,6 +51,22 @@ export {computeUnrealizedCheckpoints} from "./computeUnrealizedCheckpoints.js"; const maxValidatorsPerStateSlashing = SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE; const maxSafeValidators = Math.floor(Number.MAX_SAFE_INTEGER / MAX_EFFECTIVE_BALANCE); +/** + * Epoch transition steps tracked in metrics + */ +export enum EpochTransitionStep { + beforeProcessEpoch = "beforeProcessEpoch", + afterProcessEpoch = "afterProcessEpoch", + processJustificationAndFinalization = "processJustificationAndFinalization", + processInactivityUpdates = "processInactivityUpdates", + processRegistryUpdates = "processRegistryUpdates", + processSlashings = "processSlashings", + processRewardsAndPenalties = "processRewardsAndPenalties", + processEffectiveBalanceUpdates = "processEffectiveBalanceUpdates", + processParticipationFlagUpdates = "processParticipationFlagUpdates", + processSyncCommitteeUpdates = "processSyncCommitteeUpdates", +} + export function processEpoch( fork: ForkSeq, state: CachedBeaconStateAllForks, @@ -67,14 +83,14 @@ export function processEpoch( { const timer = metrics?.epochTransitionStepTime.startTimer({ - step: "processJustificationAndFinalization", + step: EpochTransitionStep.processJustificationAndFinalization, }); processJustificationAndFinalization(state, cache); timer?.(); } if (fork >= ForkSeq.altair) { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "processInactivityUpdates"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processInactivityUpdates}); processInactivityUpdates(state as CachedBeaconStateAltair, cache); timer?.(); } @@ -83,7 +99,7 @@ export function processEpoch( // after processSlashings() to update balances only once // processRewardsAndPenalties(state, 
cache); { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "processRegistryUpdates"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRegistryUpdates}); processRegistryUpdates(state, cache); timer?.(); } @@ -91,13 +107,13 @@ export function processEpoch( // accumulate slashing penalties and only update balances once in processRewardsAndPenalties() let slashingPenalties: number[]; { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "processSlashings"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processSlashings}); slashingPenalties = processSlashings(state, cache, false); timer?.(); } { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "processRewardsAndPenalties"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRewardsAndPenalties}); processRewardsAndPenalties(state, cache, slashingPenalties); timer?.(); } @@ -106,7 +122,7 @@ export function processEpoch( { const timer = metrics?.epochTransitionStepTime.startTimer({ - step: "processEffectiveBalanceUpdates", + step: EpochTransitionStep.processEffectiveBalanceUpdates, }); processEffectiveBalanceUpdates(state, cache); timer?.(); @@ -126,7 +142,7 @@ export function processEpoch( } else { { const timer = metrics?.epochTransitionStepTime.startTimer({ - step: "processParticipationFlagUpdates", + step: EpochTransitionStep.processParticipationFlagUpdates, }); processParticipationFlagUpdates(state as CachedBeaconStateAltair); timer?.(); @@ -134,7 +150,7 @@ export function processEpoch( { const timer = metrics?.epochTransitionStepTime.startTimer({ - step: "processSyncCommitteeUpdates", + step: EpochTransitionStep.processSyncCommitteeUpdates, }); processSyncCommitteeUpdates(state as CachedBeaconStateAltair); timer?.(); diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 
e72b6fa0581c..8786c0f6e358 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -2,6 +2,7 @@ export * from "./stateTransition.js"; export * from "./constants/index.js"; export * from "./util/index.js"; export * from "./signatureSets/index.js"; +export type {EpochTransitionStep} from "./epoch/index.js"; export type {BeaconStateTransitionMetrics} from "./metrics.js"; export type { @@ -25,7 +26,7 @@ export type { // Main state caches export { createCachedBeaconState, - loadUnfinalizedCachedBeaconState, + loadCachedBeaconState, type BeaconStateCache, isCachedBeaconState, isStateBalancesNodesPopulated, diff --git a/packages/state-transition/src/metrics.ts b/packages/state-transition/src/metrics.ts index 681bb2b910cf..62062bbfc539 100644 --- a/packages/state-transition/src/metrics.ts +++ b/packages/state-transition/src/metrics.ts @@ -1,18 +1,21 @@ import {Epoch} from "@lodestar/types"; +import {Gauge, Histogram} from "@lodestar/utils"; import {CachedBeaconStateAllForks} from "./types.js"; +import {StateCloneSource, StateHashTreeRootSource} from "./stateTransition.js"; import {AttesterStatus} from "./util/attesterStatus.js"; +import {EpochTransitionStep} from "./epoch/index.js"; export type BeaconStateTransitionMetrics = { epochTransitionTime: Histogram; epochTransitionCommitTime: Histogram; - epochTransitionStepTime: Histogram<"step">; + epochTransitionStepTime: Histogram<{step: EpochTransitionStep}>; processBlockTime: Histogram; processBlockCommitTime: Histogram; - stateHashTreeRootTime: Histogram; - preStateBalancesNodesPopulatedMiss: Gauge<"source">; - preStateBalancesNodesPopulatedHit: Gauge<"source">; - preStateValidatorsNodesPopulatedMiss: Gauge<"source">; - preStateValidatorsNodesPopulatedHit: Gauge<"source">; + stateHashTreeRootTime: Histogram<{source: StateHashTreeRootSource}>; + preStateBalancesNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; + preStateBalancesNodesPopulatedHit: Gauge<{source: 
StateCloneSource}>; + preStateValidatorsNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; + preStateValidatorsNodesPopulatedHit: Gauge<{source: StateCloneSource}>; preStateClonedCount: Histogram; postStateBalancesNodesPopulatedMiss: Gauge; postStateBalancesNodesPopulatedHit: Gauge; @@ -21,26 +24,10 @@ export type BeaconStateTransitionMetrics = { registerValidatorStatuses: (currentEpoch: Epoch, statuses: AttesterStatus[], balances?: number[]) => void; }; -type LabelValues = Partial>; - -interface Histogram { - startTimer(labels?: LabelValues): (labels?: LabelValues) => number; - - observe(value: number): void; - observe(labels: LabelValues, values: number): void; - observe(arg1: LabelValues | number, arg2?: number): void; -} - -interface Gauge { - inc(value?: number): void; - inc(labels: LabelValues, value?: number): void; - inc(arg1?: LabelValues | number, arg2?: number): void; -} - export function onStateCloneMetrics( state: CachedBeaconStateAllForks, metrics: BeaconStateTransitionMetrics, - source: "stateTransition" | "processSlots" + source: StateCloneSource ): void { metrics.preStateClonedCount.observe(state.clonedCount); diff --git a/packages/state-transition/src/signatureSets/proposer.ts b/packages/state-transition/src/signatureSets/proposer.ts index a00bcacc7c99..135ac7ed5c7a 100644 --- a/packages/state-transition/src/signatureSets/proposer.ts +++ b/packages/state-transition/src/signatureSets/proposer.ts @@ -1,5 +1,5 @@ -import {DOMAIN_BEACON_PROPOSER, DOMAIN_BLOB_SIDECAR} from "@lodestar/params"; -import {allForks, isBlindedBeaconBlock, isBlindedBlobSidecar, ssz} from "@lodestar/types"; +import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; +import {allForks, isBlindedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {computeSigningRoot} from "../util/index.js"; import {ISignatureSet, SignatureSetType, verifySignatureSet} from "../util/signatureSets.js"; import {CachedBeaconStateAllForks} from "../types.js"; @@ -17,7 +17,7 @@ export function 
getBlockProposerSignatureSet( signedBlock: allForks.FullOrBlindedSignedBeaconBlock ): ISignatureSet { const {config, epochCtx} = state; - const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); + const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); const blockType = isBlindedBeaconBlock(signedBlock.message) ? config.getBlindedForkTypes(signedBlock.message.slot).BeaconBlock @@ -31,19 +31,17 @@ export function getBlockProposerSignatureSet( }; } -export function getBlobProposerSignatureSet( +export function getBlockHeaderProposerSignatureSet( state: CachedBeaconStateAllForks, - signedBlob: allForks.FullOrBlindedSignedBlobSidecar + signedBlockHeader: phase0.SignedBeaconBlockHeader ): ISignatureSet { const {config, epochCtx} = state; - const domain = config.getDomain(state.slot, DOMAIN_BLOB_SIDECAR, signedBlob.message.slot); - - const blockType = isBlindedBlobSidecar(signedBlob.message) ? ssz.deneb.BlindedBlobSidecar : ssz.deneb.BlobSidecar; + const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlockHeader.message.slot); return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedBlob.message.proposerIndex], - signingRoot: computeSigningRoot(blockType, signedBlob.message, domain), - signature: signedBlob.signature, + pubkey: epochCtx.index2pubkey[signedBlockHeader.message.proposerIndex], + signingRoot: computeSigningRoot(ssz.phase0.BeaconBlockHeader, signedBlockHeader.message, domain), + signature: signedBlockHeader.signature, }; } diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index cdb8878c87fa..b3f3b41eb865 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -20,7 +20,7 @@ import { upgradeStateToDeneb, } from "./slot/index.js"; import {processBlock} from "./block/index.js"; -import {processEpoch} from 
"./epoch/index.js"; +import {EpochTransitionStep, processEpoch} from "./epoch/index.js"; import {BlockExternalData, DataAvailableStatus, ExecutionPayloadStatus} from "./block/externalData.js"; import {ProcessBlockOpts} from "./block/types.js"; @@ -36,6 +36,24 @@ export type StateTransitionOpts = BlockExternalData & dontTransferCache?: boolean; }; +/** + * `state.clone()` invocation source tracked in metrics + */ +export enum StateCloneSource { + stateTransition = "stateTransition", + processSlots = "processSlots", +} + +/** + * `state.hashTreeRoot()` invocation source tracked in metrics + */ +export enum StateHashTreeRootSource { + stateTransition = "state_transition", + blockTransition = "block_transition", + prepareNextSlot = "prepare_next_slot", + computeNewStateRoot = "compute_new_state_root", +} + /** * Implementation Note: follows the optimizations in protolambda's eth2fastspec (https://github.com/protolambda/eth2fastspec) */ @@ -58,7 +76,7 @@ export function stateTransition( let postState = state.clone(options.dontTransferCache); if (metrics) { - onStateCloneMetrics(postState, metrics, "stateTransition"); + onStateCloneMetrics(postState, metrics, StateCloneSource.stateTransition); } // State is already a ViewDU, which won't commit changes. 
Equivalent to .setStateCachesAsTransient() @@ -96,7 +114,9 @@ export function stateTransition( // Verify state root if (verifyStateRoot) { - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer(); + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.stateTransition, + }); const stateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); @@ -127,7 +147,7 @@ export function processSlots( let postState = state.clone(epochTransitionCacheOpts?.dontTransferCache); if (metrics) { - onStateCloneMetrics(postState, metrics, "processSlots"); + onStateCloneMetrics(postState, metrics, StateCloneSource.processSlots); } // State is already a ViewDU, which won't commit changes. Equivalent to .setStateCachesAsTransient() @@ -167,7 +187,7 @@ function processSlotsWithTransientCache( let epochTransitionCache: EpochTransitionCache; { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "beforeProcessEpoch"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.beforeProcessEpoch}); epochTransitionCache = beforeProcessEpoch(postState, epochTransitionCacheOpts); timer?.(); } @@ -180,7 +200,7 @@ function processSlotsWithTransientCache( postState.slot++; { - const timer = metrics?.epochTransitionStepTime.startTimer({step: "afterProcessEpoch"}); + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.afterProcessEpoch}); postState.epochCtx.afterProcessEpoch(postState, epochTransitionCache); timer?.(); } diff --git a/packages/state-transition/src/util/blindedBlock.ts b/packages/state-transition/src/util/blindedBlock.ts index 8c271e7fec81..5b6cf42d3cef 100644 --- a/packages/state-transition/src/util/blindedBlock.ts +++ b/packages/state-transition/src/util/blindedBlock.ts @@ -1,24 +1,9 @@ import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq} from "@lodestar/params"; -import { - allForks, - phase0, - Root, - deneb, - ssz, - 
isBlindedBeaconBlock, - isBlindedBlobSidecar, - isSignedBlindedBlockContents, - isExecutionPayloadAndBlobsBundle, -} from "@lodestar/types"; +import {allForks, phase0, Root, deneb, isBlindedBeaconBlock, isExecutionPayloadAndBlobsBundle} from "@lodestar/types"; import {executionPayloadToPayloadHeader} from "./execution.js"; -type ParsedSignedBlindedBlockOrContents = { - signedBlindedBlock: allForks.SignedBlindedBeaconBlock; - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars | null; -}; - export function blindedOrFullBlockHashTreeRoot( config: ChainForkConfig, blindedOrFull: allForks.FullOrBlindedBeaconBlock @@ -30,17 +15,6 @@ export function blindedOrFullBlockHashTreeRoot( config.getForkTypes(blindedOrFull.slot).BeaconBlock.hashTreeRoot(blindedOrFull); } -export function blindedOrFullBlobSidecarHashTreeRoot( - config: ChainForkConfig, - blindedOrFull: allForks.FullOrBlindedBlobSidecar -): Root { - return isBlindedBlobSidecar(blindedOrFull) - ? // Blinded - config.getBlobsForkTypes(blindedOrFull.slot).BlindedBlobSidecar.hashTreeRoot(blindedOrFull) - : // Full - config.getBlobsForkTypes(blindedOrFull.slot).BlobSidecar.hashTreeRoot(blindedOrFull); -} - export function blindedOrFullBlockToHeader( config: ChainForkConfig, blindedOrFull: allForks.FullOrBlindedBeaconBlock @@ -70,13 +44,6 @@ export function beaconBlockToBlinded( return blindedBlock; } -export function blobSidecarsToBlinded(blobSidecars: deneb.BlobSidecars): deneb.BlindedBlobSidecars { - return blobSidecars.map((blobSidecar) => { - const blobRoot = ssz.deneb.Blob.hashTreeRoot(blobSidecar.blob); - return {...blobSidecar, blobRoot} as deneb.BlindedBlobSidecar; - }); -} - export function signedBlindedBlockToFull( signedBlindedBlock: allForks.SignedBlindedBeaconBlock, executionPayload: allForks.ExecutionPayload | null @@ -100,33 +67,6 @@ export function signedBlindedBlockToFull( return signedBlock; } -export function signedBlindedBlobSidecarsToFull( - signedBlindedBlobSidecars: 
deneb.SignedBlindedBlobSidecars, - blobs: deneb.Blobs -): deneb.SignedBlobSidecars { - const signedBlobSidecars = signedBlindedBlobSidecars.map((signedBlindedBlobSidecar, index) => { - const signedBlobSidecar = { - ...signedBlindedBlobSidecar, - message: {...signedBlindedBlobSidecar.message, blob: blobs[index]}, - }; - delete (signedBlobSidecar.message as {blobRoot?: deneb.BlindedBlob}).blobRoot; - return signedBlobSidecar; - }); - return signedBlobSidecars; -} - -export function parseSignedBlindedBlockOrContents( - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents -): ParsedSignedBlindedBlockOrContents { - if (isSignedBlindedBlockContents(signedBlindedBlockOrContents)) { - const signedBlindedBlock = signedBlindedBlockOrContents.signedBlindedBlock; - const signedBlindedBlobSidecars = signedBlindedBlockOrContents.signedBlindedBlobSidecars; - return {signedBlindedBlock, signedBlindedBlobSidecars}; - } else { - return {signedBlindedBlock: signedBlindedBlockOrContents, signedBlindedBlobSidecars: null}; - } -} - export function parseExecutionPayloadAndBlobsBundle( data: allForks.ExecutionPayload | allForks.ExecutionPayloadAndBlobsBundle ): {executionPayload: allForks.ExecutionPayload; blobsBundle: deneb.BlobsBundle | null} { @@ -141,27 +81,23 @@ export function parseExecutionPayloadAndBlobsBundle( } export function reconstructFullBlockOrContents( - {signedBlindedBlock, signedBlindedBlobSidecars}: ParsedSignedBlindedBlockOrContents, - {executionPayload, blobs}: {executionPayload: allForks.ExecutionPayload | null; blobs: deneb.Blobs | null} + signedBlindedBlock: allForks.SignedBlindedBeaconBlock, + { + executionPayload, + contents, + }: { + executionPayload: allForks.ExecutionPayload | null; + contents: deneb.Contents | null; + } ): allForks.SignedBeaconBlockOrContents { const signedBlock = signedBlindedBlockToFull(signedBlindedBlock, executionPayload); - if (signedBlindedBlobSidecars !== null) { + if (contents !== null) { if (executionPayload === 
null) { throw Error("Missing locally produced executionPayload for deneb+ publishBlindedBlock"); } - if (blobs === null) { - throw Error("Missing blobs from the local execution cache"); - } - if (blobs.length !== signedBlindedBlobSidecars.length) { - throw Error( - `Length mismatch signedBlindedBlobSidecars=${signedBlindedBlobSidecars.length} blobs=${blobs.length}` - ); - } - const signedBlobSidecars = signedBlindedBlobSidecarsToFull(signedBlindedBlobSidecars, blobs); - - return {signedBlock, signedBlobSidecars} as allForks.SignedBeaconBlockOrContents; + return {signedBlock, ...contents} as allForks.SignedBeaconBlockOrContents; } else { return signedBlock as allForks.SignedBeaconBlockOrContents; } diff --git a/packages/state-transition/src/util/blobs.ts b/packages/state-transition/src/util/blobs.ts deleted file mode 100644 index 8b6ea84362c4..000000000000 --- a/packages/state-transition/src/util/blobs.ts +++ /dev/null @@ -1,12 +0,0 @@ -import SHA256 from "@chainsafe/as-sha256"; -import {VERSIONED_HASH_VERSION_KZG} from "@lodestar/params"; -import {deneb} from "@lodestar/types"; - -type VersionHash = Uint8Array; - -export function kzgCommitmentToVersionedHash(kzgCommitment: deneb.KZGCommitment): VersionHash { - const hash = SHA256.digest(kzgCommitment); - // Equivalent to `VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]` - hash[0] = VERSIONED_HASH_VERSION_KZG; - return hash; -} diff --git a/packages/state-transition/src/util/blockRoot.ts b/packages/state-transition/src/util/blockRoot.ts index 7aa5de52cdfe..1e1df38ef4fe 100644 --- a/packages/state-transition/src/util/blockRoot.ts +++ b/packages/state-transition/src/util/blockRoot.ts @@ -54,3 +54,15 @@ export function blockToHeader(config: ChainForkConfig, block: allForks.BeaconBlo bodyRoot: config.getForkTypes(block.slot).BeaconBlockBody.hashTreeRoot(block.body), }; } + +export function signedBlockToSignedHeader( + config: ChainForkConfig, + signedBlock: allForks.SignedBeaconBlock +): 
phase0.SignedBeaconBlockHeader { + const message = blockToHeader(config, signedBlock.message); + const signature = signedBlock.signature; + return { + message, + signature, + }; +} diff --git a/packages/state-transition/src/util/index.ts b/packages/state-transition/src/util/index.ts index bbc9bf8a8654..3f2e91da9a77 100644 --- a/packages/state-transition/src/util/index.ts +++ b/packages/state-transition/src/util/index.ts @@ -4,7 +4,6 @@ export * from "./attestation.js"; export * from "./attesterStatus.js"; export * from "./balance.js"; export * from "./blindedBlock.js"; -export * from "./blobs.js"; export * from "./capella.js"; export * from "./execution.js"; export * from "./blockRoot.js"; diff --git a/packages/state-transition/src/util/loadState/index.ts b/packages/state-transition/src/util/loadState/index.ts new file mode 100644 index 000000000000..706de3c11540 --- /dev/null +++ b/packages/state-transition/src/util/loadState/index.ts @@ -0,0 +1 @@ +export {loadState} from "./loadState.js"; diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index f3089f39d913..2891cd3e6216 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,13 +1,14 @@ import {describe, it, expect} from "vitest"; -import {ssz} from "@lodestar/types"; +import {Epoch, ssz, RootHex} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; -import {config} from "@lodestar/config/default"; +import {config as defaultConfig} from "@lodestar/config/default"; import {createBeaconConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; -import {createCachedBeaconState, loadUnfinalizedCachedBeaconState} from "../../src/cache/stateCache.js"; +import {createCachedBeaconState, loadCachedBeaconState} from 
"../../src/cache/stateCache.js"; import {interopPubkeysCached} from "../utils/interop.js"; import {modifyStateSameValidator, newStateWithValidators} from "../utils/capella.js"; +import {EpochShuffling, getShufflingDecisionBlock} from "../../src/util/epochShuffling.js"; describe("CachedBeaconState", () => { it("Clone and mutate", () => { @@ -57,10 +58,11 @@ describe("CachedBeaconState", () => { const pubkeys = interopPubkeysCached(2 * numValidator); const stateView = newStateWithValidators(numValidator); + const config = createBeaconConfig(defaultConfig, stateView.genesisValidatorsRoot); const seedState = createCachedBeaconState( stateView, { - config: createBeaconConfig(config, stateView.genesisValidatorsRoot), + config, pubkey2index: new PubkeyIndexMap(), index2pubkey: [], }, @@ -127,12 +129,49 @@ describe("CachedBeaconState", () => { // confirm loadState() result const stateBytes = state.serialize(); - const newCachedState = loadUnfinalizedCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); const newStateBytes = newCachedState.serialize(); expect(newStateBytes).toEqual(stateBytes); expect(newCachedState.hashTreeRoot()).toEqual(state.hashTreeRoot()); + const shufflingGetter = (shufflingEpoch: Epoch, dependentRoot: RootHex): EpochShuffling | null => { + if ( + shufflingEpoch === seedState.epochCtx.epoch - 1 && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.previousShuffling; + } + + if ( + shufflingEpoch === seedState.epochCtx.epoch && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.currentShuffling; + } + + if ( + shufflingEpoch === seedState.epochCtx.epoch + 1 && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.nextShuffling; + } + + return null; + }; + const 
cachedState = createCachedBeaconState( + state, + { + config, + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true, shufflingGetter} + ); + // validatorCountDelta < 0 is unrealistic and shuffling computation results in a different result + if (validatorCountDelta >= 0) { + expect(newCachedState.epochCtx).toEqual(cachedState.epochCtx); + } - // confirm loadUnfinalizedCachedBeaconState() result + // confirm loadCachedBeaconState() result for (let i = 0; i < newCachedState.validators.length; i++) { expect(newCachedState.epochCtx.pubkey2index.get(newCachedState.validators.get(i).pubkey)).toBe(i); expect(newCachedState.epochCtx.index2pubkey[i].toBytes()).toEqual(pubkeys[i]); diff --git a/packages/state-transition/test/utils/capella.ts b/packages/state-transition/test/utils/capella.ts index e2cdc47b7e1d..7ef9248a5675 100644 --- a/packages/state-transition/test/utils/capella.ts +++ b/packages/state-transition/test/utils/capella.ts @@ -1,7 +1,12 @@ import crypto from "node:crypto"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; -import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, SLOTS_PER_EPOCH} from "@lodestar/params"; +import { + BLS_WITHDRAWAL_PREFIX, + ETH1_ADDRESS_WITHDRAWAL_PREFIX, + SLOTS_PER_EPOCH, + SLOTS_PER_HISTORICAL_ROOT, +} from "@lodestar/params"; import {BeaconStateCapella, CachedBeaconStateCapella} from "../../src/index.js"; import {createCachedBeaconStateTest} from "./state.js"; import {mulberry32} from "./rand.js"; @@ -67,10 +72,17 @@ export function newStateWithValidators(numValidator: number): BeaconStateCapella const capellaStateType = ssz.capella.BeaconState; const stateView = capellaStateType.defaultViewDU(); stateView.slot = config.CAPELLA_FORK_EPOCH * SLOTS_PER_EPOCH + 100; + for (let i = 0; i < SLOTS_PER_HISTORICAL_ROOT; i++) { + stateView.blockRoots.set(i, crypto.randomBytes(32)); + } for (let i = 0; i < numValidator; i++) { const validator = 
ssz.phase0.Validator.defaultViewDU(); validator.pubkey = pubkeys[i]; + // make all validators active + validator.activationEpoch = 0; + validator.exitEpoch = Infinity; + validator.effectiveBalance = 32e9; stateView.validators.push(validator); stateView.balances.push(32); stateView.inactivityScores.push(0); @@ -85,8 +97,9 @@ export function newStateWithValidators(numValidator: number): BeaconStateCapella * Modify a state without changing number of validators */ export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconStateCapella { + const slotDiff = 10; const state = seedState.clone(); - state.slot = seedState.slot + 10; + state.slot = seedState.slot + slotDiff; state.latestBlockHeader = ssz.phase0.BeaconBlockHeader.toViewDU({ slot: state.slot, proposerIndex: 0, @@ -94,6 +107,9 @@ export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconS stateRoot: state.hashTreeRoot(), bodyRoot: ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()), }); + for (let i = 1; i <= slotDiff; i++) { + state.blockRoots.set((seedState.slot + i) % SLOTS_PER_HISTORICAL_ROOT, crypto.randomBytes(32)); + } state.blockRoots.set(0, crypto.randomBytes(32)); state.stateRoots.set(0, crypto.randomBytes(32)); state.historicalRoots.push(crypto.randomBytes(32)); diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index 586be26fccab..61914ba91676 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -1,7 +1,7 @@ { "name": "@lodestar/test-utils", "private": true, - "version": "1.13.0", + "version": "1.14.0", "description": "Test utilities reused across other packages", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -62,9 +62,9 @@ ], "dependencies": { "@chainsafe/bls": "7.1.1", - "@chainsafe/bls-keystore": "^2.0.0", - "@lodestar/params": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@chainsafe/bls-keystore": "^3.0.0", + "@lodestar/params": "^1.14.0", + 
"@lodestar/utils": "^1.14.0", "axios": "^1.3.4", "chai": "^4.3.7", "mocha": "^10.2.0", diff --git a/packages/test-utils/src/mocha.ts b/packages/test-utils/src/mocha.ts index edf8053a60df..7b8c10ad5342 100644 --- a/packages/test-utils/src/mocha.ts +++ b/packages/test-utils/src/mocha.ts @@ -82,22 +82,6 @@ function wrapLogWriter(...writers: [writer: object, ...keys: string[]][]): { }; } -export function stubLoggerForProcessStd( - logger: T -): T & {getLogs: () => string[]; restoreStubs: () => void} { - const {flush: flushStdout, restore: restoreStdout} = wrapLogWriter( - [process.stdout, "write"], - [process.stderr, "write"] - ); - - return Object.assign(logger, { - getLogs: () => flushStdout(), - restoreStubs: () => { - restoreStdout(); - }, - }); -} - export function stubLoggerForConsole( logger: T ): T & {getLogs: () => string[]; restoreStubs: () => void} { diff --git a/packages/types/karma.config.cjs b/packages/types/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/types/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/types/package.json b/packages/types/package.json index 5c7a6599d890..9d8859ecaf05 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": { ".": { @@ -61,14 +61,19 @@ "check-types": "tsc", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", - "test:unit": "mocha 'test/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", + "test:constants:minimal": "LODESTAR_PRESET=minimal vitest --run --dir test/constants/ --coverage", + 
"test:constants:mainnet": "LODESTAR_PRESET=mainnet vitest --run --dir test/constants/ --coverage", + "test:unit": "wrapper() { yarn test:constants:minimal $@ && yarn test:constants:mainnet $@ && vitest --run --dir test/unit/ --coverage $@; }; wrapper", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", "dependencies": { "@chainsafe/ssz": "^0.14.0", - "@lodestar/params": "^1.13.0" + "@lodestar/params": "^1.14.0" }, "keywords": [ "ethereum", diff --git a/packages/types/src/allForks/sszTypes.ts b/packages/types/src/allForks/sszTypes.ts index 463e5c57bd0d..7174bc52e89c 100644 --- a/packages/types/src/allForks/sszTypes.ts +++ b/packages/types/src/allForks/sszTypes.ts @@ -155,7 +155,6 @@ export const allForksLightClient = { export const allForksBlobs = { deneb: { BlobSidecar: deneb.BlobSidecar, - BlindedBlobSidecar: deneb.BlindedBlobSidecar, ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle, }, }; diff --git a/packages/types/src/allForks/types.ts b/packages/types/src/allForks/types.ts index 01c597b8a245..59768a5a3308 100644 --- a/packages/types/src/allForks/types.ts +++ b/packages/types/src/allForks/types.ts @@ -68,31 +68,17 @@ export type FullOrBlindedBeaconBlockBody = BeaconBlockBody | BlindedBeaconBlockB export type FullOrBlindedBeaconBlock = BeaconBlock | BlindedBeaconBlock; export type FullOrBlindedSignedBeaconBlock = SignedBeaconBlock | SignedBlindedBeaconBlock; -export type FullOrBlindedBlobSidecar = deneb.BlobSidecar | deneb.BlindedBlobSidecar; -export type 
FullOrBlindedSignedBlobSidecar = deneb.SignedBlobSidecar | deneb.SignedBlindedBlobSidecar; - -export type FullOrBlindedBlobSidecars = deneb.BlobSidecars | deneb.BlindedBlobSidecars; -export type BlockContents = {block: BeaconBlock; blobSidecars: deneb.BlobSidecars}; +export type BlockContents = {block: BeaconBlock; kzgProofs: deneb.KZGProofs; blobs: deneb.Blobs}; export type SignedBlockContents = { signedBlock: SignedBeaconBlock; - signedBlobSidecars: deneb.SignedBlobSidecars; + kzgProofs: deneb.KZGProofs; + blobs: deneb.Blobs; }; -export type BlindedBlockContents = { - blindedBlock: BlindedBeaconBlock; - blindedBlobSidecars: deneb.BlindedBlobSidecars; -}; -export type SignedBlindedBlockContents = { - signedBlindedBlock: SignedBlindedBeaconBlock; - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars; -}; - -export type FullOrBlindedBlockContents = BlockContents | BlindedBlockContents; -export type FullOrBlindedBeaconBlockOrContents = FullOrBlindedBeaconBlock | FullOrBlindedBlockContents; export type BeaconBlockOrContents = BeaconBlock | BlockContents; -export type BlindedBeaconBlockOrContents = BlindedBeaconBlock | BlindedBlockContents; export type SignedBeaconBlockOrContents = SignedBeaconBlock | SignedBlockContents; -export type SignedBlindedBeaconBlockOrContents = SignedBlindedBeaconBlock | SignedBlindedBlockContents; + +export type FullOrBlindedBeaconBlockOrContents = BeaconBlockOrContents | BlindedBeaconBlock; export type BuilderBid = bellatrix.BuilderBid | capella.BuilderBid | deneb.BuilderBid; export type SignedBuilderBid = bellatrix.SignedBuilderBid | capella.SignedBuilderBid | deneb.SignedBuilderBid; @@ -308,6 +294,5 @@ export type AllForksLightClientSSZTypes = { export type AllForksBlobsSSZTypes = { BlobSidecar: AllForksTypeOf; - BlindedBlobSidecar: AllForksTypeOf; ExecutionPayloadAndBlobsBundle: AllForksTypeOf; }; diff --git a/packages/types/src/deneb/sszTypes.ts b/packages/types/src/deneb/sszTypes.ts index 96509d1d898b..b39e5f6281e1 100644 --- 
a/packages/types/src/deneb/sszTypes.ts +++ b/packages/types/src/deneb/sszTypes.ts @@ -8,6 +8,7 @@ import { BLOCK_BODY_EXECUTION_PAYLOAD_DEPTH as EXECUTION_PAYLOAD_DEPTH, EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SLOTS_PER_EPOCH, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, } from "@lodestar/params"; import {ssz as primitiveSsz} from "../primitive/index.js"; import {ssz as phase0Ssz} from "../phase0/index.js"; @@ -15,20 +16,8 @@ import {ssz as altairSsz} from "../altair/index.js"; import {ssz as bellatrixSsz} from "../bellatrix/index.js"; import {ssz as capellaSsz} from "../capella/index.js"; -const { - UintNum64, - Slot, - Root, - BLSSignature, - UintBn64, - UintBn256, - Bytes32, - Bytes48, - Bytes96, - BLSPubkey, - BlobIndex, - ValidatorIndex, -} = primitiveSsz; +const {UintNum64, Slot, Root, BLSSignature, UintBn64, UintBn256, Bytes32, Bytes48, Bytes96, BLSPubkey, BlobIndex} = + primitiveSsz; // Polynomial commitments // https://github.com/ethereum/consensus-specs/blob/dev/specs/eip4844/polynomial-commitments.md @@ -124,31 +113,22 @@ export const SignedBeaconBlock = new ContainerType( {typeName: "SignedBeaconBlock", jsonCase: "eth2"} ); +export const KzgCommitmentInclusionProof = new VectorCompositeType(Bytes32, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH); + export const BlobSidecar = new ContainerType( { - blockRoot: Root, index: BlobIndex, - slot: Slot, - blockParentRoot: Root, - proposerIndex: ValidatorIndex, blob: Blob, kzgCommitment: KZGCommitment, kzgProof: KZGProof, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentInclusionProof: KzgCommitmentInclusionProof, }, {typeName: "BlobSidecar", jsonCase: "eth2"} ); export const BlobSidecars = new ListCompositeType(BlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); -export const SignedBlobSidecar = new ContainerType( - { - message: BlobSidecar, - signature: BLSSignature, - }, - {typeName: "SignedBlobSidecar", jsonCase: "eth2"} -); -export const SignedBlobSidecars = new ListCompositeType(SignedBlobSidecar, 
MAX_BLOB_COMMITMENTS_PER_BLOCK); - export const BlobsBundle = new ContainerType( { commitments: BlobKzgCommitments, @@ -158,35 +138,6 @@ export const BlobsBundle = new ContainerType( {typeName: "BlobsBundle", jsonCase: "eth2"} ); -export const BlindedBlobSidecar = new ContainerType( - { - blockRoot: Root, - index: BlobIndex, - slot: Slot, - blockParentRoot: Root, - proposerIndex: ValidatorIndex, - blobRoot: BlindedBlob, - kzgCommitment: KZGCommitment, - kzgProof: KZGProof, - }, - {typeName: "BlindedBlobSidecar", jsonCase: "eth2"} -); - -export const BlindedBlobSidecars = new ListCompositeType(BlindedBlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); - -export const SignedBlindedBlobSidecar = new ContainerType( - { - message: BlindedBlobSidecar, - signature: BLSSignature, - }, - {typeName: "SignedBlindedBlobSidecar", jsonCase: "eth2"} -); - -export const SignedBlindedBlobSidecars = new ListCompositeType( - SignedBlindedBlobSidecar, - MAX_BLOB_COMMITMENTS_PER_BLOCK -); - export const BlindedBeaconBlockBody = new ContainerType( { ...altairSsz.BeaconBlockBody.fields, @@ -213,19 +164,10 @@ export const SignedBlindedBeaconBlock = new ContainerType( {typeName: "SignedBlindedBeaconBlock", jsonCase: "eth2"} ); -export const BlindedBlobsBundle = new ContainerType( - { - commitments: BlobKzgCommitments, - proofs: KZGProofs, - blobRoots: BlindedBlobs, - }, - {typeName: "BlindedBlobsBundle", jsonCase: "eth2"} -); - export const BuilderBid = new ContainerType( { header: ExecutionPayloadHeader, - blindedBlobsBundle: BlindedBlobsBundle, + blobKzgCommitments: BlobKzgCommitments, value: UintBn256, pubkey: BLSPubkey, }, diff --git a/packages/types/src/deneb/types.ts b/packages/types/src/deneb/types.ts index 1d6eb5fca5aa..0921ae2428e7 100644 --- a/packages/types/src/deneb/types.ts +++ b/packages/types/src/deneb/types.ts @@ -1,4 +1,5 @@ import {ValueOf} from "@chainsafe/ssz"; +import {BlockContents} from "../allForks/types.js"; import * as ssz from "./sszTypes.js"; export type KZGProof = 
ValueOf; @@ -6,19 +7,12 @@ export type KZGCommitment = ValueOf; export type Blob = ValueOf; export type Blobs = ValueOf; -export type BlindedBlob = ValueOf; -export type BlindedBlobs = ValueOf; export type BlobSidecar = ValueOf; export type BlobSidecars = ValueOf; -export type BlindedBlobSidecar = ValueOf; -export type BlindedBlobSidecars = ValueOf; -export type SignedBlobSidecar = ValueOf; -export type SignedBlobSidecars = ValueOf; -export type SignedBlindedBlobSidecar = ValueOf; -export type SignedBlindedBlobSidecars = ValueOf; export type ExecutionPayloadAndBlobsBundle = ValueOf; export type BlobsBundle = ValueOf; +export type KzgCommitmentInclusionProof = ValueOf; export type BlobKzgCommitments = ValueOf; export type KZGProofs = ValueOf; export type BLSFieldElement = ValueOf; @@ -42,7 +36,6 @@ export type SignedBlindedBeaconBlock = ValueOf; export type BuilderBid = ValueOf; export type SignedBuilderBid = ValueOf; export type SSEPayloadAttributes = ValueOf; @@ -53,3 +46,6 @@ export type LightClientUpdate = ValueOf; export type LightClientFinalityUpdate = ValueOf; export type LightClientOptimisticUpdate = ValueOf; export type LightClientStore = ValueOf; + +export type ProducedBlobSidecars = Omit; +export type Contents = Omit; diff --git a/packages/types/src/utils/typeguards.ts b/packages/types/src/utils/typeguards.ts index 0b9bee97d17a..781738c3dbad 100644 --- a/packages/types/src/utils/typeguards.ts +++ b/packages/types/src/utils/typeguards.ts @@ -5,21 +5,15 @@ import { FullOrBlindedBeaconBlockBody, FullOrBlindedExecutionPayload, ExecutionPayloadHeader, - FullOrBlindedBlobSidecar, - FullOrBlindedSignedBlobSidecar, BlindedBeaconBlockBody, BlindedBeaconBlock, BlockContents, - SignedBlindedBlockContents, SignedBlindedBeaconBlock, - BlindedBlockContents, SignedBlockContents, SignedBeaconBlock, - SignedBlindedBeaconBlockOrContents, ExecutionPayload, ExecutionPayloadAndBlobsBundle, } from "../allForks/types.js"; -import {ts as deneb} from "../deneb/index.js"; export 
function isBlindedExecution(payload: FullOrBlindedExecutionPayload): payload is ExecutionPayloadHeader { // we just check transactionsRoot for determinging as it the base field @@ -42,32 +36,12 @@ export function isBlindedSignedBeaconBlock( return (signedBlock as SignedBlindedBeaconBlock).message.body.executionPayloadHeader !== undefined; } -export function isBlindedBlobSidecar(blob: FullOrBlindedBlobSidecar): blob is deneb.BlindedBlobSidecar { - return (blob as deneb.BlindedBlobSidecar).blobRoot !== undefined; -} - -export function isBlindedSignedBlobSidecar( - blob: FullOrBlindedSignedBlobSidecar -): blob is deneb.SignedBlindedBlobSidecar { - return (blob as deneb.SignedBlindedBlobSidecar).message.blobRoot !== undefined; -} - export function isBlockContents(data: FullOrBlindedBeaconBlockOrContents): data is BlockContents { - return (data as BlockContents).blobSidecars !== undefined; + return (data as BlockContents).kzgProofs !== undefined; } export function isSignedBlockContents(data: SignedBeaconBlock | SignedBlockContents): data is SignedBlockContents { - return (data as SignedBlockContents).signedBlobSidecars !== undefined; -} - -export function isBlindedBlockContents(data: FullOrBlindedBeaconBlockOrContents): data is BlindedBlockContents { - return (data as BlindedBlockContents).blindedBlobSidecars !== undefined; -} - -export function isSignedBlindedBlockContents( - data: SignedBlindedBeaconBlockOrContents -): data is SignedBlindedBlockContents { - return (data as SignedBlindedBlockContents).signedBlindedBlobSidecars !== undefined; + return (data as SignedBlockContents).kzgProofs !== undefined; } export function isExecutionPayloadAndBlobsBundle( diff --git a/packages/types/test/constants/blobs.test.ts b/packages/types/test/constants/blobs.test.ts new file mode 100644 index 000000000000..93193463e76b --- /dev/null +++ b/packages/types/test/constants/blobs.test.ts @@ -0,0 +1,25 @@ +import {describe, it, expect} from "vitest"; +import * as constants from 
"@lodestar/params"; +import {ssz} from "../../src/index.js"; + +// NOTE: This test is here and not in lodestar-params, to prevent lodestar-params depending on SSZ +// Since lodestar-params and lodestar-types are in the same mono-repo, running this test here is enough +// guarantee that these constants are correct. + +describe(`${constants.ACTIVE_PRESET}/ blobs pre-computed constants`, () => { + const BLOBSIDECAR_FIXED_SIZE = ssz.deneb.BlobSidecars.elementType.fixedSize; + const KZG_COMMITMENT_GINDEX0 = Number(ssz.deneb.BeaconBlockBody.getPathInfo(["blobKzgCommitments", 0]).gindex); + const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** constants.KZG_COMMITMENT_INCLUSION_PROOF_DEPTH; + + const correctConstants = { + BLOBSIDECAR_FIXED_SIZE, + KZG_COMMITMENT_GINDEX0, + KZG_COMMITMENT_SUBTREE_INDEX0, + }; + + for (const [key, expectedValue] of Object.entries(correctConstants)) { + it(key, () => { + expect((constants as unknown as Record)[key]).to.equal(expectedValue); + }); + } +}); diff --git a/packages/types/test/unit/constants.test.ts b/packages/types/test/constants/lightclient.test.ts similarity index 90% rename from packages/types/test/unit/constants.test.ts rename to packages/types/test/constants/lightclient.test.ts index 09cbec8bf1b5..567cc7c3bd17 100644 --- a/packages/types/test/unit/constants.test.ts +++ b/packages/types/test/constants/lightclient.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import * as constants from "@lodestar/params"; import {ssz} from "../../src/index.js"; @@ -6,7 +6,7 @@ import {ssz} from "../../src/index.js"; // Since lodestar-params and lodestar-types are in the same mono-repo, running this test here is enough // guarantee that these constants are correct. 
-describe("Lightclient pre-computed constants", () => { +describe(`${constants.ACTIVE_PRESET}/ Lightclient pre-computed constants`, () => { const FINALIZED_ROOT_GINDEX = bnToNum(ssz.altair.BeaconState.getPathInfo(["finalizedCheckpoint", "root"]).gindex); const FINALIZED_ROOT_DEPTH = floorlog2(FINALIZED_ROOT_GINDEX); const FINALIZED_ROOT_INDEX = FINALIZED_ROOT_GINDEX % 2 ** FINALIZED_ROOT_DEPTH; @@ -26,7 +26,7 @@ describe("Lightclient pre-computed constants", () => { for (const [key, expectedValue] of Object.entries(correctConstants)) { it(key, () => { - expect((constants as unknown as Record)[key]).to.equal(expectedValue); + expect((constants as unknown as Record)[key]).toBe(expectedValue); }); } }); diff --git a/packages/types/test/globalSetup.ts b/packages/types/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/types/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/types/test/unit/ssz.test.ts b/packages/types/test/unit/ssz.test.ts index 80ddcb12b893..b5c972a8f471 100644 --- a/packages/types/test/unit/ssz.test.ts +++ b/packages/types/test/unit/ssz.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {ssz} from "../../src/index.js"; describe("size", function () { @@ -6,8 +6,8 @@ describe("size", function () { const minSize = ssz.phase0.BeaconState.minSize; const maxSize = ssz.phase0.BeaconState.maxSize; // https://gist.github.com/protolambda/db75c7faa1e94f2464787a480e5d613e - expect(minSize).to.be.equal(2687377); - expect(maxSize).to.be.equal(141837543039377); + expect(minSize).toBe(2687377); + expect(maxSize).toBe(141837543039377); }); }); @@ -24,7 +24,7 @@ describe("container serialization/deserialization field casing(s)", function () const result = ssz.phase0.AttesterSlashing.fromJson(json); const back = ssz.phase0.AttesterSlashing.toJson(result); - 
expect(back).to.be.deep.equal(json); + expect(back).toEqual(json); }); it("ProposerSlashing", function () { @@ -39,6 +39,6 @@ describe("container serialization/deserialization field casing(s)", function () const result = ssz.phase0.ProposerSlashing.fromJson(json); const back = ssz.phase0.ProposerSlashing.toJson(result); - expect(back).to.be.deep.equal(json); + expect(back).toEqual(json); }); }); diff --git a/packages/types/tsconfig.e2e.json b/packages/types/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/types/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/types/vitest.browser.config.ts b/packages/types/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/types/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/types/vitest.config.ts b/packages/types/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/types/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/types/webpack.test.config.cjs b/packages/types/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/types/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - 
-module.exports = { - ...webpackConfig, -}; diff --git a/packages/utils/.mocharc.yml b/packages/utils/.mocharc.yml deleted file mode 100644 index 8b4eb53ed37a..000000000000 --- a/packages/utils/.mocharc.yml +++ /dev/null @@ -1,3 +0,0 @@ -colors: true -node-option: - - "loader=ts-node/esm" diff --git a/packages/utils/karma.config.cjs b/packages/utils/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/utils/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/utils/package.json b/packages/utils/package.json index 8490ba2fa89d..9f526582483e 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.13.0", + "version": "1.14.0", "type": "module", "exports": "./lib/index.js", "files": [ @@ -27,12 +27,15 @@ "build:watch": "yarn run build --watch", "build:release": "yarn clean && yarn build", "check-build": "node -e \"(async function() { await import('./lib/index.js') })()\"", - "check-types": "tsc", + "check-types": "tsc && vitest --run --typecheck --dir test/types/", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "mocha 'test/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir 
test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", @@ -47,6 +50,7 @@ "devDependencies": { "@types/js-yaml": "^4.0.5", "@types/triple-beam": "^1.3.2", + "prom-client": "^15.1.0", "triple-beam": "^1.3.0" }, "keywords": [ diff --git a/packages/utils/src/ethConversion.ts b/packages/utils/src/ethConversion.ts new file mode 100644 index 000000000000..7aa8fa0cc63c --- /dev/null +++ b/packages/utils/src/ethConversion.ts @@ -0,0 +1,12 @@ +export const ETH_TO_GWEI = BigInt(10 ** 9); +export const GWEI_TO_WEI = BigInt(10 ** 9); +export const ETH_TO_WEI = ETH_TO_GWEI * GWEI_TO_WEI; + +type EthNumeric = bigint; + +/** + * Convert gwei to wei. + */ +export function gweiToWei(gwei: EthNumeric): EthNumeric { + return gwei * GWEI_TO_WEI; +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 9ecb78e62533..fcff789f9c56 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -8,6 +8,7 @@ export * from "./format.js"; export * from "./logger.js"; export * from "./map.js"; export * from "./math.js"; +export * from "./metrics.js"; export * from "./objects.js"; export {retry, type RetryOptions} from "./retry.js"; export * from "./notNullish.js"; @@ -19,3 +20,4 @@ export * from "./url.js"; export * from "./verifyMerkleBranch.js"; export * from "./promise.js"; export * from "./waitFor.js"; +export * from "./ethConversion.js"; diff --git a/packages/utils/src/metrics.ts b/packages/utils/src/metrics.ts new file mode 100644 index 000000000000..a25518280ee1 --- /dev/null +++ b/packages/utils/src/metrics.ts @@ -0,0 +1,71 @@ +export type NoLabels = Record; +export type LabelsGeneric = Record; +export type LabelKeys = Extract; +export type CollectFn = (metric: Gauge) => void; + +export interface Gauge { + inc: NoLabels extends Labels ? 
(value?: number) => void : (labels: Labels, value?: number) => void; + dec: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void; + set: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void; + + collect?(): void; +} + +export interface GaugeExtra extends Omit, "collect"> { + addCollect(collectFn: CollectFn): void; +} + +export interface Histogram { + startTimer(): NoLabels extends Labels ? () => number : (labels: Labels) => number; + startTimer>( + labels?: NoLabels extends Labels ? never : L + ): keyof Omit extends never ? () => number : (labels: Omit) => number; + + observe: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void; + + reset(): void; +} + +export interface AvgMinMax { + addGetValuesFn(getValuesFn: () => number[]): void; + + set: NoLabels extends Labels ? (values: number[]) => void : (labels: Labels, values: number[]) => void; +} + +export interface Counter { + inc: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void; +} + +export type GaugeConfig = { + name: string; + help: string; +} & (NoLabels extends Labels ? 
{labelNames?: never} : {labelNames: [LabelKeys, ...LabelKeys[]]}); + +export type HistogramConfig = GaugeConfig & { + buckets?: number[]; +}; + +export type AvgMinMaxConfig = GaugeConfig; + +export type CounterConfig = GaugeConfig; + +export type StaticConfig = { + name: GaugeConfig["name"]; + help: GaugeConfig["help"]; + value: Record, string>; +}; + +export interface MetricsRegister { + gauge(config: GaugeConfig): Gauge; + histogram(config: HistogramConfig): Histogram; + counter(config: CounterConfig): Counter; +} + +export interface MetricsRegisterExtra extends MetricsRegister { + gauge(config: GaugeConfig): GaugeExtra; +} + +export interface MetricsRegisterCustom extends MetricsRegisterExtra { + avgMinMax(config: AvgMinMaxConfig): AvgMinMax; + static(config: StaticConfig): void; +} diff --git a/packages/utils/test/globalSetup.ts b/packages/utils/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/utils/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/utils/test/setup.ts b/packages/utils/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/utils/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/utils/test/types/metrics.test-d.ts b/packages/utils/test/types/metrics.test-d.ts new file mode 100644 index 000000000000..2f008618e648 --- /dev/null +++ b/packages/utils/test/types/metrics.test-d.ts @@ -0,0 +1,114 @@ +import {describe, it, expectTypeOf} from "vitest"; +import {Counter as PromCounter, Gauge as PromGauge, Histogram as PromHistogram} from "prom-client"; +import {Counter, Gauge, Histogram, MetricsRegister} from "../../src/metrics.js"; + +describe("Metric types", () => { + type Labels = {label: string}; + 
type MultipleLabels = {label1: string; label2: string}; + + describe("MetricsRegister", () => { + const register = {} as MetricsRegister; + + it("should require name and help to be defined on each metric", () => { + expectTypeOf(register.gauge).parameter(0).toHaveProperty("name").toBeString(); + expectTypeOf(register.gauge).parameter(0).toHaveProperty("help").toBeString(); + }); + + it("should require to set labelNames if metric has defined labels", () => { + expectTypeOf(register.gauge) + .parameter(0) + .toHaveProperty("labelNames") + .toMatchTypeOf<"label"[]>(); + + expectTypeOf(register.gauge) + .parameter(0) + .toHaveProperty("labelNames") + .toMatchTypeOf<("label1" | "label2")[]>(); + }); + + it("should not require to set labelNames if metric has no labels", () => { + expectTypeOf(register.gauge).parameter(0).toHaveProperty("labelNames").toEqualTypeOf(); + }); + }); + + describe("Gauge", () => { + it("should be compatible with prom-client type", () => { + expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const gauge = {} as Gauge; + + expectTypeOf(gauge.inc).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + expectTypeOf(gauge.dec).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + expectTypeOf(gauge.set).toEqualTypeOf<(labels: Labels, value: number) => void>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const gauge = {} as Gauge; + + expectTypeOf(gauge.inc).toEqualTypeOf<(value?: number | undefined) => void>(); + expectTypeOf(gauge.dec).toEqualTypeOf<(value?: number | undefined) => void>(); + expectTypeOf(gauge.set).toEqualTypeOf<(value: number) => void>(); + }); + }); + + describe("Histogram", () => { + it("should be compatible with prom-client type", () => { + expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const histogram = {} as Histogram; 
+ + expectTypeOf(histogram.startTimer).toMatchTypeOf<(labels: Labels) => () => number>(); + expectTypeOf(histogram.observe).toEqualTypeOf<(labels: Labels, value: number) => void>(); + }); + + it("should require to set labels in timer if not set in startTimer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer(); + expectTypeOf(timer).toEqualTypeOf<(labels: MultipleLabels) => number>(); + }); + + it("should not require to set labels in timer if already set in startTimer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer({label1: "value1", label2: "label2"}); + expectTypeOf(timer).toEqualTypeOf<() => number>(); + }); + + it("should allow to set labels in either startTimer or timer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer({label1: "value1"}); + expectTypeOf(timer).toEqualTypeOf<(labels: {label2: string}) => number>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const histogram = {} as Histogram; + + expectTypeOf(histogram.startTimer).toMatchTypeOf<() => () => number>(); + expectTypeOf(histogram.observe).toEqualTypeOf<(value: number) => void>(); + }); + }); + + describe("Counter", () => { + it("should be compatible with prom-client type", () => { + expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const counter = {} as Counter; + + expectTypeOf(counter.inc).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const counter = {} as Counter; + + expectTypeOf(counter.inc).toEqualTypeOf<(value?: number | undefined) => void>(); + }); + }); +}); diff --git a/packages/utils/test/unit/assert.test.ts b/packages/utils/test/unit/assert.test.ts index e20595b69cfc..0555bcbd01a0 100644 --- a/packages/utils/test/unit/assert.test.ts +++ 
b/packages/utils/test/unit/assert.test.ts @@ -1,23 +1,22 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {assert} from "../../src/index.js"; describe("assert", () => { describe("true", () => { it("Should not throw with true", () => { - expect(() => assert.true(true)).to.not.throw(); + expect(() => assert.true(true)).not.toThrow(); }); it("Should throw with false", () => { - expect(() => assert.true(false, "something must be valid")).to.throw("something must be valid"); + expect(() => assert.true(false, "something must be valid")).toThrow("something must be valid"); }); }); describe("equal with custom message", () => { it("Should not throw with equal values", () => { - expect(() => assert.equal(1, 1)).to.not.throw(); + expect(() => assert.equal(1, 1)).not.toThrow(); }); it("Should throw with different values", () => { - expect(() => assert.equal(1, 2, "something must be equal")).to.throw("something must be equal: 1 === 2"); + expect(() => assert.equal(1, 2, "something must be equal")).toThrow("something must be equal: 1 === 2"); }); }); @@ -51,9 +50,9 @@ describe("assert", () => { for (const {op, args, ok} of cases) { it(`assert ${args[0]} ${op} ${args[1]} = ${ok}`, () => { if (ok) { - expect(() => assert[op](...args)).to.not.throw(); + expect(() => assert[op](...args)).not.toThrow(); } else { - expect(() => assert[op](...args)).to.throw(); + expect(() => assert[op](...args)).toThrow(); } }); } diff --git a/packages/utils/test/unit/base64.test.ts b/packages/utils/test/unit/base64.test.ts index 38ccd77bafe8..7c68e84f4c3e 100644 --- a/packages/utils/test/unit/base64.test.ts +++ b/packages/utils/test/unit/base64.test.ts @@ -1,15 +1,14 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {toBase64, fromBase64} from "../../src/index.js"; describe("toBase64", () => { it("should encode UTF-8 string as base64 string", () => { - 
expect(toBase64("user:password")).to.be.equal("dXNlcjpwYXNzd29yZA=="); + expect(toBase64("user:password")).toBe("dXNlcjpwYXNzd29yZA=="); }); }); describe("fromBase64", () => { it("should decode UTF-8 string from base64 string", () => { - expect(fromBase64("dXNlcjpwYXNzd29yZA==")).to.be.equal("user:password"); + expect(fromBase64("dXNlcjpwYXNzd29yZA==")).toBe("user:password"); }); }); diff --git a/packages/utils/test/unit/bytes.test.ts b/packages/utils/test/unit/bytes.test.ts index f47e4c7ac3ed..8410e667187a 100644 --- a/packages/utils/test/unit/bytes.test.ts +++ b/packages/utils/test/unit/bytes.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {assert, expect} from "chai"; +import {describe, it, expect} from "vitest"; import {intToBytes, bytesToInt, toHex, fromHex, toHexString} from "../../src/index.js"; describe("intToBytes", () => { @@ -27,7 +26,7 @@ describe("intToBytes", () => { const type = typeof input; const length = input[1]; it(`should correctly serialize ${type} to bytes length ${length}`, () => { - assert(intToBytes(input[0], input[1]).equals(output)); + expect(intToBytes(input[0], input[1])).toEqual(output); }); } }); @@ -43,7 +42,7 @@ describe("bytesToInt", () => { ]; for (const {input, output} of testCases) { it(`should produce ${output}`, () => { - expect(bytesToInt(input)).to.be.equal(output); + expect(bytesToInt(input)).toBe(output); }); } }); @@ -57,7 +56,7 @@ describe("toHex", () => { ]; for (const {input, output} of testCases) { it(`should convert Uint8Array to hex string ${output}`, () => { - expect(toHex(input)).to.be.equal(output); + expect(toHex(input)).toBe(output); }); } }); @@ -77,7 +76,7 @@ describe("fromHex", () => { for (const {input, output} of testCases) { it(`should convert hex string ${input} to Uint8Array`, () => { - expect(fromHex(input)).to.deep.equal(output); + expect(fromHex(input)).toEqual(output); }); } }); @@ -94,7 +93,7 @@ describe("toHexString", () => { for (const {input, output} of testCases) { it(`should convert 
Uint8Array to hex string ${output}`, () => { - expect(toHexString(input)).to.be.equal(output); + expect(toHexString(input)).toBe(output); }); } }); diff --git a/packages/utils/test/unit/err.test.ts b/packages/utils/test/unit/err.test.ts index 81bfd505ffc0..a4b30ee65d73 100644 --- a/packages/utils/test/unit/err.test.ts +++ b/packages/utils/test/unit/err.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Err, isErr, mapOkResults, mapOkResultsAsync, Result} from "../../src/err.js"; import {expectDeepEquals, expectEquals} from "../utils/chai.js"; @@ -46,7 +46,7 @@ describe("Result Err", () => { }); it("throw for different length", () => { - expect(() => mapOkResults([], () => [0])).to.throw(); + expect(() => mapOkResults([], () => [0])).toThrow(); }); it("num to string mixed results", () => { diff --git a/packages/utils/test/unit/math.test.ts b/packages/utils/test/unit/math.test.ts index 526f98ac1f1a..6827fea2bbb0 100644 --- a/packages/utils/test/unit/math.test.ts +++ b/packages/utils/test/unit/math.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {bigIntMin, bigIntMax, intDiv, intSqrt, bigIntSqrt} from "../../src/index.js"; describe("util/maths", function () { @@ -8,13 +7,13 @@ describe("util/maths", function () { const a = BigInt(1); const b = BigInt(2); const result = bigIntMin(a, b); - assert.equal(result, a, "Should have returned a!"); + expect(result).toBe(a); }); it("if b is lt should return b", () => { const a = BigInt(3); const b = BigInt(2); const result = bigIntMin(a, b); - assert.equal(result, b, "Should have returned b!"); + expect(result).toBe(b); }); }); @@ -23,78 +22,78 @@ describe("util/maths", function () { const a = BigInt(2); const b = BigInt(1); const result = bigIntMax(a, b); - assert.equal(result, a, "Should have returned a!"); + expect(result).toBe(a); }); it("if b is gt should return b", () => { const a = 
BigInt(2); const b = BigInt(3); const result = bigIntMax(a, b); - assert.equal(result, b, "Should have returned b!"); + expect(result).toBe(b); }); }); describe("intDiv", () => { it("should divide whole number", () => { const result = intDiv(6, 3); - assert.equal(result, 2, "Should have returned 2!"); + expect(result).toBe(2); }); it("should round less division", () => { const result = intDiv(9, 8); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); }); describe("intSqrt", () => { it("0 should return 0", () => { const result = intSqrt(0); - assert.equal(result, 0, "Should have returned 0!"); + expect(result).toBe(0); }); it("1 should return 1", () => { const result = intSqrt(1); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); it("3 should return 1", () => { const result = intSqrt(3); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); it("4 should return 2", () => { const result = intSqrt(4); - assert.equal(result, 2, "Should have returned 2!"); + expect(result).toBe(2); }); it("16 should return 4", () => { const result = intSqrt(16); - assert.equal(result, 4, "Should have returned 4!"); + expect(result).toBe(4); }); it("31 should return 5", () => { const result = intSqrt(31); - assert.equal(result, 5, "Should have returned 5!"); + expect(result).toBe(5); }); }); describe("bigIntSqrt", () => { it("0 should return 0", () => { const result = bigIntSqrt(BigInt(0)); - assert.equal(result.toString(), BigInt(0).toString(), "Should have returned 0!"); + expect(result.toString()).toBe(BigInt(0).toString()); }); it("1 should return 1", () => { const result = bigIntSqrt(BigInt(1)); - assert.equal(result.toString(), BigInt(1).toString(), "Should have returned 1!"); + expect(result.toString()).toBe(BigInt(1).toString()); }); it("3 should return 1", () => { const result = bigIntSqrt(BigInt(3)); - assert.equal(result.toString(), BigInt(1).toString(), "Should have returned 1!"); + 
expect(result.toString()).toBe(BigInt(1).toString()); }); it("4 should return 2", () => { const result = bigIntSqrt(BigInt(4)); - assert.equal(result.toString(), BigInt(2).toString(), "Should have returned 2!"); + expect(result.toString()).toBe(BigInt(2).toString()); }); it("16 should return 4", () => { const result = bigIntSqrt(BigInt(16)); - assert.equal(result.toString(), BigInt(4).toString(), "Should have returned 4!"); + expect(result.toString()).toBe(BigInt(4).toString()); }); it("31 should return 5", () => { const result = bigIntSqrt(BigInt(31)); - assert.equal(result.toString(), BigInt(5).toString(), "Should have returned 5!"); + expect(result.toString()).toBe(BigInt(5).toString()); }); }); }); diff --git a/packages/utils/test/unit/objects.test.ts b/packages/utils/test/unit/objects.test.ts index ebad6c3f447c..4699a8c6f405 100644 --- a/packages/utils/test/unit/objects.test.ts +++ b/packages/utils/test/unit/objects.test.ts @@ -1,20 +1,19 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isPlainObject, objectToExpectedCase} from "../../src/index.js"; describe("Objects helper", () => { it("should be plain object", () => { - expect(isPlainObject(Object.create({}))).to.equal(true); - expect(isPlainObject(Object.create(Object.create(Object.prototype)))).to.equal(true); - expect(isPlainObject({foo: "bar"})).to.equal(true); - expect(isPlainObject({})).to.equal(true); + expect(isPlainObject(Object.create({}))).toBe(true); + expect(isPlainObject(Object.create(Object.create(Object.prototype)))).toBe(true); + expect(isPlainObject({foo: "bar"})).toBe(true); + expect(isPlainObject({})).toBe(true); }); it("should not be plain object", () => { - expect(isPlainObject(1)).to.equal(false); - expect(isPlainObject(["foo", "bar"])).to.equal(false); - expect(isPlainObject([])).to.equal(false); - expect(isPlainObject(null)).to.equal(false); + expect(isPlainObject(1)).toBe(false); + expect(isPlainObject(["foo", 
"bar"])).toBe(false); + expect(isPlainObject([])).toBe(false); + expect(isPlainObject(null)).toBe(false); }); }); @@ -54,11 +53,11 @@ describe("objectToExpectedCase", () => { for (const {id, snake, camel} of testCases) { describe(id, () => { it("snake > camel", () => { - expect(objectToExpectedCase(snake, "camel")).to.deep.equal(camel); + expect(objectToExpectedCase(snake, "camel")).toEqual(camel); }); it("camel > snake", () => { - expect(objectToExpectedCase(camel, "snake")).to.deep.equal(snake); + expect(objectToExpectedCase(camel, "snake")).toEqual(snake); }); }); } diff --git a/packages/utils/test/unit/promise.node.test.ts b/packages/utils/test/unit/promise.node.test.ts new file mode 100644 index 000000000000..c9f6a3c2f98d --- /dev/null +++ b/packages/utils/test/unit/promise.node.test.ts @@ -0,0 +1,35 @@ +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; +import {callFnWhenAwait} from "../../src/promise.js"; + +// TODO: Need to debug why vi.useFakeTimers() is not working for the browsers +describe("callFnWhenAwait util", function () { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + it("should call function while awaing for promise", async () => { + const p = new Promise((resolve) => setTimeout(() => resolve("done"), 5 * 1000)); + const stub = vi.fn(); + const result = await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), vi.advanceTimersByTimeAsync(5000)]); + expect(result[0]).toBe("done"); + expect(stub).toHaveBeenCalledTimes(2); + await vi.advanceTimersByTimeAsync(5000); + expect(stub).toHaveBeenCalledTimes(2); + }); + + it("should throw error", async () => { + const stub = vi.fn(); + const p = new Promise((_, reject) => setTimeout(() => reject(new Error("done")), 5 * 1000)); + try { + await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), vi.advanceTimersByTimeAsync(5000)]); + expect.fail("should throw error here"); + } catch (e) { + expect((e as Error).message).toBe("done"); + 
expect(stub).toHaveBeenCalledTimes(2); + } + }); +}); diff --git a/packages/utils/test/unit/promise.test.ts b/packages/utils/test/unit/promise.test.ts deleted file mode 100644 index dec5dc370a2b..000000000000 --- a/packages/utils/test/unit/promise.test.ts +++ /dev/null @@ -1,37 +0,0 @@ -import "../setup.js"; -import {expect} from "chai"; -import sinon from "sinon"; -import {callFnWhenAwait} from "../../src/promise.js"; - -describe("callFnWhenAwait util", function () { - const sandbox = sinon.createSandbox(); - beforeEach(() => { - sandbox.useFakeTimers(); - }); - - afterEach(() => { - sandbox.restore(); - }); - - it("should call function while awaing for promise", async () => { - const p = new Promise((resolve) => setTimeout(() => resolve("done"), 5 * 1000)); - const stub = sandbox.stub(); - const result = await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), sandbox.clock.tickAsync(5000)]); - expect(result[0]).to.be.equal("done"); - expect(stub).to.be.calledTwice; - await sandbox.clock.tickAsync(5000); - expect(stub).to.be.calledTwice; - }); - - it("should throw error", async () => { - const stub = sandbox.stub(); - const p = new Promise((_, reject) => setTimeout(() => reject(new Error("done")), 5 * 1000)); - try { - await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), sandbox.clock.tickAsync(5000)]); - expect.fail("should throw error here"); - } catch (e) { - expect((e as Error).message).to.be.equal("done"); - expect(stub).to.be.calledTwice; - } - }); -}); diff --git a/packages/utils/test/unit/promiserace.test.ts b/packages/utils/test/unit/promiserace.test.ts index 25952f828920..5d0567553522 100644 --- a/packages/utils/test/unit/promiserace.test.ts +++ b/packages/utils/test/unit/promiserace.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {racePromisesWithCutoff, RaceEvent} from "../../src/promise.js"; describe("racePromisesWithCutoff", () => { @@ -98,7 +98,7 @@ describe("racePromisesWithCutoff", () => 
{ testEvents.push(event) ); const testResultsCmp = testResults.map((res: string | Error) => (res instanceof Error ? res.message : res)); - expect({results: testResultsCmp, events: testEvents}).to.be.deep.equal({results, events}); + expect({results: testResultsCmp, events: testEvents}).toEqual({results, events}); }); } }); diff --git a/packages/utils/test/unit/retry.test.ts b/packages/utils/test/unit/retry.test.ts index b5211c7e106b..12afb7597015 100644 --- a/packages/utils/test/unit/retry.test.ts +++ b/packages/utils/test/unit/retry.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {retry, RetryOptions} from "../../src/retry.js"; describe("retry", () => { @@ -39,9 +38,9 @@ describe("retry", () => { for (const {id, fn, opts, result} of testCases) { it(id, async () => { if (result instanceof Error) { - await expect(retry(fn, opts)).to.be.rejectedWith(result); + await expect(retry(fn, opts)).rejects.toThrow(result); } else { - expect(await retry(fn, opts)).to.deep.equal(result); + expect(await retry(fn, opts)).toEqual(result); } }); } diff --git a/packages/utils/test/unit/sleep.test.ts b/packages/utils/test/unit/sleep.test.ts index 44f7d309412a..a887560836eb 100644 --- a/packages/utils/test/unit/sleep.test.ts +++ b/packages/utils/test/unit/sleep.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {sleep} from "../../src/sleep.js"; import {ErrorAborted} from "../../src/errors.js"; @@ -13,20 +12,19 @@ describe("sleep", function () { const controller = new AbortController(); setTimeout(() => controller.abort(), 10); - // Sleep for longer than the current test timeout. 
- // If the abort signal doesn't work mocha will throw a timeout error - const sleepTime = 2 * this.timeout(); + const sleepTime = 5000; - await expect(sleep(sleepTime, controller.signal)).to.rejectedWith(ErrorAborted); + await expect(sleep(sleepTime, controller.signal)).rejects.toThrow(ErrorAborted); }); it("Should abort timeout with already aborted signal", async function () { const controller = new AbortController(); controller.abort(); - expect(controller.signal.aborted, "Signal should already be aborted").to.equal(true); + // "Signal should already be aborted" + expect(controller.signal.aborted).toBe(true); - await expect(sleep(0, controller.signal)).to.rejectedWith(ErrorAborted); + await expect(sleep(0, controller.signal)).rejects.toThrow(ErrorAborted); }); it("sleep 0 must tick the event loop", async () => { @@ -51,16 +49,13 @@ describe("sleep", function () { await new Promise((r) => setTimeout(r, 0)); } - expect(steps).to.deep.equal( - [ - // Sync execution - Step.beforeSleep, - // Next tick, first registered callback - Step.setTimeout0, - // Next tick, second registered callback - Step.afterSleep, - ], - "Wrong steps" - ); + expect(steps).toEqual([ + // Sync execution + Step.beforeSleep, + // Next tick, first registered callback + Step.setTimeout0, + // Next tick, second registered callback + Step.afterSleep, + ]); }); }); diff --git a/packages/utils/test/unit/timeout.test.ts b/packages/utils/test/unit/timeout.test.ts index a2b430e86855..b8844355effb 100644 --- a/packages/utils/test/unit/timeout.test.ts +++ b/packages/utils/test/unit/timeout.test.ts @@ -1,14 +1,11 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect, afterEach} from "vitest"; import {withTimeout} from "../../src/timeout.js"; import {ErrorAborted, TimeoutError} from "../../src/errors.js"; describe("withTimeout", function () { const data = "DATA"; const shortTimeoutMs = 10; - // Sleep for longer than the current test timeout. 
- // If the abort signal doesn't work mocha will throw a timeout error - const longTimeoutMs = 2 * this.timeout(); + const longTimeoutMs = 5000; const pendingTimeouts: NodeJS.Timeout[] = []; @@ -32,33 +29,33 @@ describe("withTimeout", function () { it("Should resolve timeout", async function () { const res = await withTimeout(() => pause(shortTimeoutMs, data), longTimeoutMs); - expect(res).to.equal(data); + expect(res).toBe(data); }); it("Should resolve timeout with not triggered signal", async function () { const controller = new AbortController(); const res = await withTimeout(() => pause(shortTimeoutMs, data), longTimeoutMs, controller.signal); - expect(res).to.equal(data); + expect(res).toBe(data); }); it("Should abort timeout with triggered signal", async function () { const controller = new AbortController(); setTimeout(() => controller.abort(), shortTimeoutMs); - await expect(withTimeout(() => pause(longTimeoutMs, data), longTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(longTimeoutMs, data), longTimeoutMs, controller.signal)).rejects.toThrow( ErrorAborted ); }); it("Should timeout with no signal", async function () { - await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs)).to.rejectedWith(TimeoutError); + await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs)).rejects.toThrow(TimeoutError); }); it("Should timeout with not triggered signal", async function () { const controller = new AbortController(); - await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs, controller.signal)).rejects.toThrow( TimeoutError ); }); @@ -67,9 +64,10 @@ describe("withTimeout", function () { const controller = new AbortController(); controller.abort(); - expect(controller.signal.aborted, "Signal should already be aborted").to.equal(true); + // "Signal should already be 
aborted" + expect(controller.signal.aborted).toBe(true); - await expect(withTimeout(() => pause(shortTimeoutMs, data), shortTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(shortTimeoutMs, data), shortTimeoutMs, controller.signal)).rejects.toThrow( ErrorAborted ); }); diff --git a/packages/utils/test/unit/waitFor.test.ts b/packages/utils/test/unit/waitFor.test.ts index d659be3d4bcb..293e5aba936a 100644 --- a/packages/utils/test/unit/waitFor.test.ts +++ b/packages/utils/test/unit/waitFor.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {waitFor, createElapsedTimeTracker} from "../../src/waitFor.js"; import {ErrorAborted, TimeoutError} from "../../src/errors.js"; import {sleep} from "../../src/sleep.js"; @@ -9,7 +8,7 @@ describe("waitFor", () => { const timeout = 20; it("Should resolve if condition is already true", async () => { - await expect(waitFor(() => true, {interval, timeout})).to.be.fulfilled; + await expect(waitFor(() => true, {interval, timeout})).resolves.toBeUndefined(); }); it("Should resolve if condition becomes true within timeout", async () => { @@ -21,19 +20,19 @@ describe("waitFor", () => { }); it("Should reject with TimeoutError if condition does not become true within timeout", async () => { - await expect(waitFor(() => false, {interval, timeout})).to.be.rejectedWith(TimeoutError); + await expect(waitFor(() => false, {interval, timeout})).rejects.toThrow(TimeoutError); }); it("Should reject with ErrorAborted if aborted before condition becomes true", async () => { const controller = new AbortController(); setTimeout(() => controller.abort(), interval); - await expect(waitFor(() => false, {interval, timeout, signal: controller.signal})).to.be.rejectedWith(ErrorAborted); + await expect(waitFor(() => false, {interval, timeout, signal: controller.signal})).rejects.toThrow(ErrorAborted); }); it("Should reject with ErrorAborted if 
signal is already aborted", async () => { const controller = new AbortController(); controller.abort(); - await expect(waitFor(() => true, {interval, timeout, signal: controller.signal})).to.be.rejectedWith(ErrorAborted); + await expect(waitFor(() => true, {interval, timeout, signal: controller.signal})).rejects.toThrow(ErrorAborted); }); }); @@ -41,7 +40,7 @@ describe("waitForElapsedTime", () => { it("should true for the first time", () => { const callIfTimePassed = createElapsedTimeTracker({minElapsedTime: 1000}); - expect(callIfTimePassed()).to.be.true; + expect(callIfTimePassed()).toBe(true); }); it("should return true after the minElapsedTime has passed", async () => { @@ -50,7 +49,7 @@ describe("waitForElapsedTime", () => { await sleep(150); - expect(callIfTimePassed()).to.be.true; + expect(callIfTimePassed()).toBe(true); }); it("should return false before the minElapsedTime has passed", async () => { @@ -59,6 +58,6 @@ describe("waitForElapsedTime", () => { await sleep(10); - expect(callIfTimePassed()).to.be.false; + expect(callIfTimePassed()).toBe(false); }); }); diff --git a/packages/utils/tsconfig.e2e.json b/packages/utils/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/utils/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/utils/vitest.browser.config.ts b/packages/utils/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/utils/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/utils/vitest.config.ts 
b/packages/utils/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/utils/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/utils/webpack.test.config.cjs b/packages/utils/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/utils/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/validator/package.json b/packages/validator/package.json index ecd15116169a..54d332455114 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/validator", - "version": "1.13.0", + "version": "1.14.0", "description": "A Typescript implementation of the validator client", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -50,18 +50,18 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/api": "^1.13.0", - "@lodestar/config": "^1.13.0", - "@lodestar/db": "^1.13.0", - "@lodestar/params": "^1.13.0", - "@lodestar/state-transition": "^1.13.0", - "@lodestar/types": "^1.13.0", - "@lodestar/utils": "^1.13.0", + "@lodestar/api": "^1.14.0", + "@lodestar/config": "^1.14.0", + "@lodestar/db": "^1.14.0", + "@lodestar/params": "^1.14.0", + "@lodestar/state-transition": "^1.14.0", + "@lodestar/types": "^1.14.0", + "@lodestar/utils": "^1.14.0", "bigint-buffer": "^1.1.5", "strict-event-emitter-types": "^2.0.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.13.0", + "@lodestar/test-utils": "^1.14.0", "bigint-buffer": "^1.1.5", "rimraf": "^4.4.1" } diff --git a/packages/validator/src/index.ts b/packages/validator/src/index.ts 
index 39a331af6657..4619b924ef63 100644 --- a/packages/validator/src/index.ts +++ b/packages/validator/src/index.ts @@ -1,5 +1,5 @@ export {Validator, type ValidatorOptions} from "./validator.js"; -export {ValidatorStore, SignerType, defaultOptions} from "./services/validatorStore.js"; +export {ValidatorStore, SignerType, defaultOptions, MAX_BUILDER_BOOST_FACTOR} from "./services/validatorStore.js"; export type { Signer, SignerLocal, @@ -8,7 +8,7 @@ export type { ProposerConfig, } from "./services/validatorStore.js"; export {waitForGenesis} from "./genesis.js"; -export {getMetrics, type Metrics, type MetricsRegister} from "./metrics.js"; +export {getMetrics, type Metrics} from "./metrics.js"; // Remote signer client export { diff --git a/packages/validator/src/metrics.ts b/packages/validator/src/metrics.ts index 5bc3895414a2..4acf66955769 100644 --- a/packages/validator/src/metrics.ts +++ b/packages/validator/src/metrics.ts @@ -1,3 +1,5 @@ +import {MetricsRegisterExtra} from "@lodestar/utils"; + export enum MessageSource { forward = "forward", publish = "publish", @@ -11,64 +13,6 @@ export enum BeaconHealth { ERROR = 4, } -type LabelsGeneric = Record; -type CollectFn = (metric: Gauge) => void; - -interface Gauge { - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void; - - dec(value?: number): void; - dec(labels: Labels, value?: number): void; - dec(arg1?: Labels | number, arg2?: number): void; - - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void; - - addCollect(collectFn: CollectFn): void; -} - -interface Histogram { - startTimer(): () => number; - - observe(value: number): void; - observe(labels: Labels, values: number): void; - observe(arg1: Labels | number, 
arg2?: number): void; - - reset(): void; -} - -interface AvgMinMax { - set(values: number[]): void; - set(labels: Labels, values: number[]): void; - set(arg1?: Labels | number[], arg2?: number[]): void; -} - -type GaugeConfig = { - name: string; - help: string; - labelNames?: keyof Labels extends string ? (keyof Labels)[] : undefined; -}; - -type HistogramConfig = { - name: string; - help: string; - labelNames?: (keyof Labels)[]; - buckets?: number[]; -}; - -type AvgMinMaxConfig = GaugeConfig; - -export interface MetricsRegister { - gauge(config: GaugeConfig): Gauge; - histogram(config: HistogramConfig): Histogram; - avgMinMax(config: AvgMinMaxConfig): AvgMinMax; -} - export type Metrics = ReturnType; export type LodestarGitData = { @@ -81,10 +25,10 @@ export type LodestarGitData = { }; /** - * A collection of metrics used throughout the Gossipsub behaviour. + * A collection of metrics used by the validator client */ // eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) { +export function getMetrics(register: MetricsRegisterExtra, gitData: LodestarGitData) { // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types. 
// Track version, same as https://github.com/ChainSafe/lodestar/blob/6df28de64f12ea90b341b219229a47c8a25c9343/packages/lodestar/src/metrics/metrics/lodestar.ts#L17 @@ -92,7 +36,7 @@ export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) .gauge({ name: "lodestar_version", help: "Lodestar version", - labelNames: Object.keys(gitData) as (keyof LodestarGitData)[], + labelNames: Object.keys(gitData) as [keyof LodestarGitData], }) .set(gitData, 1); @@ -367,7 +311,7 @@ export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) labelNames: ["routeId"], }), - urlsScore: register.gauge<{urlIndex: string}>({ + urlsScore: register.gauge<{urlIndex: number}>({ name: "vc_rest_api_client_urls_score", help: "Current score of REST API URLs by url index", labelNames: ["urlIndex"], diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts index d65ce2616fb5..b17b7519b2cf 100644 --- a/packages/validator/src/services/block.ts +++ b/packages/validator/src/services/block.ts @@ -4,15 +4,14 @@ import { Slot, BLSSignature, allForks, - isBlindedBeaconBlock, + isBlindedSignedBeaconBlock, ProducedBlockSource, deneb, isBlockContents, - isBlindedBlockContents, } from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {ForkPreBlobs, ForkBlobs, ForkSeq} from "@lodestar/params"; -import {extendError, prettyBytes} from "@lodestar/utils"; +import {ForkPreBlobs, ForkBlobs, ForkSeq, ForkExecution} from "@lodestar/params"; +import {ETH_TO_GWEI, ETH_TO_WEI, extendError, gweiToWei, prettyBytes} from "@lodestar/utils"; import {Api, ApiError, routes} from "@lodestar/api"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -21,42 +20,45 @@ import {formatBigDecimal} from "../util/format.js"; import {ValidatorStore} from "./validatorStore.js"; import {BlockDutiesService, GENESIS_SLOT} from "./blockDuties.js"; -const ETH_TO_WEI = BigInt("1000000000000000000"); 
// display upto 5 decimal places const MAX_DECIMAL_FACTOR = BigInt("100000"); // The following combination of blocks and blobs can be produced // i) a full block pre deneb // ii) a full block and full blobs post deneb -// iii) a blinded block pre deneb as a result of beacon/execution race -// iv) a blinded block + blinded blobs as a result of beacon/execution race +// iii) a blinded block post bellatrix type FullOrBlindedBlockWithContents = | { version: ForkPreBlobs; block: allForks.BeaconBlock; - blobs: null; + contents: null; executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource.engine; } | { version: ForkBlobs; block: allForks.BeaconBlock; - blobs: deneb.BlobSidecars; + contents: { + kzgProofs: deneb.KZGProofs; + blobs: deneb.Blobs; + }; executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource.engine; } | { - version: ForkPreBlobs; + version: ForkExecution; block: allForks.BlindedBeaconBlock; - blobs: null; - executionPayloadBlinded: true; - } - | { - version: ForkBlobs; - block: allForks.BlindedBeaconBlock; - blobs: deneb.BlindedBlobSidecars; + contents: null; executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; }; type DebugLogCtx = {debugLogCtx: Record}; +type BlockProposalOpts = { + useProduceBlockV3?: boolean; + broadcastValidation: routes.beacon.BroadcastValidation; + blindedLocal: boolean; +}; /** * Service that sets up and handles validator block proposal duties. 
*/ @@ -70,7 +72,7 @@ export class BlockProposingService { private readonly clock: IClock, private readonly validatorStore: ValidatorStore, private readonly metrics: Metrics | null, - private readonly opts: {useProduceBlockV3: boolean; broadcastValidation: routes.beacon.BroadcastValidation} + private readonly opts: BlockProposalOpts ) { this.dutiesService = new BlockDutiesService( config, @@ -119,24 +121,38 @@ export class BlockProposingService { const debugLogCtx = {...logCtx, validator: pubkeyHex}; const strictFeeRecipientCheck = this.validatorStore.strictFeeRecipientCheck(pubkeyHex); - const builderSelection = this.validatorStore.getBuilderSelection(pubkeyHex); + const {selection: builderSelection, boostFactor: builderBoostFactor} = + this.validatorStore.getBuilderSelectionParams(pubkeyHex); const feeRecipient = this.validatorStore.getFeeRecipient(pubkeyHex); + const blindedLocal = this.opts.blindedLocal; + const useProduceBlockV3 = this.opts.useProduceBlockV3 ?? this.config.getForkSeq(slot) >= ForkSeq.deneb; this.logger.debug("Producing block", { ...debugLogCtx, builderSelection, + builderBoostFactor, feeRecipient, strictFeeRecipientCheck, - useProduceBlockV3: this.opts.useProduceBlockV3, + useProduceBlockV3, + blindedLocal, }); this.metrics?.proposerStepCallProduceBlock.observe(this.clock.secFromSlot(slot)); - const produceBlockFn = this.opts.useProduceBlockV3 ? this.produceBlockWrapper : this.produceBlockV2Wrapper; - const blockContents = await produceBlockFn(this.config, slot, randaoReveal, graffiti, { + const produceBlockFn = useProduceBlockV3 ? 
this.produceBlockWrapper : this.produceBlockV2Wrapper; + const produceOpts = { feeRecipient, strictFeeRecipientCheck, - builderSelection, - }).catch((e: Error) => { + builderBoostFactor, + blindedLocal, + }; + const blockContents = await produceBlockFn( + this.config, + slot, + randaoReveal, + graffiti, + produceOpts, + builderSelection + ).catch((e: Error) => { this.metrics?.blockProposingErrors.inc({error: "produce"}); throw extendError(e, "Failed to produce block"); }); @@ -144,26 +160,15 @@ export class BlockProposingService { this.logger.debug("Produced block", {...debugLogCtx, ...blockContents.debugLogCtx}); this.metrics?.blocksProduced.inc(); - const signedBlockPromise = this.validatorStore.signBlock(pubkey, blockContents.block, slot, this.logger); - const signedBlobPromises = - blockContents.blobs !== null - ? blockContents.blobs.map((blob) => this.validatorStore.signBlob(pubkey, blob, slot)) - : undefined; - let signedBlock: allForks.FullOrBlindedSignedBeaconBlock, - signedBlobs: allForks.FullOrBlindedSignedBlobSidecar[] | undefined; - if (signedBlobPromises !== undefined) { - [signedBlock, ...signedBlobs] = await Promise.all([signedBlockPromise, ...signedBlobPromises]); - } else { - signedBlock = await signedBlockPromise; - signedBlobs = undefined; - } + const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot); - await this.publishBlockWrapper(signedBlock, signedBlobs, { - broadcastValidation: this.opts.broadcastValidation, - }).catch((e: Error) => { + const {broadcastValidation} = this.opts; + const publishOpts = {broadcastValidation}; + await this.publishBlockWrapper(signedBlock, blockContents.contents, publishOpts).catch((e: Error) => { this.metrics?.blockProposingErrors.inc({error: "publish"}); throw extendError(e, "Failed to publish block"); }); + this.metrics?.proposerStepCallPublishBlock.observe(this.clock.secFromSlot(slot)); this.metrics?.blocksPublished.inc(); this.logger.info("Published block", {...logCtx, 
graffiti, ...blockContents.debugLogCtx}); @@ -174,30 +179,22 @@ export class BlockProposingService { private publishBlockWrapper = async ( signedBlock: allForks.FullOrBlindedSignedBeaconBlock, - signedBlobSidecars?: allForks.FullOrBlindedSignedBlobSidecar[], + contents: {kzgProofs: deneb.KZGProofs; blobs: deneb.Blobs} | null, opts: {broadcastValidation?: routes.beacon.BroadcastValidation} = {} ): Promise => { - if (signedBlobSidecars === undefined) { - ApiError.assert( - isBlindedBeaconBlock(signedBlock.message) - ? await this.api.beacon.publishBlindedBlockV2(signedBlock as allForks.SignedBlindedBeaconBlock, opts) - : await this.api.beacon.publishBlockV2(signedBlock as allForks.SignedBeaconBlock, opts) - ); + if (isBlindedSignedBeaconBlock(signedBlock)) { + if (contents !== null) { + this.logger.warn( + "Ignoring contents while publishing blinded block - publishing beacon should assemble it from its local cache or builder" + ); + } + ApiError.assert(await this.api.beacon.publishBlindedBlockV2(signedBlock, opts)); } else { - ApiError.assert( - isBlindedBeaconBlock(signedBlock.message) - ? 
await this.api.beacon.publishBlindedBlockV2( - { - signedBlindedBlock: signedBlock, - signedBlindedBlobSidecars: signedBlobSidecars, - } as allForks.SignedBlindedBlockContents, - opts - ) - : await this.api.beacon.publishBlockV2( - {signedBlock, signedBlobSidecars} as allForks.SignedBlockContents, - opts - ) - ); + if (contents === null) { + ApiError.assert(await this.api.beacon.publishBlockV2(signedBlock, opts)); + } else { + ApiError.assert(await this.api.beacon.publishBlockV2({...contents, signedBlock}, opts)); + } } }; @@ -206,23 +203,27 @@ export class BlockProposingService { slot: Slot, randaoReveal: BLSSignature, graffiti: string, - {feeRecipient, strictFeeRecipientCheck, builderSelection}: routes.validator.ExtraProduceBlockOps + {feeRecipient, strictFeeRecipientCheck, builderBoostFactor, blindedLocal}: routes.validator.ExtraProduceBlockOps, + builderSelection: routes.validator.BuilderSelection ): Promise => { const res = await this.api.validator.produceBlockV3(slot, randaoReveal, graffiti, false, { feeRecipient, builderSelection, strictFeeRecipientCheck, + blindedLocal, + builderBoostFactor, }); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; const debugLogCtx = { - source: response.executionPayloadBlinded ? 
ProducedBlockSource.builder : ProducedBlockSource.engine, + executionPayloadSource: response.executionPayloadSource, + executionPayloadBlinded: response.executionPayloadBlinded, // winston logger doesn't like bigint executionPayloadValue: `${formatBigDecimal(response.executionPayloadValue, ETH_TO_WEI, MAX_DECIMAL_FACTOR)} ETH`, - consensusBlockValue: `${formatBigDecimal(response.consensusBlockValue, ETH_TO_WEI, MAX_DECIMAL_FACTOR)} ETH`, + consensusBlockValue: `${formatBigDecimal(response.consensusBlockValue, ETH_TO_GWEI, MAX_DECIMAL_FACTOR)} ETH`, totalBlockValue: `${formatBigDecimal( - response.executionPayloadValue + response.consensusBlockValue, + response.executionPayloadValue + gweiToWei(response.consensusBlockValue), ETH_TO_WEI, MAX_DECIMAL_FACTOR )} ETH`, @@ -232,7 +233,7 @@ export class BlockProposingService { api: "produceBlockV3", }; - return parseProduceBlockResponse(response, debugLogCtx); + return parseProduceBlockResponse(response, debugLogCtx, builderSelection); }; /** a wrapper function used for backward compatibility with the clients who don't have v3 implemented yet */ @@ -241,7 +242,8 @@ export class BlockProposingService { slot: Slot, randaoReveal: BLSSignature, graffiti: string, - {builderSelection}: routes.validator.ExtraProduceBlockOps + _opts: routes.validator.ExtraProduceBlockOps, + builderSelection: routes.validator.BuilderSelection ): Promise => { // other clients have always implemented builder vs execution race in produce blinded block // so if builderSelection is executiononly then only we call produceBlockV2 else produceBlockV3 always @@ -253,55 +255,73 @@ export class BlockProposingService { const res = await this.api.validator.produceBlockV2(slot, randaoReveal, graffiti); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; - return parseProduceBlockResponse({executionPayloadBlinded: false, ...response}, debugLogCtx); + const executionPayloadSource = ProducedBlockSource.engine; + + 
return parseProduceBlockResponse( + {executionPayloadBlinded: false, executionPayloadSource, ...response}, + debugLogCtx, + builderSelection + ); } else { Object.assign(debugLogCtx, {api: "produceBlindedBlock"}); const res = await this.api.validator.produceBlindedBlock(slot, randaoReveal, graffiti); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; + const executionPayloadSource = ProducedBlockSource.builder; - return parseProduceBlockResponse({executionPayloadBlinded: true, ...response}, debugLogCtx); + return parseProduceBlockResponse( + {executionPayloadBlinded: true, executionPayloadSource, ...response}, + debugLogCtx, + builderSelection + ); } }; } function parseProduceBlockResponse( response: routes.validator.ProduceFullOrBlindedBlockOrContentsRes, - debugLogCtx: Record + debugLogCtx: Record, + builderSelection: routes.validator.BuilderSelection ): FullOrBlindedBlockWithContents & DebugLogCtx { + const executionPayloadSource = response.executionPayloadSource; + + if ( + (builderSelection === routes.validator.BuilderSelection.BuilderOnly && + executionPayloadSource === ProducedBlockSource.engine) || + (builderSelection === routes.validator.BuilderSelection.ExecutionOnly && + executionPayloadSource === ProducedBlockSource.builder) + ) { + throw Error( + `Block not produced as per desired builderSelection=${builderSelection} executionPayloadSource=${executionPayloadSource}` + ); + } + if (response.executionPayloadBlinded) { - if (isBlindedBlockContents(response.data)) { - return { - block: response.data.blindedBlock, - blobs: response.data.blindedBlobSidecars, - version: response.version, - executionPayloadBlinded: true, - debugLogCtx, - } as FullOrBlindedBlockWithContents & DebugLogCtx; - } else { - return { - block: response.data, - blobs: null, - version: response.version, - executionPayloadBlinded: true, - debugLogCtx, - } as FullOrBlindedBlockWithContents & DebugLogCtx; - } + return { + block: response.data, 
+ contents: null, + version: response.version, + executionPayloadBlinded: true, + executionPayloadSource, + debugLogCtx, + } as FullOrBlindedBlockWithContents & DebugLogCtx; } else { if (isBlockContents(response.data)) { return { block: response.data.block, - blobs: response.data.blobSidecars, + contents: {blobs: response.data.blobs, kzgProofs: response.data.kzgProofs}, version: response.version, executionPayloadBlinded: false, + executionPayloadSource, debugLogCtx, } as FullOrBlindedBlockWithContents & DebugLogCtx; } else { return { block: response.data, - blobs: null, + contents: null, version: response.version, executionPayloadBlinded: false, + executionPayloadSource, debugLogCtx, } as FullOrBlindedBlockWithContents & DebugLogCtx; } diff --git a/packages/validator/src/services/prepareBeaconProposer.ts b/packages/validator/src/services/prepareBeaconProposer.ts index 7ca939fb0c41..7d7907a4592d 100644 --- a/packages/validator/src/services/prepareBeaconProposer.ts +++ b/packages/validator/src/services/prepareBeaconProposer.ts @@ -86,7 +86,8 @@ export function pollBuilderValidatorRegistration( .filter( (pubkeyHex): pubkeyHex is string => pubkeyHex !== undefined && - validatorStore.getBuilderSelection(pubkeyHex) !== routes.validator.BuilderSelection.ExecutionOnly + validatorStore.getBuilderSelectionParams(pubkeyHex).selection !== + routes.validator.BuilderSelection.ExecutionOnly ); if (pubkeyHexes.length > 0) { diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index e2736a09754a..03811062c2ad 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -7,7 +7,6 @@ import { computeDomain, ZERO_HASH, blindedOrFullBlockHashTreeRoot, - blindedOrFullBlobSidecarHashTreeRoot, } from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; import { @@ -20,7 +19,6 @@ import { DOMAIN_SYNC_COMMITTEE, 
DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF, DOMAIN_APPLICATION_BUILDER, - DOMAIN_BLOB_SIDECAR, } from "@lodestar/params"; import { allForks, @@ -71,6 +69,7 @@ type DefaultProposerConfig = { builder: { gasLimit: number; selection: routes.validator.BuilderSelection; + boostFactor: bigint; }; }; @@ -81,6 +80,7 @@ export type ProposerConfig = { builder?: { gasLimit?: number; selection?: routes.validator.BuilderSelection; + boostFactor?: bigint; }; }; @@ -125,12 +125,15 @@ export const defaultOptions = { defaultGasLimit: 30_000_000, builderSelection: routes.validator.BuilderSelection.ExecutionOnly, builderAliasSelection: routes.validator.BuilderSelection.MaxProfit, - // turn it off by default, turn it back on once other clients support v3 api - useProduceBlockV3: false, + builderBoostFactor: BigInt(100), // spec asks for gossip validation by default broadcastValidation: routes.beacon.BroadcastValidation.gossip, + // should request fetching the locally produced block in blinded format + blindedLocal: false, }; +export const MAX_BUILDER_BOOST_FACTOR = 2n ** 64n - 1n; + /** * Service that sets up and handles validator attester duties. */ @@ -155,6 +158,11 @@ export class ValidatorStore { this.metrics = metrics; const defaultConfig = valProposerConfig.defaultConfig; + const builderBoostFactor = defaultConfig.builder?.boostFactor ?? defaultOptions.builderBoostFactor; + if (builderBoostFactor > MAX_BUILDER_BOOST_FACTOR) { + throw Error(`Invalid builderBoostFactor=${builderBoostFactor} > MAX_BUILDER_BOOST_FACTOR for defaultConfig`); + } + this.defaultProposerConfig = { graffiti: defaultConfig.graffiti ?? "", strictFeeRecipientCheck: defaultConfig.strictFeeRecipientCheck ?? false, @@ -162,6 +170,7 @@ export class ValidatorStore { builder: { gasLimit: defaultConfig.builder?.gasLimit ?? defaultOptions.defaultGasLimit, selection: defaultConfig.builder?.selection ?? 
defaultOptions.builderSelection, + boostFactor: builderBoostFactor, }, }; @@ -252,8 +261,27 @@ export class ValidatorStore { delete validatorData["graffiti"]; } - getBuilderSelection(pubkeyHex: PubkeyHex): routes.validator.BuilderSelection { - return (this.validators.get(pubkeyHex)?.builder || {}).selection ?? this.defaultProposerConfig.builder.selection; + getBuilderSelectionParams(pubkeyHex: PubkeyHex): {selection: routes.validator.BuilderSelection; boostFactor: bigint} { + const selection = + (this.validators.get(pubkeyHex)?.builder || {}).selection ?? this.defaultProposerConfig.builder.selection; + + let boostFactor; + switch (selection) { + case routes.validator.BuilderSelection.MaxProfit: + boostFactor = + (this.validators.get(pubkeyHex)?.builder || {}).boostFactor ?? this.defaultProposerConfig.builder.boostFactor; + break; + + case routes.validator.BuilderSelection.BuilderAlways: + case routes.validator.BuilderSelection.BuilderOnly: + boostFactor = MAX_BUILDER_BOOST_FACTOR; + break; + + case routes.validator.BuilderSelection.ExecutionOnly: + boostFactor = BigInt(0); + } + + return {selection, boostFactor}; } strictFeeRecipientCheck(pubkeyHex: PubkeyHex): boolean { @@ -286,6 +314,34 @@ export class ValidatorStore { delete validatorData.builder?.gasLimit; } + getBuilderBoostFactor(pubkeyHex: PubkeyHex): bigint { + const validatorData = this.validators.get(pubkeyHex); + if (validatorData === undefined) { + throw Error(`Validator pubkey ${pubkeyHex} not known`); + } + return validatorData?.builder?.boostFactor ?? 
this.defaultProposerConfig.builder.boostFactor; + } + + setBuilderBoostFactor(pubkeyHex: PubkeyHex, boostFactor: bigint): void { + if (boostFactor > MAX_BUILDER_BOOST_FACTOR) { + throw Error(`Invalid builderBoostFactor=${boostFactor} > MAX_BUILDER_BOOST_FACTOR`); + } + + const validatorData = this.validators.get(pubkeyHex); + if (validatorData === undefined) { + throw Error(`Validator pubkey ${pubkeyHex} not known`); + } + validatorData.builder = {...validatorData.builder, boostFactor}; + } + + deleteBuilderBoostFactor(pubkeyHex: PubkeyHex): void { + const validatorData = this.validators.get(pubkeyHex); + if (validatorData === undefined) { + throw Error(`Validator pubkey ${pubkeyHex} not known`); + } + delete validatorData.builder?.boostFactor; + } + /** Return true if `index` is active part of this validator client */ hasValidatorIndex(index: ValidatorIndex): boolean { return this.indicesService.index2pubkey.has(index); @@ -315,6 +371,10 @@ export class ValidatorStore { async addSigner(signer: Signer, valProposerConfig?: ValidatorProposerConfig): Promise { const pubkey = getSignerPubkeyHex(signer); const proposerConfig = (valProposerConfig?.proposerConfig ?? {})[pubkey]; + const builderBoostFactor = proposerConfig?.builder?.boostFactor; + if (builderBoostFactor !== undefined && builderBoostFactor > MAX_BUILDER_BOOST_FACTOR) { + throw Error(`Invalid builderBoostFactor=${builderBoostFactor} > MAX_BUILDER_BOOST_FACTOR for pubkey=${pubkey}`); + } if (!this.validators.has(pubkey)) { // Doppelganger registration must be done before adding validator to signers @@ -395,37 +455,6 @@ export class ValidatorStore { } as allForks.FullOrBlindedSignedBeaconBlock; } - async signBlob( - pubkey: BLSPubkey, - blindedOrFull: allForks.FullOrBlindedBlobSidecar, - currentSlot: Slot - ): Promise { - // Make sure the block slot is not higher than the current slot to avoid potential attacks. 
- if (blindedOrFull.slot > currentSlot) { - throw Error(`Not signing block with slot ${blindedOrFull.slot} greater than current slot ${currentSlot}`); - } - - // Duties are filtered before-hard by doppelganger-safe, this assert should never throw - this.assertDoppelgangerSafe(pubkey); - - const signingSlot = blindedOrFull.slot; - const domain = this.config.getDomain(signingSlot, DOMAIN_BLOB_SIDECAR); - const blobRoot = blindedOrFullBlobSidecarHashTreeRoot(this.config, blindedOrFull); - // Don't use `computeSigningRoot()` here to compute the objectRoot in typesafe function blindedOrFullBlockHashTreeRoot() - const signingRoot = ssz.phase0.SigningData.hashTreeRoot({objectRoot: blobRoot, domain}); - - // Slashing protection is not required as blobs are binded to blocks which are already protected - const signableMessage: SignableMessage = { - type: SignableMessageType.BLOB, - data: blindedOrFull, - }; - - return { - message: blindedOrFull, - signature: await this.getSignature(pubkey, signingRoot, signingSlot, signableMessage), - } as allForks.FullOrBlindedSignedBlobSidecar; - } - async signRandao(pubkey: BLSPubkey, slot: Slot): Promise { const signingSlot = slot; const domain = this.config.getDomain(slot, DOMAIN_RANDAO); diff --git a/packages/validator/src/util/externalSignerClient.ts b/packages/validator/src/util/externalSignerClient.ts index 2716533e536f..90c6e1f464c8 100644 --- a/packages/validator/src/util/externalSignerClient.ts +++ b/packages/validator/src/util/externalSignerClient.ts @@ -15,7 +15,6 @@ export enum SignableMessageType { AGGREGATE_AND_PROOF = "AGGREGATE_AND_PROOF", ATTESTATION = "ATTESTATION", BLOCK_V2 = "BLOCK_V2", - BLOB = "BLOB", DEPOSIT = "DEPOSIT", RANDAO_REVEAL = "RANDAO_REVEAL", VOLUNTARY_EXIT = "VOLUNTARY_EXIT", @@ -65,7 +64,6 @@ export type SignableMessage = | {type: SignableMessageType.AGGREGATE_AND_PROOF; data: phase0.AggregateAndProof} | {type: SignableMessageType.ATTESTATION; data: phase0.AttestationData} | {type: 
SignableMessageType.BLOCK_V2; data: allForks.FullOrBlindedBeaconBlock} - | {type: SignableMessageType.BLOB; data: allForks.FullOrBlindedBlobSidecar} | {type: SignableMessageType.DEPOSIT; data: ValueOf} | {type: SignableMessageType.RANDAO_REVEAL; data: {epoch: Epoch}} | {type: SignableMessageType.VOLUNTARY_EXIT; data: phase0.VoluntaryExit} @@ -88,7 +86,6 @@ const requiresForkInfo: Record = { [SignableMessageType.SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF]: true, [SignableMessageType.VALIDATOR_REGISTRATION]: false, [SignableMessageType.BLS_TO_EXECUTION_CHANGE]: true, - [SignableMessageType.BLOB]: true, }; type Web3SignerSerializedRequest = { @@ -232,9 +229,5 @@ function serializerSignableMessagePayload(config: BeaconConfig, payload: Signabl case SignableMessageType.BLS_TO_EXECUTION_CHANGE: return {BLS_TO_EXECUTION_CHANGE: ssz.capella.BLSToExecutionChange.toJson(payload.data)}; - - case SignableMessageType.BLOB: - // TODO DENEB: freetheblobs - throw Error("web3signer for blob signing not yet implemented"); } } diff --git a/packages/validator/src/util/params.ts b/packages/validator/src/util/params.ts index 37908afaf86c..1431f4f3c56e 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -210,5 +210,6 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record ({message: block, signature: signedBlock.signature})); + validatorStore.getBuilderSelectionParams.returns({ + selection: routes.validator.BuilderSelection.MaxProfit, + boostFactor: BigInt(100), + }); + validatorStore.getGraffiti.returns("aaaa"); + validatorStore.getFeeRecipient.returns("0x00"); + validatorStore.strictFeeRecipientCheck.returns(false); + api.validator.produceBlockV3.resolves({ response: { data: signedBlock.message, @@ -65,6 +74,7 @@ describe("BlockDutiesService", function () { executionPayloadValue: BigInt(1), consensusBlockValue: BigInt(1), executionPayloadBlinded: false, + executionPayloadSource: ProducedBlockSource.engine, }, ok: true, status: 
HttpStatusCode.OK, @@ -84,5 +94,76 @@ describe("BlockDutiesService", function () { [signedBlock, {broadcastValidation: routes.beacon.BroadcastValidation.consensus}], "wrong publishBlock() args" ); + + // ProduceBlockV3 is called with all correct arguments + expect(api.validator.produceBlockV3.getCall(0).args).to.deep.equal( + [ + 1, + signedBlock.message.body.randaoReveal, + "aaaa", + false, + { + feeRecipient: "0x00", + builderSelection: routes.validator.BuilderSelection.MaxProfit, + strictFeeRecipientCheck: false, + blindedLocal: false, + builderBoostFactor: BigInt(100), + }, + ], + "wrong produceBlockV3() args" + ); + }); + + it("Should produce, sign, and publish a blinded block", async function () { + // Reply with some duties + const slot = 0; // genesisTime is right now, so test with slot = currentSlot + api.validator.getProposerDuties.resolves({ + response: { + dependentRoot: ZERO_HASH_HEX, + executionOptimistic: false, + data: [{slot: slot, validatorIndex: 0, pubkey: pubkeys[0]}], + }, + ok: true, + status: HttpStatusCode.OK, + }); + + const clock = new ClockMock(); + // use produceBlockV3 + const blockService = new BlockProposingService(config, loggerVc, api, clock, validatorStore, null, { + useProduceBlockV3: true, + broadcastValidation: routes.beacon.BroadcastValidation.consensus, + blindedLocal: true, + }); + + const signedBlock = ssz.bellatrix.SignedBlindedBeaconBlock.defaultValue(); + validatorStore.signRandao.resolves(signedBlock.message.body.randaoReveal); + validatorStore.signBlock.callsFake(async (_, block) => ({message: block, signature: signedBlock.signature})); + api.validator.produceBlockV3.resolves({ + response: { + data: signedBlock.message, + version: ForkName.bellatrix, + executionPayloadValue: BigInt(1), + consensusBlockValue: BigInt(1), + executionPayloadBlinded: true, + executionPayloadSource: ProducedBlockSource.engine, + }, + ok: true, + status: HttpStatusCode.OK, + }); + api.beacon.publishBlindedBlockV2.resolves(); + + // Trigger 
block production for slot 1 + const notifyBlockProductionFn = blockService["dutiesService"]["notifyBlockProductionFn"]; + notifyBlockProductionFn(1, [pubkeys[0]]); + + // Resolve all promises + await sleep(20, controller.signal); + + // Must have submitted the block received on signBlock() + expect(api.beacon.publishBlindedBlockV2.callCount).to.equal(1, "publishBlindedBlockV2() must be called once"); + expect(api.beacon.publishBlindedBlockV2.getCall(0).args).to.deep.equal( + [signedBlock, {broadcastValidation: routes.beacon.BroadcastValidation.consensus}], + "wrong publishBlock() args" + ); }); }); diff --git a/scripts/assert_no_yarn_warnings.sh b/scripts/assert_no_yarn_warnings.sh index 9b7ff1d76779..60dace730f92 100755 --- a/scripts/assert_no_yarn_warnings.sh +++ b/scripts/assert_no_yarn_warnings.sh @@ -5,21 +5,8 @@ OUTPUT=$(yarn install --check-files 2>&1) echo $OUTPUT -MATCH=("warning") - -# There are few yarn warnings we can't find a fix for. Excluding those. -# TODO: Keep checking occasionally if the warnings are fixed upstream. 
-EXCLUDE=("Pattern \[\".*\"\] is trying to unpack in the same destination") -ARGS=() - -for m in "${MATCH[@]}"; do ARGS+=(-e "$m"); done -for e in "${EXCLUDE[@]}"; do ARGS+=(--exclude "$e"); done -COMMAND="grep -qi ${ARGS[@]}" - -echo "Running $COMMAND" - # grep the output for 'warning' -if echo "$OUTPUT" | ${COMMAND}; then +if echo "$OUTPUT" | grep -qi 'warning'; then echo "There were warnings in yarn install --check-files" exit 1 else diff --git a/scripts/prepare-docs.sh b/scripts/prepare-docs.sh index c46e2596440d..78b508bf5f29 100755 --- a/scripts/prepare-docs.sh +++ b/scripts/prepare-docs.sh @@ -8,6 +8,7 @@ set -e # Copy contributing docs cp CONTRIBUTING.md $DOCS_DIR/pages/contribution/getting-started.md +cp SECURITY.md $DOCS_DIR/pages/security.md # Copy package README.md to docs cp -r packages/light-client/README.md $DOCS_DIR/pages/lightclient-prover/lightclient.md diff --git a/scripts/vitest/customMatchers.ts b/scripts/vitest/customMatchers.ts index 72735869b1c3..227c0a2c0c76 100644 --- a/scripts/vitest/customMatchers.ts +++ b/scripts/vitest/customMatchers.ts @@ -2,14 +2,14 @@ import {expect} from "vitest"; expect.extend({ - toBeValidEpochCommittee: ( + toBeValidEpochCommittee( committee: {index: number; slot: number; validators: unknown[]}, { committeeCount, validatorsPerCommittee, slotsPerEpoch, }: {committeeCount: number; validatorsPerCommittee: number; slotsPerEpoch: number} - ) => { + ) { if (committee.index < 0 || committee.index > committeeCount - 1) { return { message: () => @@ -39,10 +39,10 @@ expect.extend({ pass: true, }; }, - toBeWithMessage: (received: unknown, expected: unknown, message: string) => { - if (received === expected) { + toBeWithMessage(received: unknown, expected: unknown, message: string) { + if (Object.is(received, expected)) { return { - message: () => "Expected value is truthy", + message: () => "Received value is the same as expected value", pass: true, }; } @@ -50,19 +50,36 @@ expect.extend({ return { pass: false, message: () 
=> message, + actual: received, + expected, }; }, - toSatisfy: (received: unknown, func: (received: unknown) => boolean) => { + toSatisfy(received: unknown, func: (received: unknown) => boolean) { if (func(received)) { return { - message: () => "Expected value satisfied the condition", + message: () => "Received value satisfied the condition", pass: true, }; } return { pass: false, - message: () => "Expected value did not satisfy the condition", + message: () => "Received value did not satisfy the condition", + }; + }, + toEqualWithMessage(received: unknown, expected: unknown, message: string) { + if (this.equals(received, expected)) { + return { + message: () => "Received value equals expected value", + pass: true, + }; + } + + return { + pass: false, + message: () => message, + actual: received, + expected, }; }, }); diff --git a/types/vitest/index.d.ts b/types/vitest/index.d.ts index effc9507161c..387edcfa5279 100644 --- a/types/vitest/index.d.ts +++ b/types/vitest/index.d.ts @@ -26,6 +26,12 @@ interface CustomMatchers { * ``` * */ toBeWithMessage(expected: unknown, message: string): R; + /** + * @deprecated + * We highly recommend to not use this matcher instead use detail test case with .toEqual + * where you don't need message to explain assertion + * */ + toEqualWithMessage(expected: unknown, message: string): R; } interface CustomAsymmetricMatchers extends CustomMatchers { diff --git a/yarn.lock b/yarn.lock index 4663c7aaf0ee..6e1a1681d774 100644 --- a/yarn.lock +++ b/yarn.lock @@ -235,6 +235,14 @@ dependencies: "@babel/highlight" "^7.10.4" +"@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" + integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== + dependencies: + "@babel/highlight" "^7.23.4" + chalk "^2.4.2" + "@babel/core@^7.7.5": version "7.11.4" 
resolved "https://registry.npmjs.org/@babel/core/-/core-7.11.4.tgz" @@ -257,7 +265,7 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.11.0", "@babel/generator@^7.11.4": +"@babel/generator@^7.11.4": version "7.15.0" resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.15.0.tgz" integrity sha512-eKl4XdMrbpYvuB505KTta4AV9g+wWzmVBW69tX0H2NwKVKd2YJbKgyK6M8j/rgLbmHOYJn6rUklV677nOyJrEQ== @@ -266,21 +274,35 @@ jsesc "^2.5.1" source-map "^0.5.0" -"@babel/helper-function-name@^7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz" - integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== +"@babel/generator@^7.23.6": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e" + integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw== dependencies: - "@babel/helper-get-function-arity" "^7.10.4" - "@babel/template" "^7.10.4" - "@babel/types" "^7.10.4" + "@babel/types" "^7.23.6" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" -"@babel/helper-get-function-arity@^7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz" - integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== dependencies: - "@babel/types" "^7.10.4" + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== + dependencies: + "@babel/types" "^7.22.5" "@babel/helper-member-expression-to-functions@^7.10.4": version "7.11.0" @@ -341,6 +363,13 @@ dependencies: "@babel/types" "^7.11.0" +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-string-parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" @@ -398,11 +427,25 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.10.4", "@babel/parser@^7.11.0", "@babel/parser@^7.11.4": +"@babel/highlight@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" + integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + +"@babel/parser@^7.10.4", "@babel/parser@^7.11.4": 
version "7.15.3" resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.15.3.tgz" integrity sha512-O0L6v/HvqbdJawj0iBEfVQMc3/6WP+AeOsovsIgBFyJaG+W2w7eqvZB7puddATmWuARlm1SX7DwxJ/JJUnDpEA== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.6": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" + integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== + "@babel/parser@^7.23.3": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" @@ -417,20 +460,30 @@ "@babel/parser" "^7.10.4" "@babel/types" "^7.10.4" -"@babel/traverse@^7.10.4", "@babel/traverse@^7.11.0": - version "7.11.0" - resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.11.0.tgz" - integrity sha512-ZB2V+LskoWKNpMq6E5UUCrjtDUh5IOTAyIl0dTjIEoXum/iKWkoIEKIRDnUucO6f+2FzNkE0oD4RLKoPIufDtg== +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.11.0" - "@babel/helper-function-name" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.11.0" - "@babel/parser" "^7.11.0" - "@babel/types" "^7.11.0" - debug "^4.1.0" + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.10.4", "@babel/traverse@^7.11.0": + version "7.23.7" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.7.tgz#9a7bf285c928cb99b5ead19c3b1ce5b310c9c305" + integrity sha512-tY3mM8rH9jM0YHFGyfC0/xf+SB5eKUu7HPj7/k3fpi9dAlsMc5YbQvDi0Sh2QTPXqMhyaAtzAr807TIyfQrmyg== + dependencies: + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.6" + 
"@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.6" + "@babel/types" "^7.23.6" + debug "^4.3.1" globals "^11.1.0" - lodash "^4.17.19" "@babel/types@^7.10.4", "@babel/types@^7.11.0", "@babel/types@^7.15.0": version "7.22.5" @@ -441,6 +494,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.6": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd" + integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg== + dependencies: + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@babel/types@^7.23.3": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" @@ -511,15 +573,13 @@ "@noble/hashes" "^1.0.0" "@scure/bip39" "^1.0.0" -"@chainsafe/bls-keystore@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@chainsafe/bls-keystore/-/bls-keystore-2.0.0.tgz" - integrity sha512-XGtgGKdjYqKP09SUsfwaStsYuWuXB56/614dC1XhggG4LH8KTrFOjxb9SkS+T1BUu5doCXd9YA+gNLy01zv+Ww== +"@chainsafe/bls-keystore@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/bls-keystore/-/bls-keystore-3.0.0.tgz#e28c979f7664417e4917fa0d4d32fa2b9416e9c6" + integrity sha512-vlRIIXnn555wq2emhqnSR7btno17M0sCcfdQ+Dhgr7IH6n0CMoTGw9qcrpnNYwM+9OPm3matSYeZc9mNlXf7fQ== dependencies: - ajv "^6.12.2" - buffer "^5.4.3" - ethereum-cryptography "^0.1.3" - uuid "^3.3.3" + ethereum-cryptography "^1.0.0" + uuid "8.3.2" "@chainsafe/bls@7.1.1": version "7.1.1" @@ -1373,6 +1433,11 @@ dependencies: fastify-plugin "^4.0.0" 
+"@fastify/busboy@^2.0.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.0.tgz#0709e9f4cb252351c609c6e6d8d6779a8d25edff" + integrity sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA== + "@fastify/cors@^8.2.1": version "8.2.1" resolved "https://registry.yarnpkg.com/@fastify/cors/-/cors-8.2.1.tgz#dd348162bcbfb87dff4b492e2bef32d41244006a" @@ -1507,7 +1572,7 @@ dependencies: "@sinclair/typebox" "^0.27.8" -"@jridgewell/gen-mapping@^0.3.0": +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": version "0.3.3" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== @@ -2598,6 +2663,11 @@ resolved "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.0-rc.0.tgz" integrity sha512-iXKByCMfrlO5S6Oh97BuM56tM2cIBB0XsL/vWF/AtJrJEKx4MC/Xdu0xDsGXMGcNWpqF7ujMsjjnp0+UHBwnDQ== +"@opentelemetry/api@^1.4.0": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.7.0.tgz#b139c81999c23e3c8d3c0a7234480e945920fc40" + integrity sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw== + "@parcel/watcher@2.0.4": version "2.0.4" resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.0.4.tgz#f300fef4cc38008ff4b8c29d92588eced3ce014b" @@ -2716,7 +2786,7 @@ estree-walker "^2.0.2" magic-string "^0.30.3" -"@rollup/plugin-virtual@^3.0.1": +"@rollup/plugin-virtual@^3.0.2": version "3.0.2" resolved "https://registry.yarnpkg.com/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz#17e17eeecb4c9fa1c0a6e72c9e5f66382fddbb82" integrity sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A== @@ -2928,74 +2998,74 @@ resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz" integrity 
sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== -"@swc/core-darwin-arm64@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.93.tgz#aefd94625451988286bebccb1c072bae0a36bcdb" - integrity sha512-gEKgk7FVIgltnIfDO6GntyuQBBlAYg5imHpRgLxB1zSI27ijVVkksc6QwISzFZAhKYaBWIsFSVeL9AYSziAF7A== - -"@swc/core-darwin-x64@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.93.tgz#18409c6effdf508ddf1ebccfa77d35aaa6cd72f0" - integrity sha512-ZQPxm/fXdDQtn3yrYSL/gFfA8OfZ5jTi33yFQq6vcg/Y8talpZ+MgdSlYM0FkLrZdMTYYTNFiuBQuuvkA+av+Q== - -"@swc/core-linux-arm-gnueabihf@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.93.tgz#23a97bc94a8b2f23fb6cc4bc9d8936899e5eeff5" - integrity sha512-OYFMMI2yV+aNe3wMgYhODxHdqUB/jrK0SEMHHS44GZpk8MuBXEF+Mcz4qjkY5Q1EH7KVQqXb/gVWwdgTHpjM2A== - -"@swc/core-linux-arm64-gnu@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.93.tgz#7a17406a7cf76a959a617626d5ee2634ae9afa26" - integrity sha512-BT4dT78odKnJMNiq5HdjBsv29CiIdcCcImAPxeFqAeFw1LL6gh9nzI8E96oWc+0lVT5lfhoesCk4Qm7J6bty8w== - -"@swc/core-linux-arm64-musl@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.93.tgz#a30be7780090afefd3b8706398418cbe1d23db49" - integrity sha512-yH5fWEl1bktouC0mhh0Chuxp7HEO4uCtS/ly1Vmf18gs6wZ8DOOkgAEVv2dNKIryy+Na++ljx4Ym7C8tSJTrLw== - -"@swc/core-linux-x64-gnu@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.93.tgz#41e903fd82e059952d16051b442cbe65ee5b8cb3" - integrity sha512-OFUdx64qvrGJhXKEyxosHxgoUVgba2ztYh7BnMiU5hP8lbI8G13W40J0SN3CmFQwPP30+3oEbW7LWzhKEaYjlg== - -"@swc/core-linux-x64-musl@1.3.93": - version "1.3.93" - 
resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.93.tgz#0866807545c44eac9b3254b374310ad5e1c573f9" - integrity sha512-4B8lSRwEq1XYm6xhxHhvHmKAS7pUp1Q7E33NQ2TlmFhfKvCOh86qvThcjAOo57x8DRwmpvEVrqvpXtYagMN6Ig== - -"@swc/core-win32-arm64-msvc@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.93.tgz#c72411dea2fd4f62a832f71a6e15424d849e7610" - integrity sha512-BHShlxtkven8ZjjvZ5QR6sC5fZCJ9bMujEkiha6W4cBUTY7ce7qGFyHmQd+iPC85d9kD/0cCiX/Xez8u0BhO7w== - -"@swc/core-win32-ia32-msvc@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.93.tgz#05c2b031b976af4ef81f5073ee114254678a5d5d" - integrity sha512-nEwNWnz4JzYAK6asVvb92yeylfxMYih7eMQOnT7ZVlZN5ba9WF29xJ6kcQKs9HRH6MvWhz9+wRgv3FcjlU6HYA== - -"@swc/core-win32-x64-msvc@1.3.93": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.93.tgz#f8748b3fd1879f13084b1b0814edf328c662935c" - integrity sha512-jibQ0zUr4kwJaQVwgmH+svS04bYTPnPw/ZkNInzxS+wFAtzINBYcU8s2PMWbDb2NGYiRSEeoSGyAvS9H+24JFA== - -"@swc/core@^1.3.10": - version "1.3.93" - resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.3.93.tgz#be4282aa44deffb0e5081a2613bac00335600630" - integrity sha512-690GRr1wUGmGYZHk7fUduX/JUwViMF2o74mnZYIWEcJaCcd9MQfkhsxPBtjeg6tF+h266/Cf3RPYhsFBzzxXcA== +"@swc/core-darwin-arm64@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.101.tgz#9ffdc0e77c31b20877fa7405c82905e0c76738d0" + integrity sha512-mNFK+uHNPRXSnfTOG34zJOeMl2waM4hF4a2NY7dkMXrPqw9CoJn4MwTXJcyMiSz1/BnNjjTCHF3Yhj0jPxmkzQ== + +"@swc/core-darwin-x64@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.101.tgz#e50130e21e3cfd3029fd6cea43e8309b58ad9fa6" + integrity 
sha512-B085j8XOx73Fg15KsHvzYWG262bRweGr3JooO1aW5ec5pYbz5Ew9VS5JKYS03w2UBSxf2maWdbPz2UFAxg0whw== + +"@swc/core-linux-arm-gnueabihf@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.101.tgz#8cd36328e794b3c42b6c8e578bb1f42e59ba0231" + integrity sha512-9xLKRb6zSzRGPqdz52Hy5GuB1lSjmLqa0lST6MTFads3apmx4Vgs8Y5NuGhx/h2I8QM4jXdLbpqQlifpzTlSSw== + +"@swc/core-linux-arm64-gnu@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.101.tgz#d15e3885eb13a1512ba62f00ce4f5bb19f710a0c" + integrity sha512-oE+r1lo7g/vs96Weh2R5l971dt+ZLuhaUX+n3BfDdPxNHfObXgKMjO7E+QS5RbGjv/AwiPCxQmbdCp/xN5ICJA== + +"@swc/core-linux-arm64-musl@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.101.tgz#851d4cc1079b091fee36f5f64335232210749d7a" + integrity sha512-OGjYG3H4BMOTnJWJyBIovCez6KiHF30zMIu4+lGJTCrxRI2fAjGLml3PEXj8tC3FMcud7U2WUn6TdG0/te2k6g== + +"@swc/core-linux-x64-gnu@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.101.tgz#3a2a7c584db2e05a798e28361440424914563fa3" + integrity sha512-/kBMcoF12PRO/lwa8Z7w4YyiKDcXQEiLvM+S3G9EvkoKYGgkkz4Q6PSNhF5rwg/E3+Hq5/9D2R+6nrkF287ihg== + +"@swc/core-linux-x64-musl@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.101.tgz#45d1d53945994f08e93703b8de24ccac88538d0c" + integrity sha512-kDN8lm4Eew0u1p+h1l3JzoeGgZPQ05qDE0czngnjmfpsH2sOZxVj1hdiCwS5lArpy7ktaLu5JdRnx70MkUzhXw== + +"@swc/core-win32-arm64-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.101.tgz#b2610b8354e5fbca7cc5be3f728e61b046227fa8" + integrity sha512-9Wn8TTLWwJKw63K/S+jjrZb9yoJfJwCE2RV5vPCCWmlMf3U1AXj5XuWOLUX+Rp2sGKau7wZKsvywhheWm+qndQ== + 
+"@swc/core-win32-ia32-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.101.tgz#c919175bb4cd5e9fcfa56fbd3708167c1d445c68" + integrity sha512-onO5KvICRVlu2xmr4//V2je9O2XgS1SGKpbX206KmmjcJhXN5EYLSxW9qgg+kgV5mip+sKTHTAu7IkzkAtElYA== + +"@swc/core-win32-x64-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.101.tgz#17743fe425caffc596fde5965c9c4cf9a48aa26a" + integrity sha512-T3GeJtNQV00YmiVw/88/nxJ/H43CJvFnpvBHCVn17xbahiVUOPOduh3rc9LgAkKiNt/aV8vU3OJR+6PhfMR7UQ== + +"@swc/core@^1.3.100": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.3.101.tgz#4e8f1583094a73c410e48a0bebdeccdc6c66d4a5" + integrity sha512-w5aQ9qYsd/IYmXADAnkXPGDMTqkQalIi+kfFf/MHRKTpaOL7DHjMXwPp/n8hJ0qNjRvchzmPtOqtPBiER50d8A== dependencies: "@swc/counter" "^0.1.1" "@swc/types" "^0.1.5" optionalDependencies: - "@swc/core-darwin-arm64" "1.3.93" - "@swc/core-darwin-x64" "1.3.93" - "@swc/core-linux-arm-gnueabihf" "1.3.93" - "@swc/core-linux-arm64-gnu" "1.3.93" - "@swc/core-linux-arm64-musl" "1.3.93" - "@swc/core-linux-x64-gnu" "1.3.93" - "@swc/core-linux-x64-musl" "1.3.93" - "@swc/core-win32-arm64-msvc" "1.3.93" - "@swc/core-win32-ia32-msvc" "1.3.93" - "@swc/core-win32-x64-msvc" "1.3.93" + "@swc/core-darwin-arm64" "1.3.101" + "@swc/core-darwin-x64" "1.3.101" + "@swc/core-linux-arm-gnueabihf" "1.3.101" + "@swc/core-linux-arm64-gnu" "1.3.101" + "@swc/core-linux-arm64-musl" "1.3.101" + "@swc/core-linux-x64-gnu" "1.3.101" + "@swc/core-linux-x64-musl" "1.3.101" + "@swc/core-win32-arm64-msvc" "1.3.101" + "@swc/core-win32-ia32-msvc" "1.3.101" + "@swc/core-win32-x64-msvc" "1.3.101" "@swc/counter@^0.1.1": version "0.1.2" @@ -3384,13 +3454,6 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" integrity 
sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== -"@types/pbkdf2@^3.0.0": - version "3.1.0" - resolved "https://registry.npmjs.org/@types/pbkdf2/-/pbkdf2-3.1.0.tgz" - integrity sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ== - dependencies: - "@types/node" "*" - "@types/qs@^6.9.7": version "6.9.7" resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" @@ -3421,13 +3484,6 @@ resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.2.tgz#ed279a64fa438bb69f2480eda44937912bb7480a" integrity sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow== -"@types/secp256k1@^4.0.1": - version "4.0.2" - resolved "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.2.tgz" - integrity sha512-QMg+9v0bbNJ2peLuHRWxzmy0HRJIG6gFZNhaRSp7S3ggSbCCxiqQB2/ybvhXyhHOCequpNkrx7OavNhrWOsW0A== - dependencies: - "@types/node" "*" - "@types/semver@^7.5.0": version "7.5.2" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.2.tgz#31f6eec1ed7ec23f4f05608d3a2d381df041f564" @@ -3665,19 +3721,19 @@ "@typescript-eslint/types" "6.7.2" eslint-visitor-keys "^3.4.1" -"@vitest/browser@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@vitest/browser/-/browser-1.0.1.tgz#d908e8015c5a449e0db28636c6afb969a8be9fcf" - integrity sha512-zKJvgfZMzahaFrIS5fbYnP2We+KRPJQUfog4mjOCOOVpLbk5DWtDD15XPYKaIY2IydD0ir0aCPrlcKlWGrcNww== +"@vitest/browser@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/browser/-/browser-1.1.0.tgz#b3c3e06d04506309a1e163103e1f65ee1391c262" + integrity sha512-59Uwoiw/zAQPmqgIKrzev8HNfeNlD8Q/nDyP9Xqg1D3kaM0tcOT/wk5RnZFW5f0JdguK0c1+vSeOPUSrOja1hQ== dependencies: estree-walker "^3.0.3" magic-string "^0.30.5" sirv "^2.0.3" -"@vitest/coverage-v8@^1.0.1": - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/@vitest/coverage-v8/-/coverage-v8-1.0.1.tgz#b1249ca6e8f2617e56c7a15caa546e8b1abae4c7" - integrity sha512-Z4a7ig4VjUCT/P+LRB3IZrBRXb9xWRUM8rSBH9cKgfrU1Oe01/K2WJKtGshOnQwXZoSfQtwCGpbnHmB/qJwjcw== +"@vitest/coverage-v8@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/coverage-v8/-/coverage-v8-1.1.0.tgz#bc0bbb99fcb608f72794701a86302ff3aabbc125" + integrity sha512-kHQRk70vTdXAyQY2C0vKOHPyQD/R6IUzcGdO4vCuyr4alE5Yg1+Sk2jSdjlIrTTXdcNEs+ReWVM09mmSFJpzyQ== dependencies: "@ampproject/remapping" "^2.2.1" "@bcoe/v8-coverage" "^0.2.3" @@ -3693,57 +3749,57 @@ test-exclude "^6.0.0" v8-to-istanbul "^9.2.0" -"@vitest/expect@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-1.0.2.tgz#7fc5ee3fe0e649f5a5e3df1a9744efe0163d1237" - integrity sha512-mAIo/8uddSWkjQMLFcjqZP3WmkwvvN0OtlyZIu33jFnwme3vZds8m8EDMxtj+Uzni2DwtPfHNjJcTM8zTV1f4A== +"@vitest/expect@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-1.1.0.tgz#f58eef7de090ad65f30bb93ec54fa9f94c9d1d5d" + integrity sha512-9IE2WWkcJo2BR9eqtY5MIo3TPmS50Pnwpm66A6neb2hvk/QSLfPXBz2qdiwUOQkwyFuuXEUj5380CbwfzW4+/w== dependencies: - "@vitest/spy" "1.0.2" - "@vitest/utils" "1.0.2" + "@vitest/spy" "1.1.0" + "@vitest/utils" "1.1.0" chai "^4.3.10" -"@vitest/runner@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-1.0.2.tgz#aad21c03fdcd1f380564fad37be7d5a2feb2f733" - integrity sha512-ZcHJXPT2kg/9Hc4fNkCbItlsgZSs3m4vQbxB8LCSdzpbG85bExCmSvu6K9lWpMNdoKfAr1Jn0BwS9SWUcGnbTQ== +"@vitest/runner@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-1.1.0.tgz#b3bf60f4a78f4324ca09811dd0f87b721a96b534" + integrity sha512-zdNLJ00pm5z/uhbWF6aeIJCGMSyTyWImy3Fcp9piRGvueERFlQFbUwCpzVce79OLm2UHk9iwaMSOaU9jVHgNVw== dependencies: - "@vitest/utils" "1.0.2" + "@vitest/utils" "1.1.0" p-limit "^5.0.0" pathe "^1.1.1" -"@vitest/snapshot@1.0.2": - version "1.0.2" - 
resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-1.0.2.tgz#df11b066c9593e3539640a41f38452a6b5889da1" - integrity sha512-9ClDz2/aV5TfWA4reV7XR9p+hE0e7bifhwxlURugj3Fw0YXeTFzHmKCNEHd6wOIFMfthbGGwhlq7TOJ2jDO4/g== +"@vitest/snapshot@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-1.1.0.tgz#b9924e4303382b43bb2c31061b173e69a6fb3437" + integrity sha512-5O/wyZg09V5qmNmAlUgCBqflvn2ylgsWJRRuPrnHEfDNT6tQpQ8O1isNGgo+VxofISHqz961SG3iVvt3SPK/QQ== dependencies: magic-string "^0.30.5" pathe "^1.1.1" pretty-format "^29.7.0" -"@vitest/spy@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-1.0.2.tgz#c28205427e77e589e3f0e6017f55d1c5b9defee3" - integrity sha512-YlnHmDntp+zNV3QoTVFI5EVHV0AXpiThd7+xnDEbWnD6fw0TH/J4/+3GFPClLimR39h6nA5m0W4Bjm5Edg4A/A== +"@vitest/spy@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-1.1.0.tgz#7f40697e4fc217ac8c3cc89a865d1751b263f561" + integrity sha512-sNOVSU/GE+7+P76qYo+VXdXhXffzWZcYIPQfmkiRxaNCSPiLANvQx5Mx6ZURJ/ndtEkUJEpvKLXqAYTKEY+lTg== dependencies: tinyspy "^2.2.0" -"@vitest/utils@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-1.0.2.tgz#fbc483a62d13a02fa4e2b470fbf565fdd616a242" - integrity sha512-GPQkGHAnFAP/+seSbB9pCsj339yRrMgILoI5H2sPevTLCYgBq0VRjF8QSllmnQyvf0EontF6KUIt2t5s2SmqoQ== +"@vitest/utils@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-1.1.0.tgz#d177a5f41bdb484bbb43c8d73a77ca782df068b5" + integrity sha512-z+s510fKmYz4Y41XhNs3vcuFTFhcij2YF7F8VQfMEYAAUfqQh0Zfg7+w9xdgFGhPf3tX3TicAe+8BDITk6ampQ== dependencies: diff-sequences "^29.6.3" loupe "^2.3.7" pretty-format "^29.7.0" -"@wdio/config@8.24.12": - version "8.24.12" - resolved "https://registry.yarnpkg.com/@wdio/config/-/config-8.24.12.tgz#07d30aafcf0ef476e9930623b9c8e0f986943d00" - integrity 
sha512-3HW7qG1rIHzOIybV6oHR1CqLghsN0G3Xzs90ZciGL8dYhtcLtYCHwuWmBw4mkaB5xViU4AmZDuj7ChiG8Cr6Qw== +"@wdio/config@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/config/-/config-8.27.0.tgz#c738d8108b5161cf3f80bb34d0e1f4d700b1a9ce" + integrity sha512-zYM5daeiBVVAbQj0ASymAt0RUsocLVIwKiUHNa8gg/1GsZnztGjetXExSp1gXlxtMVM5xWUSKjh6ceFK79gWDQ== dependencies: "@wdio/logger" "8.24.12" - "@wdio/types" "8.24.12" - "@wdio/utils" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" decamelize "^6.0.0" deepmerge-ts "^5.0.0" glob "^10.2.2" @@ -3781,21 +3837,21 @@ dependencies: "@types/node" "^20.1.0" -"@wdio/types@8.24.12": - version "8.24.12" - resolved "https://registry.yarnpkg.com/@wdio/types/-/types-8.24.12.tgz#c7a182ecc7effdd8ed7ea1967567a84da2c89100" - integrity sha512-SaD3OacDiW06DvSgAQ7sDBbpiI9qZRg7eoVYeBg3uSGVtUq84vTETRhhV7D6xTC00IqZu+mmN2TY5/q+7Gqy7w== +"@wdio/types@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/types/-/types-8.27.0.tgz#ef2e3a9ae083f08ee5fe5bf9e5dfc70cc55cebcb" + integrity sha512-LbP9FKh8r0uW9/dKhTIUCC1Su8PsP9TmzGKXkWt6/IMacgJiB/zW3u1CgyaLw9lG0UiQORHGoeJX9zB2HZAh4w== dependencies: "@types/node" "^20.1.0" -"@wdio/utils@8.24.12": - version "8.24.12" - resolved "https://registry.yarnpkg.com/@wdio/utils/-/utils-8.24.12.tgz#4d4e03d62728b181f44c05584f3988659c6c7a38" - integrity sha512-uzwZyBVgqz0Wz1KL3aOUaQsxT8TNkzxti4NNTSMrU256qAPqc/n75rB7V73QASapCMpy70mZZTsuPgQYYj4ytQ== +"@wdio/utils@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/utils/-/utils-8.27.0.tgz#6cb9b29649b4e301a959a8e8aea831edec635d55" + integrity sha512-4BY+JBQssVn003P5lA289uDMie3LtGinHze5btkcW9timB6VaU+EeZS4eKTPC0pziizLhteVvXYxv3YTpeeRfA== dependencies: "@puppeteer/browsers" "^1.6.0" "@wdio/logger" "8.24.12" - "@wdio/types" "8.24.12" + "@wdio/types" "8.27.0" decamelize "^6.0.0" deepmerge-ts "^5.1.0" edgedriver "^5.3.5" @@ -4117,16 +4173,6 @@ ajv-keywords@^3.5.2: resolved 
"https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.12.2: - version "6.12.3" - resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.3.tgz" - integrity sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" @@ -4554,19 +4600,10 @@ aws-sdk@^2.932.0: uuid "3.3.2" xml2js "0.4.19" -axios@^1.0.0: - version "1.3.5" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.3.5.tgz#e07209b39a0d11848e3e341fa087acd71dadc542" - integrity sha512-glL/PvG/E+xCWwV8S6nCHcrfg1exGx7vxyUIivIA1iL7BIh6bePylCfVHwp6k13ao7SATxB6imau2kqY+I67kw== - dependencies: - follow-redirects "^1.15.0" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - -axios@^1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.3.4.tgz#f5760cefd9cfb51fd2481acf88c05f67c4523024" - integrity sha512-toYm+Bsyl6VC5wSkfkbbNB6ROv7KY93PEBBL6xyDczaIHasAiv4wPqQ/c4RjoQzipxRD2W5g21cOqQulZ7rHwQ== +axios@^1.0.0, axios@^1.3.4: + version "1.6.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" + integrity sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg== dependencies: follow-redirects "^1.15.0" form-data "^4.0.0" @@ -4590,13 +4627,6 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base-x@^3.0.2: - version "3.0.8" - resolved 
"https://registry.npmjs.org/base-x/-/base-x-3.0.8.tgz" - integrity sha512-Rl/1AWP4J/zRrk54hhlxH4drNxPJXYUaKffODVI53/dAsV4t9fBxyxYKAVPU1XBHxYwOWP9h9H0hM2MVw4YfJA== - dependencies: - safe-buffer "^5.0.1" - base64-js@^1.0.2, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" @@ -4732,11 +4762,6 @@ bl@^5.0.0: inherits "^2.0.4" readable-stream "^3.4.0" -blakejs@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/blakejs/-/blakejs-1.1.0.tgz" - integrity sha1-ad+S75U6qIylGjLfarHFShVfx6U= - bls-eth-wasm@^0.4.8: version "0.4.8" resolved "https://registry.npmjs.org/bls-eth-wasm/-/bls-eth-wasm-0.4.8.tgz" @@ -4841,7 +4866,7 @@ browser-stdout@1.3.1: resolved "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== -browserify-aes@^1.0.0, browserify-aes@^1.0.4, browserify-aes@^1.2.0: +browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" resolved "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== @@ -4912,22 +4937,6 @@ browserslist@^4.14.5: node-releases "^2.0.6" update-browserslist-db "^1.0.4" -bs58@^4.0.0: - version "4.0.1" - resolved "https://registry.npmjs.org/bs58/-/bs58-4.0.1.tgz" - integrity sha1-vhYedsNU9veIrkBx9j806MTwpCo= - dependencies: - base-x "^3.0.2" - -bs58check@^2.1.2: - version "2.1.2" - resolved "https://registry.npmjs.org/bs58check/-/bs58check-2.1.2.tgz" - integrity sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA== - dependencies: - bs58 "^4.0.0" - create-hash "^1.1.0" - safe-buffer "^5.1.2" - buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3: version "0.2.13" resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" @@ 
-4943,15 +4952,6 @@ buffer-indexof-polyfill@~1.0.0: resolved "https://registry.yarnpkg.com/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz#d2732135c5999c64b277fcf9b1abe3498254729c" integrity sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A== -"buffer-polyfill@npm:buffer@^6.0.3", buffer@^6.0.3: - name buffer-polyfill - version "6.0.3" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" - integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.2.1" - buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz" @@ -4981,6 +4981,15 @@ buffer@^5.2.1, buffer@^5.4.3, buffer@^5.5.0, buffer@^5.7.1: base64-js "^1.3.1" ieee754 "^1.1.13" +buffer@^6.0.3: + name buffer-polyfill + version "6.0.3" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + buffers@~0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" @@ -5020,13 +5029,6 @@ bundle-name@^3.0.0: dependencies: run-applescript "^5.0.0" -busboy@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" - integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== - dependencies: - streamsearch "^1.1.0" - byline@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1" @@ -5268,7 +5270,7 @@ chalk@4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^2.0.0, chalk@^2.4.1: 
+chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -6313,10 +6315,10 @@ devtools-protocol@0.0.1147663: resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1147663.tgz#4ec5610b39a6250d1f87e6b9c7e16688ed0ac78e" integrity sha512-hyWmRrexdhbZ1tcJUGpO95ivbRhWXz++F4Ko+n21AY5PNln2ovoJw+8ZMNDTtip+CNFQfrtLVh/w4009dXO/eQ== -devtools-protocol@^0.0.1233178: - version "0.0.1233178" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1233178.tgz#dfc83cdc487c0cae8f059047293be9d6267a19f9" - integrity sha512-jmMfyaqlzddwmDaSR1AQ+5ek+f7rupZdxKuPdkRcoxrZoF70Idg/4dTgXA08TLPmwAwB54gh49Wm2l/gRM0eUg== +devtools-protocol@^0.0.1237913: + version "0.0.1237913" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1237913.tgz#ac0208ff0cbe9c53646753576b5c1d788e3caa38" + integrity sha512-Pxtmz2ZIqBkpU82HaIdsvCQBG94yTC4xajrEsWx9p38QKEfBCJktSazsHkrjf9j3dVVNPhg5LR21F6KWeXpjiQ== dezalgo@^1.0.4: version "1.0.4" @@ -6373,9 +6375,9 @@ dir-glob@^3.0.1: path-type "^4.0.0" dns-over-http-resolver@^2.1.0, dns-over-http-resolver@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/dns-over-http-resolver/-/dns-over-http-resolver-2.1.2.tgz#fb478af244dd4fed5e0f798a3e6426d92730378c" - integrity sha512-Bjbf6aZjr3HMnwGslZnoW3MJVqgbTsh39EZWpikx2yLl9xEjw4eZhlOHCFhkOu89zoWaS4rqe2Go53TXW4Byiw== + version "2.1.3" + resolved "https://registry.yarnpkg.com/dns-over-http-resolver/-/dns-over-http-resolver-2.1.3.tgz#bb7f2e10cc18d960339a6e30e21b8c1d99be7b38" + integrity sha512-zjRYFhq+CsxPAouQWzOsxNMvEN+SHisjzhX8EMxd2Y0EG3thvn6wXQgMJLnTDImkhe4jhLbOQpXtL10nALBOSA== dependencies: debug "^4.3.1" native-fetch "^4.0.2" @@ -6529,7 +6531,7 @@ electron@^26.2.2: "@types/node" "^18.11.18" extract-zip "^2.0.1" 
-elliptic@6.5.4, elliptic@^6.5.2, elliptic@^6.5.3: +elliptic@6.5.4, elliptic@^6.5.3: version "6.5.4" resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== @@ -7182,28 +7184,7 @@ esutils@^2.0.2: resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -ethereum-cryptography@^0.1.3: - version "0.1.3" - resolved "https://registry.npmjs.org/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz" - integrity sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ== - dependencies: - "@types/pbkdf2" "^3.0.0" - "@types/secp256k1" "^4.0.1" - blakejs "^1.1.0" - browserify-aes "^1.2.0" - bs58check "^2.1.2" - create-hash "^1.2.0" - create-hmac "^1.1.7" - hash.js "^1.1.7" - keccak "^3.0.0" - pbkdf2 "^3.0.17" - randombytes "^2.1.0" - safe-buffer "^5.1.2" - scrypt-js "^3.0.0" - secp256k1 "^4.0.1" - setimmediate "^1.0.5" - -ethereum-cryptography@^1.2.0: +ethereum-cryptography@^1.0.0, ethereum-cryptography@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-1.2.0.tgz#5ccfa183e85fdaf9f9b299a79430c044268c9b3a" integrity sha512-6yFQC9b5ug6/17CQpCyE3k9eKBMdhyVjzUy1WkiuY/E4vj/SXDBbCw8QEIaXqf0Mf2SnY6RmpDcwlUmBSS0EJw== @@ -7720,15 +7701,10 @@ fn.name@1.x.x: resolved "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.0.0: - version "1.15.1" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" - integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== - -follow-redirects@^1.15.0: - version "1.15.2" - resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== +follow-redirects@^1.0.0, follow-redirects@^1.15.0: + version "1.15.4" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.4.tgz#cdc7d308bf6493126b17ea2191ea0ccf3e535adf" + integrity sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw== for-each@^0.3.3: version "0.3.3" @@ -7995,12 +7971,7 @@ get-caller-file@^2.0.1, get-caller-file@^2.0.5: resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-func-name@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz" - integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= - -get-func-name@^2.0.2: +get-func-name@^2.0.0, get-func-name@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41" integrity sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ== @@ -8470,7 +8441,7 @@ hash-base@^3.0.0: readable-stream "^3.6.0" safe-buffer "^5.2.0" -hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7: +hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== @@ -9944,14 +9915,6 @@ karma@^6.4.2: ua-parser-js "^0.7.30" yargs "^16.1.1" -keccak@^3.0.0: - version "3.0.1" - resolved "https://registry.npmjs.org/keccak/-/keccak-3.0.1.tgz" - integrity 
sha512-epq90L9jlFWCW7+pQa6JOnKn2Xgl2mtI664seYR6MHskvI9agt7AnDqmAlp9TqU4/caMYbA08Hi5DMZAl5zdkA== - dependencies: - node-addon-api "^2.0.0" - node-gyp-build "^4.2.0" - keypress@0.1.x: version "0.1.0" resolved "https://registry.yarnpkg.com/keypress/-/keypress-0.1.0.tgz#4a3188d4291b66b4f65edb99f806aa9ae293592a" @@ -11205,11 +11168,6 @@ nise@^5.1.4: just-extend "^4.0.2" path-to-regexp "^1.7.0" -node-addon-api@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" - integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== - node-addon-api@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" @@ -11251,11 +11209,6 @@ node-forge@^1.1.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== -node-gyp-build@^4.2.0: - version "4.2.3" - resolved "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz" - integrity sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg== - node-gyp-build@^4.3.0: version "4.5.0" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" @@ -12305,7 +12258,7 @@ pathval@^1.1.1: resolved "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz" integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== -pbkdf2@^3.0.17, pbkdf2@^3.0.3, pbkdf2@^3.0.9: +pbkdf2@^3.0.3, pbkdf2@^3.0.9: version "3.1.2" resolved "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz" integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== @@ -12503,6 +12456,14 @@ 
prom-client@^14.2.0: dependencies: tdigest "^0.1.1" +prom-client@^15.1.0: + version "15.1.0" + resolved "https://registry.yarnpkg.com/prom-client/-/prom-client-15.1.0.tgz#816a4a2128da169d0471093baeccc6d2f17a4613" + integrity sha512-cCD7jLTqyPdjEPBo/Xk4Iu8jxjuZgZJ3e/oET3L+ZwOuap/7Cw3dH/TJSsZKs1TQLZ2IHpIlRAKw82ef06kmMw== + dependencies: + "@opentelemetry/api" "^1.4.0" + tdigest "^0.1.1" + promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" @@ -13375,20 +13336,11 @@ schema-utils@^3.2.0: ajv "^6.12.5" ajv-keywords "^3.5.2" -scrypt-js@3.0.1, scrypt-js@^3.0.0: +scrypt-js@3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/scrypt-js/-/scrypt-js-3.0.1.tgz" integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== -secp256k1@^4.0.1: - version "4.0.2" - resolved "https://registry.npmjs.org/secp256k1/-/secp256k1-4.0.2.tgz" - integrity sha512-UDar4sKvWAksIlfX3xIaQReADn+WFnHvbVujpcbr+9Sf/69odMwy2MUsz5CKLQgX9nsIyrjuxL2imVyoNHa3fg== - dependencies: - elliptic "^6.5.2" - node-addon-api "^2.0.0" - node-gyp-build "^4.2.0" - secure-json-parse@^2.5.0: version "2.7.0" resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.7.0.tgz#5a5f9cd6ae47df23dba3151edd06855d47e09862" @@ -13948,11 +13900,6 @@ streamroller@^3.1.1: debug "^4.3.4" fs-extra "^8.1.0" -streamsearch@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" - integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== - streamx@^2.15.0: version "2.15.1" resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.15.1.tgz#396ad286d8bc3eeef8f5cea3f029e81237c024c6" @@ -14909,11 +14856,11 @@ undici-types@~5.26.4: integrity 
sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== undici@^5.12.0: - version "5.22.1" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.22.1.tgz#877d512effef2ac8be65e695f3586922e1a57d7b" - integrity sha512-Ji2IJhFXZY0x/0tVBXeQwgPlLWw13GVzpsWPQ3rV50IFMMof2I55PZZxtm4P6iNq+L5znYN9nSTAq0ZyE6lSJw== + version "5.28.2" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.2.tgz#fea200eac65fc7ecaff80a023d1a0543423b4c91" + integrity sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w== dependencies: - busboy "^1.6.0" + "@fastify/busboy" "^2.0.0" unique-filename@^1.1.1: version "1.1.1" @@ -15112,7 +15059,7 @@ uuid@^3.3.2, uuid@^3.3.3: resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^9.0.0: +uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== @@ -15180,10 +15127,10 @@ vary@^1: resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= -vite-node@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-1.0.2.tgz#5e6096e31b851f245ccbd353bf3939130dfd0224" - integrity sha512-h7BbMJf46fLvFW/9Ygo3snkIBEHFh6fHpB4lge98H5quYrDhPFeI3S0LREz328uqPWSnii2yeJXktQ+Pmqk5BQ== +vite-node@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-1.1.0.tgz#0ebcb7398692e378954786dfba28e905e28a76b4" + integrity sha512-jV48DDUxGLEBdHCQvxL1mEh7+naVy+nhUUUaPAZLd3FJgXuxQiewHcfeZebbJ6onDqNGkP4r3MhQ342PRlG81Q== dependencies: cac "^6.7.14" debug "^4.3.4" @@ -15191,24 +15138,22 @@ vite-node@1.0.2: picocolors "^1.0.0" vite "^5.0.0" -vite-plugin-node-polyfills@^0.17.0: - version 
"0.17.0" - resolved "https://registry.yarnpkg.com/vite-plugin-node-polyfills/-/vite-plugin-node-polyfills-0.17.0.tgz#1044a4955174ddaeedbc3679b179e1efac9da006" - integrity sha512-iPmPn7376e5u6QvoTSJa16hf5Q0DFwHFXJk2uYpsNlmI3JdPms7hWyh55o+OysJ5jo9J5XPhLC9sMOYifwFd1w== +vite-plugin-node-polyfills@^0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/vite-plugin-node-polyfills/-/vite-plugin-node-polyfills-0.18.0.tgz#2ad147960f7a35dbbb1c9f9c1ae928bd0f438c1e" + integrity sha512-zkdLD3gpOhLFyxYRMJ5apk0RcODhomuS3XQgExowiX8naoc251JfcP3toqnfDlMdF0xuPYahre/H38xAcq8ApA== dependencies: "@rollup/plugin-inject" "^5.0.5" - buffer-polyfill "npm:buffer@^6.0.3" node-stdlib-browser "^1.2.0" - process "^0.11.10" -vite-plugin-top-level-await@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.3.1.tgz#7e7293be01489b508692627529c0a3b3218a23a3" - integrity sha512-55M1h4NAwkrpxPNOJIBzKZFihqLUzIgnElLSmPNPMR2Fn9+JHKaNg3sVX1Fq+VgvuBksQYxiD3OnwQAUu7kaPQ== +vite-plugin-top-level-await@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.4.1.tgz#607dfe084157550fa33df18062b99ceea774cd9c" + integrity sha512-hogbZ6yT7+AqBaV6lK9JRNvJDn4/IJvHLu6ET06arNfo0t2IsyCaon7el9Xa8OumH+ESuq//SDf8xscZFE0rWw== dependencies: - "@rollup/plugin-virtual" "^3.0.1" - "@swc/core" "^1.3.10" - uuid "^9.0.0" + "@rollup/plugin-virtual" "^3.0.2" + "@swc/core" "^1.3.100" + uuid "^9.0.1" vite@^5.0.0: version "5.0.6" @@ -15226,16 +15171,16 @@ vitest-when@^0.3.0: resolved "https://registry.yarnpkg.com/vitest-when/-/vitest-when-0.3.0.tgz#663d4274f1e7302bd24ec00dda8269d20b2eff04" integrity sha512-wYfmzd+GkvdNNhbeb/40PnKpetUP5I7qxvdbu1OAXRXaLrnLfSrJTa/dMIbqqrc8SA0vhonpw5p0RHDXwhDM1Q== -vitest@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/vitest/-/vitest-1.0.2.tgz#a7c3bf41bd5ef8c1c781c98c84a749d26b31f944" - integrity 
sha512-F3NVwwpXfRSDnJmyv+ALPwSRVt0zDkRRE18pwUHSUPXAlWQ47rY1dc99ziMW5bBHyqwK2ERjMisLNoef64qk9w== - dependencies: - "@vitest/expect" "1.0.2" - "@vitest/runner" "1.0.2" - "@vitest/snapshot" "1.0.2" - "@vitest/spy" "1.0.2" - "@vitest/utils" "1.0.2" +vitest@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/vitest/-/vitest-1.1.0.tgz#47ba67c564aa137b53b0197d2a992908e7f5b04d" + integrity sha512-oDFiCrw7dd3Jf06HoMtSRARivvyjHJaTxikFxuqJjO76U436PqlVw1uLn7a8OSPrhSfMGVaRakKpA2lePdw79A== + dependencies: + "@vitest/expect" "1.1.0" + "@vitest/runner" "1.1.0" + "@vitest/snapshot" "1.1.0" + "@vitest/spy" "1.1.0" + "@vitest/utils" "1.1.0" acorn-walk "^8.3.0" cac "^6.7.14" chai "^4.3.10" @@ -15250,7 +15195,7 @@ vitest@^1.0.2: tinybench "^2.5.1" tinypool "^0.8.1" vite "^5.0.0" - vite-node "1.0.2" + vite-node "1.1.0" why-is-node-running "^2.2.2" vm-browserify@^1.0.1: @@ -15510,40 +15455,40 @@ web3@^4.0.3: web3-utils "^4.0.3" web3-validator "^1.0.2" -webdriver@8.24.12: - version "8.24.12" - resolved "https://registry.yarnpkg.com/webdriver/-/webdriver-8.24.12.tgz#fd443550f2fa25498af8d6a7a1261dc3d6c4f462" - integrity sha512-03DQIClHoaAqTsmDkxGwo4HwHfkn9LzJ1wfNyUerzKg8DnyXeiT6ILqj6EXLfsvh5zddU2vhYGLFXSerPgkuOQ== +webdriver@8.27.0: + version "8.27.0" + resolved "https://registry.yarnpkg.com/webdriver/-/webdriver-8.27.0.tgz#27e936a03c08b2d72ed6bd01a6a46f8189ef0abf" + integrity sha512-n1IA+rR3u84XxU9swiKUM06BkEC0GDimfZkBML57cny+utQOUbdM/mBpqCUnkWX/RBz/p2EfHdKNyOs3/REaog== dependencies: "@types/node" "^20.1.0" "@types/ws" "^8.5.3" - "@wdio/config" "8.24.12" + "@wdio/config" "8.27.0" "@wdio/logger" "8.24.12" "@wdio/protocols" "8.24.12" - "@wdio/types" "8.24.12" - "@wdio/utils" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" deepmerge-ts "^5.1.0" got "^12.6.1" ky "^0.33.0" ws "^8.8.0" -webdriverio@^8.24.12: - version "8.24.12" - resolved "https://registry.yarnpkg.com/webdriverio/-/webdriverio-8.24.12.tgz#05a2107ae8a3927e1a01503a05fc2050fa4e06bd" - integrity 
sha512-Ddu0NNRMVkTzRzqvm3m0wt2eLUn+Plz2Cj+1QXDnVpddYJvk9J3elZC2hqNyscEtecQ+h2y3r36OcJqkl9jPag== +webdriverio@^8.27.0: + version "8.27.0" + resolved "https://registry.yarnpkg.com/webdriverio/-/webdriverio-8.27.0.tgz#4068b0164ab66bfb62d6eb6b8d97df2d140922d5" + integrity sha512-Qh5VCiBjEmxnmXcL1QEFoDzFqTtaWKrXriuU5G0yHKCModGAt2G7IHTkAok3CpmkVJfZpEvY630aP1MvgDtFhw== dependencies: "@types/node" "^20.1.0" - "@wdio/config" "8.24.12" + "@wdio/config" "8.27.0" "@wdio/logger" "8.24.12" "@wdio/protocols" "8.24.12" "@wdio/repl" "8.24.12" - "@wdio/types" "8.24.12" - "@wdio/utils" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" archiver "^6.0.0" aria-query "^5.0.0" css-shorthand-properties "^1.1.1" css-value "^0.0.1" - devtools-protocol "^0.0.1233178" + devtools-protocol "^0.0.1237913" grapheme-splitter "^1.0.2" import-meta-resolve "^4.0.0" is-plain-obj "^4.1.0" @@ -15555,7 +15500,7 @@ webdriverio@^8.24.12: resq "^1.9.1" rgb2hex "0.2.5" serialize-error "^11.0.1" - webdriver "8.24.12" + webdriver "8.27.0" webidl-conversions@^3.0.0: version "3.0.1"