diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 3ff9018372c9..6e27a89c3044 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -4,21 +4,41 @@ on:
push:
branches:
- stable
+ workflow_dispatch:
+ inputs:
+ ref:
+ description: 'Ref to deploy, defaults to `unstable`'
+ required: false
+ default: 'unstable'
+ type: string
jobs:
docs:
runs-on: buildjet-4vcpu-ubuntu-2204
+ env:
+ DEPLOY_REF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || 'stable' }}
steps:
- # - Uses YAML anchors in the future
+ # Log out the ref being deployed
+ - name: Log Deployment Ref
+ if: github.event_name == 'workflow_dispatch'
+ run: |
+ echo "Deploying ref: $DEPLOY_REF"
+
+ # Checkout the correct ref being deployed
- uses: actions/checkout@v3
+ with:
+ ref: ${{ env.DEPLOY_REF }}
+
- uses: actions/setup-node@v3
with:
node-version: 20
check-latest: true
cache: yarn
+
- name: Node.js version
id: node
run: echo "v8CppApiVersion=$(node --print "process.versions.modules")" >> $GITHUB_OUTPUT
+
- name: Restore dependencies
uses: actions/cache@master
id: cache-deps
@@ -27,13 +47,14 @@ jobs:
node_modules
packages/*/node_modules
key: ${{ runner.os }}-${{ steps.node.outputs.v8CppApiVersion }}-${{ hashFiles('**/yarn.lock', '**/package.json') }}
+
- name: Install & build
if: steps.cache-deps.outputs.cache-hit != 'true'
run: yarn install --frozen-lockfile && yarn build
+
- name: Build
run: yarn build
if: steps.cache-deps.outputs.cache-hit == 'true'
- #
- name: Build and collect docs
run: yarn build:docs
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 76d62ae576be..43ceee898d85 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -271,9 +271,9 @@ jobs:
key: ${{ runner.os }}-node-${{ matrix.node }}-${{ github.sha }}
fail-on-cache-miss: true
- name: Install Chrome browser
- run: npx @puppeteer/browsers install chrome
+        run: npx @puppeteer/browsers install chrome@latest --path /tmp
- name: Install Firefox browser
- run: npx @puppeteer/browsers install firefox
+ run: npx @puppeteer/browsers install firefox@latest --path /tmp
- name: Browser tests
run: |
export DISPLAY=':99.0'
diff --git a/.gitignore b/.gitignore
index a85d4af7794e..a0deed473c4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@ packages/**/typedocs
docs/pages/**/*-cli.md
docs/pages/assets
docs/pages/images
+docs/pages/security.md
docs/pages/lightclient-prover/lightclient.md
docs/pages/lightclient-prover/prover.md
docs/pages/api/api-reference.md
diff --git a/dashboards/lodestar_block_processor.json b/dashboards/lodestar_block_processor.json
index d1a856f2f71d..8e68d611cc0d 100644
--- a/dashboards/lodestar_block_processor.json
+++ b/dashboards/lodestar_block_processor.json
@@ -110,6 +110,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -192,6 +193,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -276,6 +278,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -359,6 +362,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -442,6 +446,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -525,6 +530,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -607,6 +613,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -758,7 +765,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -862,7 +869,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -942,7 +949,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -987,6 +994,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -1072,6 +1080,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1148,22 +1157,22 @@
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
- "fillOpacity": 22,
- "gradientMode": "opacity",
+ "fillOpacity": 0,
+ "gradientMode": "none",
"hideFrom": {
- "graph": false,
"legend": false,
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
- "pointSize": 4,
+ "pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
- "showPoints": "never",
- "spanNulls": true,
+ "showPoints": "auto",
+ "spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
@@ -1183,21 +1192,19 @@
"x": 0,
"y": 50
},
- "id": 524,
+ "id": 534,
"options": {
- "graph": {},
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
- "showLegend": false
+ "showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
- "pluginVersion": "7.4.5",
"targets": [
{
"datasource": {
@@ -1205,15 +1212,14 @@
"uid": "${DS_PROMETHEUS}"
},
"editorMode": "code",
- "exemplar": false,
- "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])",
- "interval": "",
- "legendFormat": "epoch transition",
+ "expr": "rate(lodestar_stfn_epoch_transition_step_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_step_seconds_count[$rate_interval])",
+ "instant": false,
+ "legendFormat": "{{step}}",
"range": true,
"refId": "A"
}
],
- "title": "Epoch transition commit step avg time",
+ "title": "Epoch Transition By Steps",
"type": "timeseries"
},
{
@@ -1241,6 +1247,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1325,9 +1332,10 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
- "pointSize": 5,
+ "pointSize": 4,
"scaleDistribution": {
"type": "linear"
},
@@ -1342,25 +1350,9 @@
}
},
"mappings": [],
- "unit": "percentunit"
+ "unit": "s"
},
- "overrides": [
- {
- "matcher": {
- "id": "byName",
- "options": "process block time"
- },
- "properties": [
- {
- "id": "color",
- "value": {
- "fixedColor": "orange",
- "mode": "fixed"
- }
- }
- ]
- }
- ]
+ "overrides": []
},
"gridPos": {
"h": 8,
@@ -1368,7 +1360,7 @@
"x": 0,
"y": 58
},
- "id": 122,
+ "id": 524,
"options": {
"graph": {},
"legend": {
@@ -1389,14 +1381,16 @@
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
+ "editorMode": "code",
"exemplar": false,
- "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])",
+ "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])",
"interval": "",
- "legendFormat": "process block time",
+ "legendFormat": "epoch transition",
+ "range": true,
"refId": "A"
}
],
- "title": "Epoch transition utilization rate",
+ "title": "Epoch transition commit step avg time",
"type": "timeseries"
},
{
@@ -1424,6 +1418,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1523,6 +1518,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1540,20 +1536,19 @@
}
},
"mappings": [],
- "min": 0,
- "unit": "none"
+ "unit": "percentunit"
},
"overrides": [
{
"matcher": {
"id": "byName",
- "options": "number of epoch transition"
+ "options": "process block time"
},
"properties": [
{
"id": "color",
"value": {
- "fixedColor": "yellow",
+ "fixedColor": "orange",
"mode": "fixed"
}
}
@@ -1567,7 +1562,7 @@
"x": 0,
"y": 66
},
- "id": 124,
+ "id": 122,
"options": {
"graph": {},
"legend": {
@@ -1589,13 +1584,13 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": false,
- "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])",
+ "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])",
"interval": "",
- "legendFormat": "number of epoch transition",
+ "legendFormat": "process block time",
"refId": "A"
}
],
- "title": "Epoch transitions / epoch",
+ "title": "Epoch transition utilization rate",
"type": "timeseries"
},
{
@@ -1623,6 +1618,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1722,6 +1718,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1739,9 +1736,26 @@
}
},
"mappings": [],
- "unit": "s"
+ "min": 0,
+ "unit": "none"
},
- "overrides": []
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "number of epoch transition"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "yellow",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
},
"gridPos": {
"h": 8,
@@ -1749,7 +1763,7 @@
"x": 0,
"y": 74
},
- "id": 526,
+ "id": 124,
"options": {
"graph": {},
"legend": {
@@ -1770,15 +1784,14 @@
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
- "editorMode": "code",
- "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])",
+ "exemplar": false,
+ "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])",
"interval": "",
- "legendFormat": "__auto",
- "range": true,
+ "legendFormat": "number of epoch transition",
"refId": "A"
}
],
- "title": "State hash_tree_root avg time",
+ "title": "Epoch transitions / epoch",
"type": "timeseries"
},
{
@@ -1806,6 +1819,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1901,6 +1915,91 @@
"title": "State SSZ cache miss rate on preState",
"type": "timeseries"
},
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 22,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "graph": false,
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "unit": "s"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 82
+ },
+ "id": 526,
+ "options": {
+ "graph": {},
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
+ },
+ "tooltip": {
+ "mode": "multi",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "7.4.5",
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "editorMode": "code",
+ "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])",
+ "interval": "",
+ "legendFormat": "__auto",
+ "range": true,
+ "refId": "A"
+ }
+ ],
+ "title": "State hash_tree_root avg time",
+ "type": "timeseries"
+ },
{
"collapsed": false,
"datasource": {
@@ -1911,7 +2010,7 @@
"h": 1,
"w": 24,
"x": 0,
- "y": 82
+ "y": 90
},
"id": 92,
"panels": [],
@@ -1936,7 +2035,7 @@
"h": 3,
"w": 24,
"x": 0,
- "y": 83
+ "y": 91
},
"id": 154,
"options": {
@@ -1948,7 +2047,7 @@
"content": "Verifies signature sets in a thread pool of workers. Must ensure that signatures are verified fast and efficiently.",
"mode": "markdown"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -1989,6 +2088,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2014,7 +2114,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 86
+ "y": 94
},
"id": 94,
"options": {
@@ -2069,6 +2169,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2093,7 +2194,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 86
+ "y": 94
},
"id": 519,
"options": {
@@ -2150,6 +2251,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2175,7 +2277,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 94
+ "y": 102
},
"id": 151,
"options": {
@@ -2236,6 +2338,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2261,7 +2364,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 94
+ "y": 102
},
"id": 96,
"options": {
@@ -2322,6 +2425,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2347,7 +2451,7 @@
"h": 5,
"w": 12,
"x": 0,
- "y": 102
+ "y": 110
},
"id": 150,
"options": {
@@ -2408,6 +2512,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2433,7 +2538,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 102
+ "y": 110
},
"id": 95,
"options": {
@@ -2494,6 +2599,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2520,7 +2626,7 @@
"h": 6,
"w": 12,
"x": 0,
- "y": 107
+ "y": 115
},
"id": 148,
"options": {
@@ -2591,6 +2697,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2616,7 +2723,7 @@
"h": 7,
"w": 12,
"x": 12,
- "y": 110
+ "y": 118
},
"id": 147,
"options": {
@@ -2677,6 +2784,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2702,7 +2810,7 @@
"h": 5,
"w": 12,
"x": 0,
- "y": 113
+ "y": 121
},
"id": 98,
"options": {
@@ -2759,6 +2867,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2800,7 +2909,7 @@
"h": 7,
"w": 12,
"x": 12,
- "y": 117
+ "y": 125
},
"id": 153,
"options": {
@@ -2870,6 +2979,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2895,7 +3005,7 @@
"h": 6,
"w": 12,
"x": 0,
- "y": 118
+ "y": 126
},
"id": 97,
"options": {
@@ -2937,7 +3047,7 @@
"h": 1,
"w": 24,
"x": 0,
- "y": 124
+ "y": 132
},
"id": 309,
"panels": [],
@@ -2977,6 +3087,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3032,7 +3143,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 125
+ "y": 133
},
"id": 305,
"options": {
@@ -3088,6 +3199,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3128,7 +3240,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 125
+ "y": 133
},
"id": 307,
"options": {
@@ -3195,6 +3307,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3219,7 +3332,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 133
+ "y": 141
},
"id": 335,
"options": {
@@ -3286,6 +3399,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3310,7 +3424,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 133
+ "y": 141
},
"id": 334,
"options": {
@@ -3351,7 +3465,7 @@
"h": 1,
"w": 24,
"x": 0,
- "y": 141
+ "y": 149
},
"id": 136,
"panels": [],
@@ -3393,6 +3507,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3418,7 +3533,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 142
+ "y": 150
},
"id": 130,
"options": {
@@ -3477,6 +3592,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3517,7 +3633,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 142
+ "y": 150
},
"id": 140,
"options": {
@@ -3577,6 +3693,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3618,7 +3735,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 150
+ "y": 158
},
"id": 132,
"options": {
@@ -3701,6 +3818,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
@@ -3745,7 +3863,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 150
+ "y": 158
},
"id": 138,
"options": {
@@ -3817,6 +3935,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3866,7 +3985,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 158
+ "y": 166
},
"id": 531,
"options": {
@@ -3957,6 +4076,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3981,7 +4101,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 158
+ "y": 166
},
"id": 533,
"options": {
@@ -4026,7 +4146,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_block_production.json b/dashboards/lodestar_block_production.json
index b999e47a33d4..96ab44c6a550 100644
--- a/dashboards/lodestar_block_production.json
+++ b/dashboards/lodestar_block_production.json
@@ -54,180 +54,206 @@
"liveNow": false,
"panels": [
{
- "type": "timeseries",
- "title": "Full block production avg time with steps",
+ "collapsed": false,
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
+ "h": 1,
+ "w": 24,
"x": 0,
- "y": 1,
- "w": 12,
- "h": 8
+ "y": 0
},
+ "id": 166,
+ "panels": [],
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "refId": "A"
+ }
+ ],
+ "title": "Block Production",
+ "type": "row"
+ },
+ {
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 30,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "normal"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "unit": "s"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 1
+ },
"id": 546,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "multi",
+ "sort": "none"
+ }
+ },
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
- "refId": "proposerSlashing",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])",
- "range": true,
- "instant": false,
- "hide": false,
"editorMode": "code",
+ "exemplar": false,
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"proposerSlashing\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
"legendFormat": "{{step}}",
- "exemplar": false
+ "range": true,
+ "refId": "proposerSlashing"
},
{
- "refId": "attesterSlashings",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"attesterSlashings\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "attesterSlashings"
},
{
- "refId": "voluntaryExits",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"voluntaryExits\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "voluntaryExits"
},
{
- "refId": "blsToExecutionChanges",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"blsToExecutionChanges\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "blsToExecutionChanges"
},
{
- "refId": "attestations",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"attestations\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"attestations\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "attestations"
},
{
- "refId": "eth1DataAndDeposits",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "eth1DataAndDeposits"
},
{
- "refId": "syncAggregate",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"syncAggregate\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"syncAggregate\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "syncAggregate"
},
{
- "refId": "executionPayload",
- "expr": "rate(beacon_block_production_execution_steps_seconds{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds{step=\"executionPayload\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_execution_steps_seconds_sum{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_execution_steps_seconds_count{step=\"executionPayload\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "executionPayload"
}
],
- "options": {
- "tooltip": {
- "mode": "multi",
- "sort": "none"
- },
- "legend": {
- "showLegend": true,
- "displayMode": "list",
- "placement": "bottom",
- "calcs": []
- }
- },
- "fieldConfig": {
- "defaults": {
- "custom": {
- "drawStyle": "line",
- "lineInterpolation": "linear",
- "barAlignment": 0,
- "lineWidth": 1,
- "fillOpacity": 30,
- "gradientMode": "opacity",
- "spanNulls": false,
- "insertNulls": false,
- "showPoints": "auto",
- "pointSize": 5,
- "stacking": {
- "mode": "normal",
- "group": "A"
- },
- "axisPlacement": "auto",
- "axisLabel": "",
- "axisColorMode": "text",
- "scaleDistribution": {
- "type": "linear"
- },
- "axisCenteredZero": false,
- "hideFrom": {
- "tooltip": false,
- "viz": false,
- "legend": false
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "color": {
- "mode": "palette-classic"
- },
- "mappings": [],
- "unit": "s"
- },
- "overrides": []
- },
- "transformations": []
+ "title": "Full block production avg time with steps",
+ "transformations": [],
+ "type": "timeseries"
},
{
"datasource": {
@@ -236,62 +262,62 @@
},
"fieldConfig": {
"defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
"custom": {
- "drawStyle": "line",
- "lineInterpolation": "linear",
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
"barAlignment": 0,
- "lineWidth": 1,
+ "drawStyle": "line",
"fillOpacity": 30,
"gradientMode": "opacity",
- "spanNulls": false,
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
"insertNulls": false,
- "showPoints": "auto",
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
"pointSize": 5,
- "stacking": {
- "mode": "normal",
- "group": "A"
- },
- "axisPlacement": "auto",
- "axisLabel": "",
- "axisColorMode": "text",
"scaleDistribution": {
"type": "linear"
},
- "axisCenteredZero": false,
- "hideFrom": {
- "tooltip": false,
- "viz": false,
- "legend": false
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
- "color": {
- "mode": "palette-classic"
- },
"mappings": [],
"unit": "s"
},
"overrides": []
},
"gridPos": {
- "x": 12,
- "y": 1,
+ "h": 8,
"w": 12,
- "h": 8
+ "x": 12,
+ "y": 1
},
"id": 547,
"options": {
- "tooltip": {
- "mode": "multi",
- "sort": "none"
- },
"legend": {
- "showLegend": true,
+ "calcs": [],
"displayMode": "list",
"placement": "bottom",
- "calcs": []
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "multi",
+ "sort": "none"
}
},
"targets": [
@@ -300,136 +326,110 @@
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
- "refId": "proposerSlashing",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"proposerSlashing\"}[$rate_interval])",
- "range": true,
- "instant": false,
- "hide": false,
"editorMode": "code",
+ "exemplar": false,
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"proposerSlashing\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"proposerSlashing\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
"legendFormat": "{{step}}",
- "exemplar": false
- },
- {
- "refId": "attesterSlashings",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"attesterSlashings\"}[$rate_interval])",
"range": true,
- "instant": false,
- "datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
- },
- "hide": false,
- "editorMode": "code",
- "legendFormat": "{{step}}"
+ "refId": "proposerSlashing"
},
{
- "refId": "voluntaryExits",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"voluntaryExits\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"attesterSlashings\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"attesterSlashings\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "attesterSlashings"
},
{
- "refId": "blsToExecutionChanges",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"blsToExecutionChanges\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"voluntaryExits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"voluntaryExits\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "voluntaryExits"
},
{
- "refId": "attestations",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"attestations\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"blsToExecutionChanges\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"blsToExecutionChanges\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "blsToExecutionChanges"
},
{
- "refId": "eth1DataAndDeposits",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"eth1DataAndDeposits\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"attestations\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"attestations\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "attestations"
},
{
- "refId": "syncAggregate",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"syncAggregate\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"eth1DataAndDeposits\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"eth1DataAndDeposits\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "eth1DataAndDeposits"
},
{
- "refId": "executionPayload",
- "expr": "rate(beacon_block_production_builder_steps_seconds{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds{step=\"executionPayload\"}[$rate_interval])",
- "range": true,
- "instant": false,
"datasource": {
- "uid": "${DS_PROMETHEUS}",
- "type": "prometheus"
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
},
- "hide": false,
"editorMode": "code",
- "legendFormat": "{{step}}"
- }
- ],
- "title": "Blinded block production avg time with steps",
- "type": "timeseries",
- "transformations": []
- },
- {
- "collapsed": false,
- "datasource": {
- "type": "prometheus",
- "uid": "${DS_PROMETHEUS}"
- },
- "gridPos": {
- "h": 1,
- "w": 24,
- "x": 0,
- "y": 0
- },
- "id": 166,
- "panels": [],
- "targets": [
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"syncAggregate\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"syncAggregate\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "syncAggregate"
+ },
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
- "refId": "A"
+ "editorMode": "code",
+ "expr": "rate(beacon_block_production_builder_steps_seconds_sum{step=\"executionPayload\"}[$rate_interval])\n/\nrate(beacon_block_production_builder_steps_seconds_count{step=\"executionPayload\"}[$rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "{{step}}",
+ "range": true,
+ "refId": "executionPayload"
}
],
- "title": "Block Production",
- "type": "row"
+ "title": "Blinded block production avg time with steps",
+ "transformations": [],
+ "type": "timeseries"
},
{
"datasource": {
@@ -455,6 +455,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -540,7 +541,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 1
+ "y": 9
},
"id": 168,
"options": {
@@ -611,6 +612,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -636,7 +638,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 1
+ "y": 9
},
"id": 170,
"options": {
@@ -657,11 +659,13 @@
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
+ "editorMode": "code",
"exemplar": false,
"expr": "rate(beacon_block_production_seconds_sum[$rate_interval])\n/\nrate(beacon_block_production_seconds_count[$rate_interval])",
"format": "heatmap",
"interval": "",
- "legendFormat": "{{instance}} - {{source}}",
+ "legendFormat": "{{source}}",
+ "range": true,
"refId": "A"
}
],
@@ -692,6 +696,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -716,7 +721,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 9
+ "y": 17
},
"id": 528,
"options": {
@@ -780,7 +785,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 9
+ "y": 17
},
"heatmap": {},
"hideZeroBuckets": false,
@@ -826,7 +831,7 @@
"unit": "s"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -882,6 +887,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -906,7 +912,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 17
+ "y": 25
},
"id": 511,
"options": {
@@ -1036,7 +1042,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 17
+ "y": 25
},
"hiddenSeries": false,
"id": 378,
@@ -1056,7 +1062,7 @@
"alertThreshold": true
},
"percentage": false,
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"pointradius": 0.5,
"points": true,
"renderer": "flot",
@@ -1131,7 +1137,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 25
+ "y": 33
},
"hiddenSeries": false,
"id": 376,
@@ -1153,7 +1159,7 @@
"alertThreshold": true
},
"percentage": false,
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"pointradius": 0.5,
"points": true,
"renderer": "flot",
@@ -1233,6 +1239,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1257,7 +1264,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 25
+ "y": 33
},
"id": 532,
"options": {
@@ -1334,6 +1341,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1358,7 +1366,7 @@
"h": 7,
"w": 12,
"x": 0,
- "y": 33
+ "y": 41
},
"id": 531,
"options": {
@@ -1441,6 +1449,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1465,7 +1474,7 @@
"h": 7,
"w": 12,
"x": 12,
- "y": 33
+ "y": 41
},
"id": 534,
"options": {
@@ -1516,7 +1525,7 @@
"h": 6,
"w": 12,
"x": 0,
- "y": 40
+ "y": 48
},
"id": 535,
"options": {
@@ -1620,7 +1629,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 40
+ "y": 48
},
"id": 537,
"options": {
@@ -1669,7 +1678,7 @@
"h": 1,
"w": 24,
"x": 0,
- "y": 48
+ "y": 56
},
"id": 541,
"panels": [],
@@ -1700,7 +1709,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 49
+ "y": 57
},
"id": 543,
"options": {
@@ -1778,7 +1787,7 @@
"h": 8,
"w": 12,
"x": 12,
- "y": 49
+ "y": 57
},
"id": 545,
"options": {
@@ -1880,7 +1889,7 @@
"h": 8,
"w": 12,
"x": 0,
- "y": 57
+ "y": 65
},
"id": 539,
"options": {
@@ -1925,7 +1934,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_bls_thread_pool.json b/dashboards/lodestar_bls_thread_pool.json
index a8021ace1102..160312a92d57 100644
--- a/dashboards/lodestar_bls_thread_pool.json
+++ b/dashboards/lodestar_bls_thread_pool.json
@@ -13,7 +13,10 @@
"list": [
{
"builtIn": 1,
- "datasource": "-- Grafana --",
+ "datasource": {
+ "type": "datasource",
+ "uid": "grafana"
+ },
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
@@ -32,7 +35,6 @@
"fiscalYearStartMonth": 0,
"graphTooltip": 1,
"id": null,
- "iteration": 1661342107287,
"links": [
{
"asDropdown": true,
@@ -53,6 +55,10 @@
"panels": [
{
"collapsed": false,
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
"h": 1,
"w": 24,
@@ -61,10 +67,23 @@
},
"id": 92,
"panels": [],
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "refId": "A"
+ }
+ ],
"title": "BLS worker pool",
"type": "row"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
"h": 3,
"w": 24,
@@ -73,12 +92,21 @@
},
"id": 154,
"options": {
+ "code": {
+ "language": "plaintext",
+ "showLineNumbers": false,
+ "showMiniMap": false
+ },
"content": "Verifies signature sets in a thread pool of workers. Must ensure that signatures are verified fast and efficiently.",
"mode": "markdown"
},
- "pluginVersion": "8.4.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])",
"interval": "",
"legendFormat": "{{workerId}}",
@@ -89,6 +117,10 @@
"type": "text"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Utilization rate = total CPU time per worker per second. Graph is stacked. This ratios should be high since BLS verification is the limiting factor in the node's throughput.",
"fieldConfig": {
"defaults": {
@@ -96,6 +128,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -107,6 +141,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -139,7 +174,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "multi",
@@ -149,6 +185,10 @@
"pluginVersion": "8.4.0-beta1",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])",
"interval": "",
"legendFormat": "{{workerId}}",
@@ -159,12 +199,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -176,6 +222,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -207,7 +254,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -231,6 +279,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Average sync time to validate a single signature set. Note that the set may have been verified in batch. In most normal hardware this value should be ~1-2ms",
"fieldConfig": {
"defaults": {
@@ -238,6 +290,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -250,6 +304,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -282,8 +337,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "single",
@@ -296,6 +352,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "sum(rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval]))/sum(rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval]))",
"interval": "",
"legendFormat": "pool",
@@ -306,6 +366,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Raw throughput of the thread pool. How many individual signature sets are successfully validated per second",
"fieldConfig": {
"defaults": {
@@ -313,6 +377,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -325,6 +391,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -357,8 +424,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "single",
@@ -371,6 +439,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])",
"interval": "",
"legendFormat": "pool",
@@ -381,6 +453,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Total length of the job queue. Note: this queue is not bounded",
"fieldConfig": {
"defaults": {
@@ -388,6 +464,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -400,6 +478,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -432,8 +511,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "single",
@@ -446,6 +526,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "lodestar_bls_thread_pool_queue_length",
"interval": "",
"legendFormat": "pool",
@@ -456,6 +540,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "How much async time job spent waiting in the job queue before being picked up. This number should be really low <100ms to ensure signatures are validated fast.",
"fieldConfig": {
"defaults": {
@@ -463,6 +551,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -475,6 +565,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -507,8 +598,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "single",
@@ -521,6 +613,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_sum[$rate_interval])/rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_count[$rate_interval])",
"interval": "",
"legendFormat": "pool",
@@ -531,6 +627,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Async time from sending a message to the worker and the worker receiving it.",
"fieldConfig": {
"defaults": {
@@ -538,6 +638,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -550,6 +652,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -584,7 +687,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "multi",
@@ -621,6 +725,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "What percentage of total signature sets were verified in batch, which is an optimization to reduce verification costs by x2. For a synced node this should be ~100%",
"fieldConfig": {
"defaults": {
@@ -628,6 +736,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -640,6 +750,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -672,8 +783,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "single",
@@ -686,6 +798,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_batch_sigs_success_total[$rate_interval])/rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])",
"interval": "",
"legendFormat": "pool",
@@ -696,6 +812,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Average signatures per set. This number is decided by the time of object submitted to the pool:\n- Sync blocks: 128\n- Aggregates: 3\n- Attestations: 1",
"fieldConfig": {
"defaults": {
@@ -703,6 +823,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -715,6 +837,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -747,8 +870,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -758,6 +882,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_sig_sets_started_total[$rate_interval])/(rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])>0)",
"interval": "",
"legendFormat": "pool",
@@ -768,6 +896,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "How many individual signature sets are invalid vs (valid + invalid). We don't control this number since peers may send us invalid signatures. This number should be very low since we should ban bad peers. If it's too high the batch optimization may not be worth it.",
"fieldConfig": {
"defaults": {
@@ -775,6 +907,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -786,6 +920,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -834,7 +969,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "multi",
@@ -871,6 +1007,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "Average sets per job. A set may contain +1 signatures. This number should be higher than 1 to reduce communication costs",
"fieldConfig": {
"defaults": {
@@ -878,6 +1018,8 @@
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -890,6 +1032,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -922,8 +1065,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -933,6 +1077,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])/rate(lodestar_bls_thread_pool_job_groups_started_total[$rate_interval])",
"interval": "",
"legendFormat": "pool",
@@ -941,10 +1089,105 @@
],
"title": "BLS worker pool - sets per job",
"type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "unit": "s"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 42
+ },
+ "id": 520,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "multi",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "editorMode": "code",
+ "expr": "rate(lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds_sum[$rate_interval]) * 384",
+ "instant": false,
+ "legendFormat": "signature_deserialization",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "editorMode": "code",
+ "expr": "rate(lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds_sum[$rate_interval]) * 384",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "pubkey_aggregation",
+ "range": true,
+ "refId": "B"
+ }
+ ],
+ "title": "BLS jobItemWorkReq cpu time per epoch",
+ "type": "timeseries"
}
],
"refresh": "10s",
- "schemaVersion": 35,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_debug_gossipsub.json b/dashboards/lodestar_debug_gossipsub.json
index eaed0c9842f6..d83c075de7be 100644
--- a/dashboards/lodestar_debug_gossipsub.json
+++ b/dashboards/lodestar_debug_gossipsub.json
@@ -103,6 +103,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -220,7 +221,7 @@
"text": {},
"textMode": "auto"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -289,7 +290,7 @@
"text": {},
"textMode": "value"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -341,7 +342,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -396,7 +397,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -451,7 +452,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -493,6 +494,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -703,6 +705,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -786,6 +789,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -957,6 +961,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1037,6 +1042,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -8729,7 +8735,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar",
diff --git a/dashboards/lodestar_discv5.json b/dashboards/lodestar_discv5.json
index 02c0a3b38956..31f115936df2 100644
--- a/dashboards/lodestar_discv5.json
+++ b/dashboards/lodestar_discv5.json
@@ -91,6 +91,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -170,6 +171,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -251,6 +253,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -333,6 +336,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -415,6 +419,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -496,6 +501,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2051,7 +2057,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_execution_engine.json b/dashboards/lodestar_execution_engine.json
index cd72b712f9a8..2c4cadc131f1 100644
--- a/dashboards/lodestar_execution_engine.json
+++ b/dashboards/lodestar_execution_engine.json
@@ -103,6 +103,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -186,6 +187,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -270,6 +272,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -354,6 +357,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -438,6 +442,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
@@ -522,6 +527,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3322,7 +3328,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_libp2p.json b/dashboards/lodestar_libp2p.json
index 08f500cf5a40..7c6a76ec7175 100644
--- a/dashboards/lodestar_libp2p.json
+++ b/dashboards/lodestar_libp2p.json
@@ -103,6 +103,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -183,6 +184,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -289,6 +291,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -368,6 +371,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -447,6 +451,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -544,6 +549,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -945,7 +951,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_multinode.json b/dashboards/lodestar_multinode.json
index ac710364985b..9a8fecaf0128 100644
--- a/dashboards/lodestar_multinode.json
+++ b/dashboards/lodestar_multinode.json
@@ -13,7 +13,10 @@
"list": [
{
"builtIn": 1,
- "datasource": "-- Grafana --",
+ "datasource": {
+ "type": "datasource",
+ "uid": "grafana"
+ },
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
@@ -51,6 +54,10 @@
"liveNow": false,
"panels": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -100,7 +107,7 @@
},
"textMode": "auto"
},
- "pluginVersion": "8.3.1",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -119,12 +126,18 @@
"type": "stat"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -136,6 +149,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -167,10 +181,12 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
- "mode": "single"
+ "mode": "single",
+ "sort": "none"
}
},
"targets": [
@@ -190,12 +206,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -207,6 +229,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -238,10 +261,12 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
- "mode": "single"
+ "mode": "single",
+ "sort": "none"
}
},
"targets": [
@@ -261,12 +286,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -278,6 +309,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -310,10 +342,12 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
- "mode": "single"
+ "mode": "single",
+ "sort": "none"
}
},
"targets": [
@@ -334,12 +368,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -351,6 +391,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -383,10 +424,12 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
- "mode": "single"
+ "mode": "single",
+ "sort": "none"
}
},
"targets": [
@@ -407,6 +450,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -441,7 +488,7 @@
},
"textMode": "auto"
},
- "pluginVersion": "8.3.1",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -473,6 +520,10 @@
"type": "stat"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -526,6 +577,10 @@
"type": "stat"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -574,7 +629,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single"
@@ -598,6 +654,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -647,7 +707,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single"
@@ -672,7 +733,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 33,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_networking.json b/dashboards/lodestar_networking.json
index 77e4be04048f..e17cabf32048 100644
--- a/dashboards/lodestar_networking.json
+++ b/dashboards/lodestar_networking.json
@@ -104,6 +104,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -189,6 +190,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -270,6 +272,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -351,6 +354,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -534,9 +538,10 @@
"values": false
},
"showUnfilled": true,
- "text": {}
+ "text": {},
+ "valueMode": "color"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -588,9 +593,10 @@
"values": false
},
"showUnfilled": true,
- "text": {}
+ "text": {},
+ "valueMode": "color"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -631,6 +637,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -711,6 +718,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -829,7 +837,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -886,6 +894,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1000,6 +1009,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -1138,6 +1148,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -1223,6 +1234,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -1309,6 +1321,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
@@ -1450,7 +1463,7 @@
"unit": "short"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -1508,6 +1521,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -1648,7 +1662,7 @@
"unit": "short"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -1760,7 +1774,7 @@
"unit": "short"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -1818,6 +1832,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 4,
@@ -1931,6 +1946,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2015,6 +2031,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2141,7 +2158,7 @@
"alertThreshold": true
},
"percentage": false,
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"pointradius": 2,
"points": false,
"renderer": "flot",
@@ -2251,7 +2268,7 @@
"alertThreshold": true
},
"percentage": false,
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"pointradius": 2,
"points": false,
"renderer": "flot",
@@ -2347,7 +2364,7 @@
"alertThreshold": true
},
"percentage": false,
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"pointradius": 2,
"points": false,
"renderer": "flot",
@@ -2426,6 +2443,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2505,6 +2523,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2635,6 +2654,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2718,6 +2738,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2801,6 +2822,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2886,6 +2908,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -2967,6 +2990,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3050,6 +3074,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3135,6 +3160,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3230,6 +3256,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3348,6 +3375,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3476,6 +3504,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3556,6 +3585,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3674,7 +3704,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -3717,6 +3747,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3799,86 +3830,7 @@
"tooltip": false,
"viz": false
},
- "lineInterpolation": "linear",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": false,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "unit": "percentunit"
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 0,
- "y": 154
- },
- "id": 540,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom",
- "showLegend": true
- },
- "tooltip": {
- "mode": "multi",
- "sort": "none"
- }
- },
- "targets": [
- {
- "datasource": {
- "type": "prometheus",
- "uid": "${DS_PROMETHEUS}"
- },
- "editorMode": "code",
- "expr": "lodestar_gossip_validation_queue_current_drop_ratio",
- "legendFormat": "{{topic}}",
- "range": true,
- "refId": "A"
- }
- ],
- "title": "Drop Ratio",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "prometheus",
- "uid": "${DS_PROMETHEUS}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisCenteredZero": false,
- "axisColorMode": "text",
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 0,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -3971,6 +3923,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4050,6 +4003,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4159,6 +4113,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4263,6 +4218,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4342,6 +4298,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4450,6 +4407,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4558,6 +4516,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4574,7 +4533,8 @@
"mode": "off"
}
},
- "mappings": []
+ "mappings": [],
+ "unit": "percentunit"
},
"overrides": []
},
@@ -4584,7 +4544,7 @@
"x": 0,
"y": 187
},
- "id": 615,
+ "id": 624,
"options": {
"legend": {
"calcs": [],
@@ -4604,25 +4564,14 @@
"uid": "${DS_PROMETHEUS}"
},
"editorMode": "code",
- "expr": "rate(lodestar_gossip_attestation_use_head_block_state_count{caller=\"validateGossipAttestation\"}[$rate_interval])",
- "legendFormat": "head_state",
+ "expr": "rate(lodestar_gossip_attestation_shuffling_cache_hit_count[$rate_interval])\n/\n(\n rate(lodestar_gossip_attestation_shuffling_cache_hit_count[$rate_interval])\n +\n (\n rate(lodestar_gossip_attestation_shuffling_cache_miss_count[$rate_interval])\n or\n vector(0)\n )\n)\nor\nvector(1)\n",
+ "instant": false,
+ "legendFormat": "hit_percentage",
"range": true,
"refId": "A"
- },
- {
- "datasource": {
- "type": "prometheus",
- "uid": "${DS_PROMETHEUS}"
- },
- "editorMode": "code",
- "expr": "rate(lodestar_gossip_attestation_use_head_block_state_dialed_to_target_epoch_count{caller=\"validateGossipAttestation\"}[$rate_interval])",
- "hide": false,
- "legendFormat": "head_state_dialed_to_target_epoch",
- "range": true,
- "refId": "B"
}
],
- "title": "Used States",
+ "title": "Shuffling Cache Hit",
"type": "timeseries"
},
{
@@ -4649,6 +4598,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4770,6 +4720,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -4886,7 +4837,7 @@
"reverse": false
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -4943,6 +4894,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5071,6 +5023,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5199,6 +5152,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5303,6 +5257,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5409,6 +5364,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5489,6 +5445,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5570,6 +5527,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5651,6 +5609,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5732,6 +5691,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5815,6 +5775,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5898,6 +5859,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5979,6 +5941,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -6060,6 +6023,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -6141,6 +6105,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -6199,7 +6164,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_state_cache_regen.json b/dashboards/lodestar_state_cache_regen.json
index 0d1360837d7f..62b4f6fc479f 100644
--- a/dashboards/lodestar_state_cache_regen.json
+++ b/dashboards/lodestar_state_cache_regen.json
@@ -13,7 +13,10 @@
"list": [
{
"builtIn": 1,
- "datasource": "-- Grafana --",
+ "datasource": {
+ "type": "datasource",
+ "uid": "grafana"
+ },
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
@@ -33,7 +36,6 @@
"fiscalYearStartMonth": 0,
"graphTooltip": 1,
"id": null,
- "iteration": 1661328981106,
"links": [
{
"asDropdown": true,
@@ -54,6 +56,10 @@
"panels": [
{
"collapsed": false,
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
"h": 1,
"w": 24,
@@ -62,16 +68,31 @@
},
"id": 22,
"panels": [],
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "refId": "A"
+ }
+ ],
"title": "stateCache and stateCheckpointCache Stats",
"type": "row"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -83,6 +104,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -131,7 +153,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -167,12 +190,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -184,6 +213,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -232,7 +262,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -268,12 +299,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -285,6 +322,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -316,7 +354,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -325,6 +364,10 @@
},
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"exemplar": false,
"expr": "lodestar_state_cache_size{}",
"interval": "",
@@ -336,12 +379,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -353,6 +402,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -384,7 +434,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -393,6 +444,10 @@
},
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"exemplar": false,
"expr": "lodestar_cp_state_cache_size{}",
"interval": "",
@@ -400,6 +455,10 @@
"refId": "A"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"exemplar": false,
"expr": "lodestar_cp_state_epoch_size",
"hide": false,
@@ -412,12 +471,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -429,6 +494,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -460,7 +526,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -484,12 +551,18 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@@ -501,6 +574,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -532,7 +606,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -557,6 +632,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -651,7 +730,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -700,6 +780,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -794,7 +878,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -843,6 +928,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -938,7 +1027,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -987,6 +1077,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1082,7 +1176,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1132,6 +1227,10 @@
},
{
"collapsed": false,
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
"h": 1,
"w": 24,
@@ -1140,10 +1239,23 @@
},
"id": 40,
"panels": [],
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "refId": "A"
+ }
+ ],
"title": "Regen call stats",
"type": "row"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"description": "",
"fieldConfig": {
"defaults": {
@@ -1193,7 +1305,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1217,6 +1330,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1265,7 +1382,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1289,6 +1407,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1338,7 +1460,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1362,6 +1485,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1411,7 +1538,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1435,6 +1563,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1486,7 +1618,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1510,6 +1643,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1561,7 +1698,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1585,6 +1723,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1636,7 +1778,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1660,6 +1803,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1711,7 +1858,8 @@
"legend": {
"calcs": [],
"displayMode": "list",
- "placement": "bottom"
+ "placement": "bottom",
+ "showLegend": true
},
"tooltip": {
"mode": "single",
@@ -1736,6 +1884,10 @@
},
{
"collapsed": false,
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"gridPos": {
"h": 1,
"w": 24,
@@ -1744,10 +1896,23 @@
},
"id": 54,
"panels": [],
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
+ "refId": "A"
+ }
+ ],
"title": "Regen queue",
"type": "row"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1798,8 +1963,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -1824,6 +1990,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1874,8 +2044,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -1885,6 +2056,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "12*rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])",
"interval": "",
"legendFormat": "regen_queue",
@@ -1895,6 +2070,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -1945,8 +2124,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -1956,6 +2136,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_regen_queue_dropped_jobs_total[$rate_interval])/(rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])+rate(lodestar_regen_queue_dropped_jobs_total[$rate_interval]))",
"interval": "",
"legendFormat": "regen_queue",
@@ -1966,6 +2150,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -2016,8 +2204,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -2027,6 +2216,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_regen_queue_job_time_seconds_sum[$rate_interval])/rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])",
"interval": "",
"legendFormat": "regen_queue",
@@ -2037,6 +2230,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -2087,8 +2284,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -2098,6 +2296,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "rate(lodestar_regen_queue_job_wait_time_seconds_sum[$rate_interval])/rate(lodestar_regen_queue_job_wait_time_seconds_count[$rate_interval])",
"interval": "",
"legendFormat": "regen_queue",
@@ -2108,6 +2310,10 @@
"type": "timeseries"
},
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"fieldConfig": {
"defaults": {
"color": {
@@ -2158,8 +2364,9 @@
"graph": {},
"legend": {
"calcs": [],
- "displayMode": "hidden",
- "placement": "bottom"
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": false
},
"tooltip": {
"mode": "multi",
@@ -2169,6 +2376,10 @@
"pluginVersion": "7.4.5",
"targets": [
{
+ "datasource": {
+ "type": "prometheus",
+ "uid": "${DS_PROMETHEUS}"
+ },
"expr": "lodestar_regen_queue_length",
"interval": "",
"legendFormat": "regen_queue",
@@ -2180,7 +2391,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 35,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_sync.json b/dashboards/lodestar_sync.json
index 1a17db6da5ff..6cc82bedde47 100644
--- a/dashboards/lodestar_sync.json
+++ b/dashboards/lodestar_sync.json
@@ -103,6 +103,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -197,6 +198,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
@@ -305,6 +307,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -384,6 +387,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -463,6 +467,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -542,6 +547,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -621,6 +627,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 1,
@@ -1658,7 +1665,7 @@
}
],
"refresh": "10s",
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_validator_client.json b/dashboards/lodestar_validator_client.json
index 35f7f3ccc458..7cc41cffdb34 100644
--- a/dashboards/lodestar_validator_client.json
+++ b/dashboards/lodestar_validator_client.json
@@ -84,6 +84,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -213,7 +214,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -269,7 +270,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -323,7 +324,7 @@
"text": {},
"textMode": "name"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -377,7 +378,7 @@
},
"textMode": "auto"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -430,7 +431,7 @@
},
"textMode": "auto"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -483,7 +484,7 @@
},
"textMode": "auto"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -521,7 +522,7 @@
"content": "_Validator metrics =D_",
"mode": "markdown"
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"targets": [
{
"datasource": {
@@ -557,6 +558,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -721,6 +723,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -869,7 +872,7 @@
"unit": "s"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -983,7 +986,7 @@
"unit": "s"
}
},
- "pluginVersion": "9.3.2",
+ "pluginVersion": "10.1.1",
"reverseYBuckets": false,
"targets": [
{
@@ -2014,7 +2017,7 @@
],
"refresh": "10s",
"revision": 1,
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/dashboards/lodestar_vm_host.json b/dashboards/lodestar_vm_host.json
index 1185f7409319..7471defd0e8e 100644
--- a/dashboards/lodestar_vm_host.json
+++ b/dashboards/lodestar_vm_host.json
@@ -98,6 +98,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -240,6 +241,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -352,6 +354,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -449,6 +452,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -566,6 +570,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -663,6 +668,7 @@
"tooltip": false,
"viz": false
},
+ "insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
@@ -5585,7 +5591,7 @@
],
"refresh": "10s",
"revision": 1,
- "schemaVersion": 37,
+ "schemaVersion": 38,
"style": "dark",
"tags": [
"lodestar"
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 056325e19104..270a01b311de 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -73,6 +73,7 @@ extra:
nav:
- Home: index.md
- Introduction: introduction.md
+ - Security: security.md
- Getting Started:
- Quick Start: getting-started/quick-start.md
- Installation: getting-started/installation.md
@@ -121,7 +122,7 @@ nav:
- Dependency Graph: contribution/depgraph.md
# - Repo: contribution/repo.md
- Testing:
- - Overview: contribution/testing/overview.md
+ - Overview: contribution/testing/index.md
# - Unit Tests: contribution/testing/unit-tests.md
# - Integration Tests: contribution/testing/integration-tests.md
# - E2E Tests: contribution/testing/e2e-tests.md
diff --git a/docs/pages/beacon-management/networking.md b/docs/pages/beacon-management/networking.md
index 9305b683ae47..993b1cdfda26 100644
--- a/docs/pages/beacon-management/networking.md
+++ b/docs/pages/beacon-management/networking.md
@@ -1,38 +1,38 @@
# Networking
-Starting up Lodestar will automatically connect it to peers on the network. Peers are found through the discv5 protocol and one peers are established communications happen via gossipsub over libp2p. While not necessary, having a basic understanding of how the various protocols and transport work will help with debugging and troubleshooting as some of the more common challenges come up with [firewalls](#firewall-management) and [NAT traversal](#nat-traversal).
+Starting up Lodestar will automatically connect it to peers on the network. Peers are found through the discv5 protocol and once peers are established communications happen via gossipsub over libp2p. While not necessary, having a basic understanding of how the various protocols and transport work will help with debugging and troubleshooting as some of the more common challenges come up with [firewalls](#firewall-management) and [NAT traversal](#nat-traversal).
## Networking Flags
Some of the important Lodestar flags related to networking are:
-- [`--discv5`](./configuration.md#--discv5)
-- [`--listenAddress`](./configuration.md#--listenAddress)
-- [`--port`](./configuration.md#--port)
-- [`--discoveryPort`](./configuration.md#--discoveryPort)
-- [`--listenAddress6`](./configuration.md#--listenAddress6)
-- [`--port6`](./configuration.md#--port6)
-- [`--discoveryPort6`](./configuration.md#--discoveryPort6)
-- [`--bootnodes`](./configuration.md#--bootnodes)
-- [`--deterministicLongLivedAttnets`](./configuration.md#--deterministicLongLivedAttnets)
-- [`--subscribeAllSubnets`](./configuration.md#--subscribeAllSubnets)
-- [`--disablePeerScoring`](./configuration.md#--disablePeerScoring)
-- [`--enr.ip`](./configuration.md#--enr.ip)
-- [`--enr.tcp`](./configuration.md#--enr.tcp)
-- [`--enr.udp`](./configuration.md#--enr.udp)
-- [`--enr.ip6`](./configuration.md#--enr.ip6)
-- [`--enr.tcp6`](./configuration.md#--enr.tcp6)
-- [`--enr.udp6`](./configuration.md#--enr.udp6)
-- [`--nat`](./configuration.md#--nat)
-- [`--private`](./configuration.md#`--private`)
+- [`--discv5`](./beacon-cli.md#-discv5)
+- [`--listenAddress`](./beacon-cli.md#-listenaddress)
+- [`--port`](./beacon-cli.md#-port)
+- [`--discoveryPort`](./beacon-cli.md#-discoveryport)
+- [`--listenAddress6`](./beacon-cli.md#-listenaddress6)
+- [`--port6`](./beacon-cli.md#-port6)
+- [`--discoveryPort6`](./beacon-cli.md#-discoveryport6)
+- [`--bootnodes`](./beacon-cli.md#-bootnodes)
+- [`--deterministicLongLivedAttnets`](./beacon-cli.md#-deterministiclonglivedattnets)
+- [`--subscribeAllSubnets`](./beacon-cli.md#-subscribeallsubnets)
+- [`--disablePeerScoring`](./beacon-cli.md#-disablepeerscoring)
+- [`--enr.ip`](./beacon-cli.md#-enrip)
+- [`--enr.tcp`](./beacon-cli.md#-enrtcp)
+- [`--enr.udp`](./beacon-cli.md#-enrudp)
+- [`--enr.ip6`](./beacon-cli.md#-enrip6)
+- [`--enr.tcp6`](./beacon-cli.md#-enrtcp6)
+- [`--enr.udp6`](./beacon-cli.md#-enrudp6)
+- [`--nat`](./beacon-cli.md#-nat)
+- [`--private`](./beacon-cli.md#-private)
## Peer Discovery (Discv5)
-In Ethereum, discv5 plays a pivotal role in the peer discovery process, facilitating nodes to find and locate each other in order to form the peer-to-peer network. The process begins with an interaction between new nodes and bootnodes at start-up. Bootnodes are nodes with hard-coded addresses, or can be overridden via the cli flag `--bootnodes`, to bootstrap the discovery process. Through a method called FINDNODE-NODES, a new node establishes a bond with each bootnode, and it returns a list of peers for the new node to connect to. Following this trail, the new node engages through FINDNODE-NODES with the provided peers to further establish a web of connections.
+In Ethereum, discv5 plays a pivotal role in the peer discovery process, facilitating nodes to find and locate each other in order to form the peer-to-peer network. The process begins with an interaction between new nodes and bootnodes at start-up. Bootnodes are nodes with hard-coded addresses, or can be overridden via the cli flag [`--bootnodes`](./beacon-cli.md#-bootnodes), to bootstrap the discovery process. Through a method called FINDNODE-NODES, a new node establishes a bond with each bootnode, and it returns a list of peers for the new node to connect to. Following this trail, the new node engages through FINDNODE-NODES with the provided peers to further establish a web of connections.
Discv5 operates as a peer advertisement medium in this network, where nodes can act as both providers and consumers of data. Every participating node in the Discv5 protocol discovers peer data from other nodes and later relays it, making the discovery process dynamic and efficient.
-Discv5 is designed to be a standalone protocol running via UDP on a dedicated port solely for peer discovery. Peer data is exchanged via self-certified, flexible peer records (ENRs). These key features cater to the Ethereum network and being a good peer often means running a discv5 worker. Lodestar offers simple configuration to setup and run a bootnode independently of a beacon node. See [bootnode](./bootnode.md) for more information and configuration options.
+Discv5 is designed to be a standalone protocol running via UDP on a dedicated port solely for peer discovery. Peer data is exchanged via self-certified, flexible peer records (ENRs). These key features cater to the Ethereum network and being a good peer often means running a discv5 worker. Lodestar offers simple configuration to set up and run a bootnode independently of a beacon node. See the [bootnode cli](../bootnode/bootnode-cli.md) page for more information and configuration options.
## ENR
diff --git a/docs/pages/beacon-management/syncing.md b/docs/pages/beacon-management/syncing.md
index 21cd05d8a8a2..40b5b4ba96b5 100644
--- a/docs/pages/beacon-management/syncing.md
+++ b/docs/pages/beacon-management/syncing.md
@@ -2,7 +2,7 @@
Syncing an Ethereum node involves obtaining a copy of the blockchain data from other peers in the network to reach a consistent state. This process is crucial for new nodes or nodes that have been offline and need to catch up with the network's current state. Syncing can be performed for both the execution layer and the beacon chain, although the focus here will be primarily on the beacon chain.
-Lodestar allows for several methods of syncing however the recommended method is `checkpoint sync` as it is the fastest and least resource intensive. It is generally a good idea to sync via a [`--checkpointSyncUrl`](./configuration.md#--checkpointSyncUrl). If starting at a specific point is necessary specify the [`--checkpointState`](./configuration.md#--checkpointState) that should be where the sync begins.
+Lodestar allows for several methods of syncing however the recommended method is `checkpoint sync` as it is the fastest and least resource intensive. It is generally a good idea to sync via a [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl). If starting at a specific point is necessary specify the [`--checkpointState`](./beacon-cli.md#-checkpointstate) that should be where the sync begins.
## Weak Subjectivity
@@ -36,7 +36,7 @@ The implementation of the different syncing styles in Lodestar are actually one
There are several flags that can be used to configure the sync process.
-- [`--checkpointSyncUrl`](./configuration.md#--checkpointSyncUrl)
-- [`--checkpointState`](./configuration.md#--checkpointState)
-- [`--wssCheckpoint`](./configuration.md#--wssCheckpoint)
-- [`--forceCheckpointSync`](./configuration.md#--forceCheckpointSync)
+- [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl)
+- [`--checkpointState`](./beacon-cli.md#-checkpointstate)
+- [`--wssCheckpoint`](./beacon-cli.md#-wsscheckpoint)
+- [`--forceCheckpointSync`](./beacon-cli.md#-forcecheckpointsync)
diff --git a/docs/pages/contribution/testing/end-to-end-tests.md b/docs/pages/contribution/testing/end-to-end-tests.md
new file mode 100644
index 000000000000..3f405128c7cb
--- /dev/null
+++ b/docs/pages/contribution/testing/end-to-end-tests.md
@@ -0,0 +1,3 @@
+# End-To-End Tests
+
+Check back soon for more information!! We are in the process of updating our docs.
diff --git a/docs/pages/contribution/testing/performance-tests.md b/docs/pages/contribution/testing/performance-tests.md
index e69de29bb2d1..6e2d9c86319b 100644
--- a/docs/pages/contribution/testing/performance-tests.md
+++ b/docs/pages/contribution/testing/performance-tests.md
@@ -0,0 +1,3 @@
+# Performance Tests
+
+Check back soon for more information!! We are in the process of updating our docs.
diff --git a/docs/pages/contribution/testing/simulation-tests.md b/docs/pages/contribution/testing/simulation-tests.md
index ed36d1351307..c1059e5c4177 100644
--- a/docs/pages/contribution/testing/simulation-tests.md
+++ b/docs/pages/contribution/testing/simulation-tests.md
@@ -1,4 +1,4 @@
-# Simulation Testing
+# Simulation Tests
"Sim" testing for Lodestar is the most comprehensive, and complex, testing that is run. The goal is to fully simulate a testnet and to actuate the code in a way that closely mimics what will happen when turning on Lodestar in the wild. This is a very complex task and requires a lot of moving parts to work together. The following sections will describe the various components and how they work together.
@@ -54,7 +54,7 @@ GETH_DOCKER_IMAGE=ethereum/client-go:v1.11.6 \
## Sim Test Infrastructure
-When setting up and running the simulations, interactions with the nodes is through the published node API's. All functionality is actuated via http request and by "plugging in" this way it is possible to run the nodes in a stand-alone fashion, as they would be run in production, but to still achieve a tightly monitored and controlled environment. If code needs to be executed on a "class by class" basis or with mocking involved then the test is not a simulation test and would fall into one of the other testing categories. See the [Testing](../testing.md) page for more information on the other types of tests available for Lodestar.
+When setting up and running the simulations, interactions with the nodes are through the published node APIs. All functionality is actuated via http request and by "plugging in" this way it is possible to run the nodes in a stand-alone fashion, as they would be run in production, but to still achieve a tightly monitored and controlled environment. If code needs to be executed on a "class by class" basis or with mocking involved then the test is not a simulation test and would fall into one of the other testing categories. See the [Testing Overview](./index.md) page for more information on the other types of tests available for Lodestar.
### Simulation Environment
diff --git a/docs/pages/contribution/testing/spec-tests.md b/docs/pages/contribution/testing/spec-tests.md
index e69de29bb2d1..b7a65dafd072 100644
--- a/docs/pages/contribution/testing/spec-tests.md
+++ b/docs/pages/contribution/testing/spec-tests.md
@@ -0,0 +1,3 @@
+# Specification Tests
+
+Check back soon for more information!! We are in the process of updating our docs.
diff --git a/docs/pages/contribution/testing/unit-tests.md b/docs/pages/contribution/testing/unit-tests.md
index e69de29bb2d1..cbf4b4ae2264 100644
--- a/docs/pages/contribution/testing/unit-tests.md
+++ b/docs/pages/contribution/testing/unit-tests.md
@@ -0,0 +1,3 @@
+# Unit Tests
+
+Check back soon for more information!! We are in the process of updating our docs.
diff --git a/docs/pages/data-retention.md b/docs/pages/data-retention.md
index c8512858441f..41daa8dc458d 100644
--- a/docs/pages/data-retention.md
+++ b/docs/pages/data-retention.md
@@ -6,7 +6,7 @@ There are several processes that need to store data for Lodestar. These data set
```bash
$executionDir # this changes depending on the execution client
- └── execution-db
+ └── execution-db
$dataDir # specified by --dataDir on the beacon command
├── .log_rotate_audit.json
@@ -49,6 +49,6 @@ Configuring your node to store and prune data is key to success. On average you
`keystores`, `keystore-cache` and `peerstore` are not usually very large and are not expected to grow much during normal operation.
-Logs can also become quite large so please check out the section on [log management](../logging-and-metrics/log-management.md) for more information.
+Logs can also become quite large so please check out the section on [log management](./logging-and-metrics/log-management.md) for more information.
-There is really only one flag that is needed to manage the data for Lodestar, [`--dataDir`](./configuration.md#--dataDir). Other than that handling log management is really the heart of the data management story. Beacon node data is what it is. Depending on the execution client that is chosen, there may be flags to help with data storage growth but that is outside the scope of this document.
+There is really only one flag that is needed to manage the data for Lodestar, [`--dataDir`](./beacon-management/beacon-cli.md#-datadir). Other than that handling log management is really the heart of the data management story. Beacon node data is what it is. Depending on the execution client that is chosen, there may be flags to help with data storage growth but that is outside the scope of this document.
diff --git a/docs/pages/getting-started/installation.md b/docs/pages/getting-started/installation.md
index 61ecb5b128ef..4fdfc3e82367 100644
--- a/docs/pages/getting-started/installation.md
+++ b/docs/pages/getting-started/installation.md
@@ -90,4 +90,4 @@ See [Command Line Reference](./../reference/cli.md) for further information.
!!! danger
For mainnet (production) usage, we only recommend installing with docker due to [NPM supply chain attacks](https://hackaday.com/2021/10/22/supply-chain-attack-npm-library-used-by-facebook-and-others-was-compromised/). Until a [safer installation method has been found](https://github.com/ChainSafe/lodestar/issues/3596), do not use this install method except for experimental purposes only.
-
\ No newline at end of file
+
diff --git a/docs/pages/getting-started/starting-a-node.md b/docs/pages/getting-started/starting-a-node.md
index 46b6f2e456c8..dd11381bde10 100644
--- a/docs/pages/getting-started/starting-a-node.md
+++ b/docs/pages/getting-started/starting-a-node.md
@@ -14,7 +14,7 @@ Make sure Lodestar is installed in your local environment, following the chosen
./lodestar --help
```
-For a complete list of beacon node CLI commands and options, see the [Command Line Reference](../../reference/cli/)
+For a complete list of beacon node CLI commands and options, see the [`beacon` CLI Command](../beacon-management/beacon-cli.md) section.
To select a known testnet or mainnet, use the `--network` flag. `mainnet` is selected by default, and a list of available networks is listed with the `--help` flag. Setting the `--network` flag will conveniently configure the beacon node or validator client for the selected network. For power users, any configuration option should be able to be overridden.
@@ -181,4 +181,4 @@ Apr-20 15:16:17.017[] info: Synced - slot: 6264979 - head: 0xde9
6. Peer info: Current total number of outbound or inbound peers, for e.g.: `peers: 27`
-For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](./prometheus-grafana.md) for more details.
+For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](../logging-and-metrics/prometheus-grafana.md) for more details.
diff --git a/docs/pages/google0c42298b7ec08b7e.html b/docs/pages/google0c42298b7ec08b7e.html
new file mode 100644
index 000000000000..7edebde149af
--- /dev/null
+++ b/docs/pages/google0c42298b7ec08b7e.html
@@ -0,0 +1 @@
+google-site-verification: google0c42298b7ec08b7e.html
\ No newline at end of file
diff --git a/docs/pages/index.md b/docs/pages/index.md
index 82674eb89fe8..4af149a7a0ef 100644
--- a/docs/pages/index.md
+++ b/docs/pages/index.md
@@ -1,19 +1,19 @@
![lodestar logo](assets/lodestar_icon_text_black_stroke.png)
-## Welcome to the Lodestar documentation!
+## Welcome to the Lodestar documentation
> **Lodestar is an open-source Ethereum Consensus client and Typescript ecosystem, maintained by ChainSafe Systems**
### Getting started
-- Follow the installation method for [source install](install/source.md), [NPM install](install/npm.md), or [Docker install](install/docker.md) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart).
-- Use [Lodestar libraries](libraries) in your next Ethereum Typescript project.
-- Run a beacon node on [mainnet or a public testnet](usage/beacon-management.md).
-- Utilize the whole stack by [starting a local testnet](usage/local).
-- View the Lodestar [CLI commands and options](https://chainsafe.github.io/lodestar/reference/cli/)
-- Prospective contributors can read the [contributing section](https://chainsafe.github.io/lodestar/contributing/) to understand how we develop and test on Lodestar.
+- Follow the installation method for [source install](./getting-started/installation.md/#build-from-source) or [Docker install](./getting-started/installation.md/#docker-installation) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart).
+- Use [Lodestar libraries](./supporting-libraries/index.md) in your next Ethereum Typescript project.
+- Run a beacon node on [mainnet or a public testnet](./getting-started/starting-a-node.md).
+- Utilize the whole stack by [starting a local testnet](./advanced-topics/setting-up-a-testnet.md).
+- View the Lodestar [CLI commands and options](./beacon-management/beacon-cli.md)
+- Prospective contributors can read the [contributing section](./contribution/getting-started.md) to understand how we develop and test on Lodestar.
- If you have questions [submit an issue](https://github.com/ChainSafe/lodestar/issues/new) or join us on [Discord](https://discord.gg/yjyvFRP)!
-- Please note our [security policy](https://github.com/ChainSafe/lodestar/blob/unstable/SECURITY.md).
+- Please note our [security policy](./security.md).
- Sign up to our [mailing list](https://chainsafe.typeform.com/lodestar) for announcements and any critical information about Lodestar.
## Specifications
diff --git a/docs/pages/introduction.md b/docs/pages/introduction.md
index f8fe03386c0a..776b018641b8 100644
--- a/docs/pages/introduction.md
+++ b/docs/pages/introduction.md
@@ -10,11 +10,11 @@ In Ethereum's Proof of Stake (PoS) model, validators replace miners from the Pro
In an effort to promote client diversity there are several beacon-nodes being developed. Each is programmed in a different language and by a different team. The following is a list of the current beacon-node clients:
-[Lodestar](https://chainsafe.io/lodestar.html)
-[Prysm](https://prysmaticlabs.com/)
-[Lighthouse](https://lighthouse.sigmaprime.io/)
-[Teku](https://consensys.net/knowledge-base/ethereum-2/teku/)
-[Nimbus](https://nimbus.team/)
+- [Lodestar](https://chainsafe.io/lodestar.html)
+- [Prysm](https://prysmaticlabs.com/)
+- [Lighthouse](https://lighthouse.sigmaprime.io/)
+- [Teku](https://consensys.net/knowledge-base/ethereum-2/teku/)
+- [Nimbus](https://nimbus.team/)
## Why Client Diversity?
diff --git a/docs/pages/logging-and-metrics/log-management.md b/docs/pages/logging-and-metrics/log-management.md
index e69de29bb2d1..a0ee1d5fec07 100644
--- a/docs/pages/logging-and-metrics/log-management.md
+++ b/docs/pages/logging-and-metrics/log-management.md
@@ -0,0 +1,3 @@
+# Log Management
+
+Check back soon for more information!!
diff --git a/docs/pages/reference/cli.md b/docs/pages/reference/cli.md
new file mode 100644
index 000000000000..1b57913b99fc
--- /dev/null
+++ b/docs/pages/reference/cli.md
@@ -0,0 +1,8 @@
+# Page relocated
+
+_**Welcome! This page has been moved. Please check out our new docs layout from the Table of Contents! Below are some helpful links to the CLI pages that were split out from this original document**_
+
+- [Beacon Node CLI](../beacon-management/beacon-cli.md)
+- [Validator CLI](../validator-management/validator-cli.md)
+- [Bootnode CLI](../bootnode/bootnode-cli.md)
+- [Light Client CLI](../lightclient-prover/lightclient-cli.md)
diff --git a/docs/pages/supporting-libraries/index.md b/docs/pages/supporting-libraries/index.md
index eb1e7821db18..555294393ec1 100644
--- a/docs/pages/supporting-libraries/index.md
+++ b/docs/pages/supporting-libraries/index.md
@@ -6,7 +6,7 @@
- [`@chainsafe/js-libp2p-noise`](https://github.com/NodeFactoryIo/js-libp2p-noise) - [Noise](https://noiseprotocol.org/noise.html) handshake for `js-libp2p`
- [`@chainsafe/js-libp2p-gossipsub`](https://github.com/ChainSafe/js-libp2p-gossipsub) - [Gossipsub](https://github.com/libp2p/specs/tree/master/pubsub/gossipsub) protocol for `js-libp2p`
-- [@chainsafe/libp2p-yamux](https://github.com/ChainSafe/libp2p-yamux)
+- [`@chainsafe/libp2p-yamux`](https://github.com/ChainSafe/js-libp2p-yamux)
### Discv5
@@ -14,14 +14,14 @@
## Serialization and Hashing
-- [`ssz`](https://github.com/ChainSafe/ssz) - Simple Serialize (SSZ)
-- [`persistent-merkle-tree`](https://github.com/ChainSafe/persistent-merkle-tree) - binary merkle tree implemented as a [persistent data structure](https://en.wikipedia.org/wiki/Persistent_data_structure)
-- [`as-sha256`](https://github.com/ChainSafe/as-sha256) - Small AssemblyScript implementation of SHA256
+- [`@chainsafe/ssz`](https://github.com/ChainSafe/ssz) - Simple Serialize (SSZ)
+- [`@chainsafe/persistent-merkle-tree`](https://github.com/ChainSafe/persistent-merkle-tree) - binary merkle tree implemented as a [persistent data structure](https://en.wikipedia.org/wiki/Persistent_data_structure)
+- [`@chainsafe/as-sha256`](https://github.com/ChainSafe/as-sha256) - Small AssemblyScript implementation of SHA256
## BLS
-- [`bls`](https://github.com/ChainSafe/bls) - Isomorphic Ethereum Consensus BLS sign / verify / aggregate
-- [`blst-ts`](https://github.com/ChainSafe/blst) - Node specific Ethereum Consensus BLS sign / verify / aggregate
-- [`bls-keystore`](https://github.com/ChainSafe/bls-keystore) - store / retrieve a BLS secret key from an [EIP-2335](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2335.md) JSON keystore
-- [`bls-keygen`](https://github.com/ChainSafe/bls-keygen) - utility functions to generate BLS secret keys, following [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md)
-- [`bls-hd-key`](https://github.com/ChainSafe/bls-hd-key) - low level [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) functionality
+- [`@chainsafe/bls`](https://github.com/ChainSafe/bls) - Isomorphic Ethereum Consensus BLS sign / verify / aggregate
+- [`@chainsafe/blst-ts`](https://github.com/ChainSafe/blst-ts) - Node specific Ethereum Consensus BLS sign / verify / aggregate
+- [`@chainsafe/bls-keystore`](https://github.com/ChainSafe/bls-keystore) - store / retrieve a BLS secret key from an [EIP-2335](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2335.md) JSON keystore
+- [`@chainsafe/bls-keygen`](https://github.com/ChainSafe/bls-keygen) - utility functions to generate BLS secret keys, following [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md)
+- [`@chainsafe/bls-hd-key`](https://github.com/ChainSafe/bls-hd-key) - low level [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) functionality
diff --git a/lerna.json b/lerna.json
index bb0c43fed5fe..487caa95b0a2 100644
--- a/lerna.json
+++ b/lerna.json
@@ -4,7 +4,7 @@
],
"npmClient": "yarn",
"useNx": true,
- "version": "1.13.0",
+ "version": "1.14.0",
"stream": true,
"command": {
"version": {
diff --git a/package.json b/package.json
index 158ac2affe68..8e6dad1fdea2 100644
--- a/package.json
+++ b/package.json
@@ -53,8 +53,8 @@
"@types/sinon-chai": "^3.2.9",
"@typescript-eslint/eslint-plugin": "6.7.2",
"@typescript-eslint/parser": "6.7.2",
- "@vitest/coverage-v8": "^1.0.1",
- "@vitest/browser": "^1.0.1",
+ "@vitest/coverage-v8": "^1.1.0",
+ "@vitest/browser": "^1.1.0",
"c8": "^8.0.1",
"chai": "^4.3.8",
"chai-as-promised": "^7.1.1",
@@ -97,17 +97,18 @@
"ts-node": "^10.9.1",
"typescript": "^5.2.2",
"typescript-docs-verifier": "^2.5.0",
- "vite-plugin-node-polyfills": "^0.17.0",
- "vite-plugin-top-level-await": "^1.3.1",
- "vitest": "^1.0.2",
+ "vite-plugin-node-polyfills": "^0.18.0",
+ "vite-plugin-top-level-await": "^1.4.1",
+ "vitest": "^1.1.0",
"vitest-when": "^0.3.0",
"wait-port": "^1.1.0",
- "webdriverio": "^8.24.12",
+ "webdriverio": "^8.27.0",
"webpack": "^5.88.2"
},
"resolutions": {
"dns-over-http-resolver": "^2.1.1",
"chai": "^4.3.10",
- "loupe": "^2.3.6"
+ "loupe": "^2.3.6",
+ "vite": "^5.0.0"
}
}
diff --git a/packages/api/package.json b/packages/api/package.json
index 2dfcbc73b65c..b7708ac5aa26 100644
--- a/packages/api/package.json
+++ b/packages/api/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
- "version": "1.13.0",
+ "version": "1.14.0",
"type": "module",
"exports": {
".": {
@@ -71,10 +71,10 @@
"dependencies": {
"@chainsafe/persistent-merkle-tree": "^0.6.1",
"@chainsafe/ssz": "^0.14.0",
- "@lodestar/config": "^1.13.0",
- "@lodestar/params": "^1.13.0",
- "@lodestar/types": "^1.13.0",
- "@lodestar/utils": "^1.13.0",
+ "@lodestar/config": "^1.14.0",
+ "@lodestar/params": "^1.14.0",
+ "@lodestar/types": "^1.14.0",
+ "@lodestar/utils": "^1.14.0",
"eventsource": "^2.0.2",
"qs": "^6.11.1"
},
diff --git a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts
index 53ebb93692dc..b56006fe4191 100644
--- a/packages/api/src/beacon/routes/beacon/block.ts
+++ b/packages/api/src/beacon/routes/beacon/block.ts
@@ -1,17 +1,7 @@
import {ContainerType} from "@chainsafe/ssz";
import {ForkName} from "@lodestar/params";
import {ChainForkConfig} from "@lodestar/config";
-import {
- phase0,
- allForks,
- Slot,
- Root,
- ssz,
- RootHex,
- deneb,
- isSignedBlockContents,
- isSignedBlindedBlockContents,
-} from "@lodestar/types";
+import {phase0, allForks, Slot, Root, ssz, RootHex, deneb, isSignedBlockContents} from "@lodestar/types";
import {
RoutesData,
@@ -30,10 +20,7 @@ import {
import {HttpStatusCode} from "../../../utils/client/httpStatusCode.js";
import {parseAcceptHeader, writeAcceptHeader} from "../../../utils/acceptHeader.js";
import {ApiClientResponse, ResponseFormat} from "../../../interfaces.js";
-import {
- allForksSignedBlockContentsReqSerializer,
- allForksSignedBlindedBlockContentsReqSerializer,
-} from "../../../utils/routes.js";
+import {allForksSignedBlockContentsReqSerializer} from "../../../utils/routes.js";
// See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes
@@ -207,7 +194,7 @@ export type Api = {
* Publish a signed blinded block by submitting it to the mev relay and patching in the block
* transactions beacon node gets in response.
*/
- publishBlindedBlock(blindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents): Promise<
+ publishBlindedBlock(blindedBlock: allForks.SignedBlindedBeaconBlock): Promise<
ApiClientResponse<
{
[HttpStatusCode.OK]: void;
@@ -218,7 +205,7 @@ export type Api = {
>;
publishBlindedBlockV2(
- blindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents,
+ blindedBlockOrContents: allForks.SignedBlindedBeaconBlock,
opts: {broadcastValidation?: BroadcastValidation}
): Promise<
ApiClientResponse<
@@ -315,16 +302,9 @@ export function getReqSerializers(config: ChainForkConfig): ReqSerializers
config.getBlindedForkTypes(data.message.slot).SignedBeaconBlock;
- const AllForksSignedBlindedBlockOrContents: TypeJson = {
- toJson: (data) =>
- isSignedBlindedBlockContents(data)
- ? allForksSignedBlindedBlockContentsReqSerializer(getSignedBlindedBeaconBlockType).toJson(data)
- : getSignedBlindedBeaconBlockType(data).toJson(data),
-
- fromJson: (data) =>
- (data as {signed_blinded_block: unknown}).signed_blinded_block !== undefined
- ? allForksSignedBlindedBlockContentsReqSerializer(getSignedBlindedBeaconBlockType).fromJson(data)
- : getSignedBlindedBeaconBlockType(data as allForks.SignedBlindedBeaconBlock).fromJson(data),
+ const AllForksSignedBlindedBlock: TypeJson = {
+ toJson: (data) => getSignedBlindedBeaconBlockType(data).toJson(data),
+ fromJson: (data) => getSignedBlindedBeaconBlockType(data as allForks.SignedBlindedBeaconBlock).fromJson(data),
};
return {
@@ -353,14 +333,14 @@ export function getReqSerializers(config: ChainForkConfig): ReqSerializers ({
- body: AllForksSignedBlindedBlockOrContents.toJson(item),
+ body: AllForksSignedBlindedBlock.toJson(item),
query: {broadcast_validation: broadcastValidation},
}),
parseReq: ({body, query}) => [
- AllForksSignedBlindedBlockOrContents.fromJson(body),
+ AllForksSignedBlindedBlock.fromJson(body),
{broadcastValidation: query.broadcast_validation as BroadcastValidation},
],
schema: {
diff --git a/packages/api/src/beacon/routes/validator.ts b/packages/api/src/beacon/routes/validator.ts
index f5ae20937a0e..0746797cbf0e 100644
--- a/packages/api/src/beacon/routes/validator.ts
+++ b/packages/api/src/beacon/routes/validator.ts
@@ -1,5 +1,5 @@
import {ContainerType, fromHexString, toHexString, Type} from "@chainsafe/ssz";
-import {ForkName, ForkBlobs, isForkBlobs, isForkExecution, ForkPreBlobs} from "@lodestar/params";
+import {ForkName, ForkBlobs, isForkBlobs, isForkExecution, ForkPreBlobs, ForkExecution} from "@lodestar/params";
import {
allForks,
altair,
@@ -13,12 +13,14 @@ import {
Slot,
ssz,
UintNum64,
+ UintBn64,
ValidatorIndex,
RootHex,
StringType,
SubcommitteeIndex,
Wei,
Gwei,
+ ProducedBlockSource,
} from "@lodestar/types";
import {ApiClientResponse} from "../../interfaces.js";
import {HttpStatusCode} from "../../utils/client/httpStatusCode.js";
@@ -37,7 +39,7 @@ import {
TypeJson,
} from "../../utils/index.js";
import {fromU64Str, fromGraffitiHex, toU64Str, U64Str, toGraffitiHex} from "../../utils/serdes.js";
-import {allForksBlockContentsResSerializer, allForksBlindedBlockContentsResSerializer} from "../../utils/routes.js";
+import {allForksBlockContentsResSerializer} from "../../utils/routes.js";
import {ExecutionOptimistic} from "./beacon/block.js";
export enum BuilderSelection {
@@ -52,21 +54,24 @@ export enum BuilderSelection {
export type ExtraProduceBlockOps = {
feeRecipient?: string;
builderSelection?: BuilderSelection;
+ builderBoostFactor?: UintBn64;
strictFeeRecipientCheck?: boolean;
+ blindedLocal?: boolean;
};
export type ProduceBlockOrContentsRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & (
| {data: allForks.BeaconBlock; version: ForkPreBlobs}
| {data: allForks.BlockContents; version: ForkBlobs}
);
-export type ProduceBlindedBlockOrContentsRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & (
- | {data: allForks.BlindedBeaconBlock; version: ForkPreBlobs}
- | {data: allForks.BlindedBlockContents; version: ForkBlobs}
-);
+export type ProduceBlindedBlockRes = {executionPayloadValue: Wei; consensusBlockValue: Gwei} & {
+ data: allForks.BlindedBeaconBlock;
+ version: ForkExecution;
+};
-export type ProduceFullOrBlindedBlockOrContentsRes =
+export type ProduceFullOrBlindedBlockOrContentsRes = {executionPayloadSource: ProducedBlockSource} & (
| (ProduceBlockOrContentsRes & {executionPayloadBlinded: false})
- | (ProduceBlindedBlockOrContentsRes & {executionPayloadBlinded: true});
+ | (ProduceBlindedBlockRes & {executionPayloadBlinded: true})
+);
// See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes
@@ -287,7 +292,7 @@ export type Api = {
): Promise<
ApiClientResponse<
{
- [HttpStatusCode.OK]: ProduceBlindedBlockOrContentsRes;
+ [HttpStatusCode.OK]: ProduceBlindedBlockRes;
},
HttpStatusCode.BAD_REQUEST | HttpStatusCode.SERVICE_UNAVAILABLE
>
@@ -484,7 +489,9 @@ export type ReqTypes = {
skip_randao_verification?: boolean;
fee_recipient?: string;
builder_selection?: string;
+ builder_boost_factor?: string;
strict_fee_recipient_check?: boolean;
+ blinded_local?: boolean;
};
};
produceBlindedBlock: {params: {slot: number}; query: {randao_reveal: string; graffiti: string}};
@@ -551,7 +558,9 @@ export function getReqSerializers(): ReqSerializers {
fee_recipient: opts?.feeRecipient,
skip_randao_verification: skipRandaoVerification,
builder_selection: opts?.builderSelection,
+ builder_boost_factor: opts?.builderBoostFactor?.toString(),
strict_fee_recipient_check: opts?.strictFeeRecipientCheck,
+ blinded_local: opts?.blindedLocal,
},
}),
parseReq: ({params, query}) => [
@@ -562,7 +571,9 @@ export function getReqSerializers(): ReqSerializers {
{
feeRecipient: query.fee_recipient,
builderSelection: query.builder_selection as BuilderSelection,
+ builderBoostFactor: parseBuilderBoostFactor(query.builder_boost_factor),
strictFeeRecipientCheck: query.strict_fee_recipient_check,
+ blindedLocal: query.blinded_local,
},
],
schema: {
@@ -573,7 +584,9 @@ export function getReqSerializers(): ReqSerializers {
fee_recipient: Schema.String,
skip_randao_verification: Schema.Boolean,
builder_selection: Schema.String,
+ builder_boost_factor: Schema.String,
strict_fee_recipient_check: Schema.Boolean,
+ blinded_local: Schema.Boolean,
},
},
};
@@ -721,13 +734,11 @@ export function getReturnTypes(): ReturnTypes {
isForkBlobs(fork) ? allForksBlockContentsResSerializer(fork) : ssz[fork].BeaconBlock
)
) as TypeJson;
- const produceBlindedBlockOrContents = WithBlockValues(
- WithVersion((fork: ForkName) =>
- isForkBlobs(fork)
- ? allForksBlindedBlockContentsResSerializer(fork)
- : ssz.allForksBlinded[isForkExecution(fork) ? fork : ForkName.bellatrix].BeaconBlock
+ const produceBlindedBlock = WithBlockValues(
+ WithVersion(
+ (fork: ForkName) => ssz.allForksBlinded[isForkExecution(fork) ? fork : ForkName.bellatrix].BeaconBlock
)
- ) as TypeJson;
+ ) as TypeJson;
return {
getAttesterDuties: WithDependentRootExecutionOptimistic(ArrayOf(AttesterDuty)),
@@ -741,24 +752,36 @@ export function getReturnTypes(): ReturnTypes {
if (data.executionPayloadBlinded) {
return {
execution_payload_blinded: true,
- ...(produceBlindedBlockOrContents.toJson(data) as Record),
+ execution_payload_source: data.executionPayloadSource,
+ ...(produceBlindedBlock.toJson(data) as Record),
};
} else {
return {
execution_payload_blinded: false,
+ execution_payload_source: data.executionPayloadSource,
...(produceBlockOrContents.toJson(data) as Record),
};
}
},
fromJson: (data) => {
- if ((data as {execution_payload_blinded: true}).execution_payload_blinded) {
- return {executionPayloadBlinded: true, ...produceBlindedBlockOrContents.fromJson(data)};
+ const executionPayloadBlinded = (data as {execution_payload_blinded: boolean}).execution_payload_blinded;
+ if (executionPayloadBlinded === undefined) {
+ throw Error(`Invalid executionPayloadBlinded=${executionPayloadBlinded} for fromJson deserialization`);
+ }
+
+ // extract source from the data and assign defaults in the spec compliant manner if not present in response
+ const executionPayloadSource =
+ (data as {execution_payload_source: ProducedBlockSource}).execution_payload_source ??
+ (executionPayloadBlinded ? ProducedBlockSource.builder : ProducedBlockSource.engine);
+
+ if (executionPayloadBlinded) {
+ return {executionPayloadBlinded, executionPayloadSource, ...produceBlindedBlock.fromJson(data)};
} else {
- return {executionPayloadBlinded: false, ...produceBlockOrContents.fromJson(data)};
+ return {executionPayloadBlinded, executionPayloadSource, ...produceBlockOrContents.fromJson(data)};
}
},
},
- produceBlindedBlock: produceBlindedBlockOrContents,
+ produceBlindedBlock,
produceAttestationData: ContainerData(ssz.phase0.AttestationData),
produceSyncCommitteeContribution: ContainerData(ssz.altair.SyncCommitteeContribution),
@@ -768,3 +791,7 @@ export function getReturnTypes(): ReturnTypes {
getLiveness: jsonType("snake"),
};
}
+
+function parseBuilderBoostFactor(builderBoostFactorInput?: string | number | bigint): bigint | undefined {
+ return builderBoostFactorInput !== undefined ? BigInt(builderBoostFactorInput) : undefined;
+}
diff --git a/packages/api/src/beacon/server/validator.ts b/packages/api/src/beacon/server/validator.ts
index 6bf446e05a16..5d6c22557060 100644
--- a/packages/api/src/beacon/server/validator.ts
+++ b/packages/api/src/beacon/server/validator.ts
@@ -4,6 +4,28 @@ import {ServerRoutes, getGenericJsonServer} from "../../utils/server/index.js";
import {ServerApi} from "../../interfaces.js";
export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerRoutes {
- // All routes return JSON, use a server auto-generator
- return getGenericJsonServer, ReqTypes>({routesData, getReturnTypes, getReqSerializers}, config, api);
+ const reqSerializers = getReqSerializers();
+ const returnTypes = getReturnTypes();
+
+ // Most routes return JSON, use a server auto-generator
+ const serverRoutes = getGenericJsonServer, ReqTypes>(
+ {routesData, getReturnTypes, getReqSerializers},
+ config,
+ api
+ );
+ return {
+ ...serverRoutes,
+ produceBlockV3: {
+ ...serverRoutes.produceBlockV3,
+ handler: async (req, res) => {
+ const response = await api.produceBlockV3(...reqSerializers.produceBlockV3.parseReq(req));
+ void res.header("Eth-Consensus-Version", response.version);
+ void res.header("Eth-Execution-Payload-Blinded", response.executionPayloadBlinded);
+ void res.header("Eth-Execution-Payload-Value", response.executionPayloadValue);
+ void res.header("Eth-Consensus-Block-Value", response.consensusBlockValue);
+
+ return returnTypes.produceBlockV3.toJson(response);
+ },
+ },
+ };
}
diff --git a/packages/api/src/builder/routes.ts b/packages/api/src/builder/routes.ts
index 0136f1deeac4..6f5a55f0dcff 100644
--- a/packages/api/src/builder/routes.ts
+++ b/packages/api/src/builder/routes.ts
@@ -34,7 +34,7 @@ export type Api = {
HttpStatusCode.NOT_FOUND | HttpStatusCode.BAD_REQUEST
>
>;
- submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlockOrContents): Promise<
+ submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise<
ApiClientResponse<
{
[HttpStatusCode.OK]: {
diff --git a/packages/api/src/keymanager/routes.ts b/packages/api/src/keymanager/routes.ts
index 09f5e7610604..48f928e86100 100644
--- a/packages/api/src/keymanager/routes.ts
+++ b/packages/api/src/keymanager/routes.ts
@@ -72,6 +72,10 @@ export type GasLimitData = {
pubkey: string;
gasLimit: number;
};
+export type BuilderBoostFactorData = {
+ pubkey: string;
+ builderBoostFactor: bigint;
+};
export type SignerDefinition = {
pubkey: PubkeyHex;
@@ -247,6 +251,27 @@ export type Api = {
>
>;
+ getBuilderBoostFactor(
+ pubkey: string
+ ): Promise>;
+ setBuilderBoostFactor(
+ pubkey: string,
+ builderBoostFactor: bigint
+ ): Promise<
+ ApiClientResponse<
+ {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void},
+ HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | HttpStatusCode.NOT_FOUND
+ >
+ >;
+ deleteBuilderBoostFactor(
+ pubkey: string
+ ): Promise<
+ ApiClientResponse<
+ {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void},
+ HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | HttpStatusCode.NOT_FOUND
+ >
+ >;
+
/**
* Create a signed voluntary exit message for an active validator, identified by a public key known to the validator
* client. This endpoint returns a `SignedVoluntaryExit` object, which can be used to initiate voluntary exit via the
@@ -290,6 +315,10 @@ export const routesData: RoutesData = {
setGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "POST", statusOk: 202},
deleteGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "DELETE", statusOk: 204},
+ getBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "GET"},
+ setBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "POST", statusOk: 202},
+ deleteBuilderBoostFactor: {url: "/eth/v1/validator/{pubkey}/builder_boost_factor", method: "DELETE", statusOk: 204},
+
signVoluntaryExit: {url: "/eth/v1/validator/{pubkey}/voluntary_exit", method: "POST"},
};
@@ -326,6 +355,10 @@ export type ReqTypes = {
setGasLimit: {params: {pubkey: string}; body: {gas_limit: string}};
deleteGasLimit: {params: {pubkey: string}};
+ getBuilderBoostFactor: {params: {pubkey: string}};
+ setBuilderBoostFactor: {params: {pubkey: string}; body: {builder_boost_factor: string}};
+ deleteBuilderBoostFactor: {params: {pubkey: string}};
+
signVoluntaryExit: {params: {pubkey: string}; query: {epoch?: number}};
};
@@ -423,6 +456,33 @@ export function getReqSerializers(): ReqSerializers {
params: {pubkey: Schema.StringRequired},
},
},
+
+ getBuilderBoostFactor: {
+ writeReq: (pubkey) => ({params: {pubkey}}),
+ parseReq: ({params: {pubkey}}) => [pubkey],
+ schema: {
+ params: {pubkey: Schema.StringRequired},
+ },
+ },
+ setBuilderBoostFactor: {
+ writeReq: (pubkey, builderBoostFactor) => ({
+ params: {pubkey},
+ body: {builder_boost_factor: builderBoostFactor.toString(10)},
+ }),
+ parseReq: ({params: {pubkey}, body: {builder_boost_factor}}) => [pubkey, BigInt(builder_boost_factor)],
+ schema: {
+ params: {pubkey: Schema.StringRequired},
+ body: Schema.Object,
+ },
+ },
+ deleteBuilderBoostFactor: {
+ writeReq: (pubkey) => ({params: {pubkey}}),
+ parseReq: ({params: {pubkey}}) => [pubkey],
+ schema: {
+ params: {pubkey: Schema.StringRequired},
+ },
+ },
+
signVoluntaryExit: {
writeReq: (pubkey, epoch) => ({params: {pubkey}, query: epoch !== undefined ? {epoch} : {}}),
parseReq: ({params: {pubkey}, query: {epoch}}) => [pubkey, epoch],
@@ -455,6 +515,15 @@ export function getReturnTypes(): ReturnTypes {
{jsonCase: "eth2"}
)
),
+ getBuilderBoostFactor: ContainerData(
+ new ContainerType(
+ {
+ pubkey: stringType,
+ builderBoostFactor: ssz.UintBn64,
+ },
+ {jsonCase: "eth2"}
+ )
+ ),
signVoluntaryExit: ContainerData(ssz.phase0.SignedVoluntaryExit),
};
}
diff --git a/packages/api/src/utils/client/metrics.ts b/packages/api/src/utils/client/metrics.ts
index c8bc3c0637a4..65089e92e7ec 100644
--- a/packages/api/src/utils/client/metrics.ts
+++ b/packages/api/src/utils/client/metrics.ts
@@ -1,49 +1,9 @@
+import {Gauge, GaugeExtra, Histogram} from "@lodestar/utils";
+
export type Metrics = {
- requestTime: Histogram<"routeId">;
- streamTime: Histogram<"routeId">;
- requestErrors: Gauge<"routeId">;
- requestToFallbacks: Gauge<"routeId">;
- urlsScore: Gauge<"urlIndex">;
+ requestTime: Histogram<{routeId: string}>;
+ streamTime: Histogram<{routeId: string}>;
+ requestErrors: Gauge<{routeId: string}>;
+ requestToFallbacks: Gauge<{routeId: string}>;
+ urlsScore: GaugeExtra<{urlIndex: number}>;
};
-
-type LabelValues = Partial>;
-type CollectFn = (metric: Gauge) => void;
-
-export interface Gauge {
- /**
- * Increment gauge for given labels
- * @param labels Object with label keys and values
- * @param value The value to increment with
- */
- inc(labels: LabelValues, value?: number): void;
-
- /**
- * Increment gauge
- * @param value The value to increment with
- */
- inc(value?: number): void;
-
- /**
- * Set gauge value for labels
- * @param labels Object with label keys and values
- * @param value The value to set
- */
- set(labels: LabelValues, value: number): void;
-
- /**
- * Set gauge value
- * @param value The value to set
- */
- set(value: number): void;
-
- addCollect(collectFn: CollectFn): void;
-}
-
-export interface Histogram {
- /**
- * Start a timer where the value in seconds will observed
- * @param labels Object with label keys and values
- * @return Function to invoke when timer should be stopped
- */
- startTimer(labels?: LabelValues): (labels?: LabelValues) => number;
-}
diff --git a/packages/api/src/utils/routes.ts b/packages/api/src/utils/routes.ts
index 213a561efd58..77d177f7b24c 100644
--- a/packages/api/src/utils/routes.ts
+++ b/packages/api/src/utils/routes.ts
@@ -11,12 +11,14 @@ export function allForksSignedBlockContentsReqSerializer(
return {
toJson: (data) => ({
signed_block: blockSerializer(data.signedBlock).toJson(data.signedBlock),
- signed_blob_sidecars: ssz.deneb.SignedBlobSidecars.toJson(data.signedBlobSidecars),
+ kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs),
+ blobs: ssz.deneb.Blobs.toJson(data.blobs),
}),
- fromJson: (data: {signed_block: unknown; signed_blob_sidecars: unknown}) => ({
+ fromJson: (data: {signed_block: unknown; kzg_proofs: unknown; blobs: unknown}) => ({
signedBlock: blockSerializer(data.signed_block as allForks.SignedBeaconBlock).fromJson(data.signed_block),
- signedBlobSidecars: ssz.deneb.SignedBlobSidecars.fromJson(data.signed_blob_sidecars),
+ kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs),
+ blobs: ssz.deneb.Blobs.fromJson(data.blobs),
}),
};
}
@@ -25,44 +27,13 @@ export function allForksBlockContentsResSerializer(fork: ForkBlobs): TypeJson ({
block: (ssz.allForks[fork].BeaconBlock as allForks.AllForksSSZTypes["BeaconBlock"]).toJson(data.block),
- blob_sidecars: ssz.deneb.BlobSidecars.toJson(data.blobSidecars),
+ kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs),
+ blobs: ssz.deneb.Blobs.toJson(data.blobs),
}),
- fromJson: (data: {block: unknown; blob_sidecars: unknown}) => ({
+ fromJson: (data: {block: unknown; blob_sidecars: unknown; kzg_proofs: unknown; blobs: unknown}) => ({
block: ssz.allForks[fork].BeaconBlock.fromJson(data.block),
- blobSidecars: ssz.deneb.BlobSidecars.fromJson(data.blob_sidecars),
- }),
- };
-}
-
-export function allForksSignedBlindedBlockContentsReqSerializer(
- blockSerializer: (data: allForks.SignedBlindedBeaconBlock) => TypeJson
-): TypeJson {
- return {
- toJson: (data) => ({
- signed_blinded_block: blockSerializer(data.signedBlindedBlock).toJson(data.signedBlindedBlock),
- signed_blinded_blob_sidecars: ssz.deneb.SignedBlindedBlobSidecars.toJson(data.signedBlindedBlobSidecars),
- }),
-
- fromJson: (data: {signed_blinded_block: unknown; signed_blinded_blob_sidecars: unknown}) => ({
- signedBlindedBlock: blockSerializer(data.signed_blinded_block as allForks.SignedBlindedBeaconBlock).fromJson(
- data.signed_blinded_block
- ),
- signedBlindedBlobSidecars: ssz.deneb.SignedBlindedBlobSidecars.fromJson(data.signed_blinded_blob_sidecars),
- }),
- };
-}
-
-export function allForksBlindedBlockContentsResSerializer(fork: ForkBlobs): TypeJson {
- return {
- toJson: (data) => ({
- blinded_block: (ssz.allForksBlinded[fork].BeaconBlock as allForks.AllForksBlindedSSZTypes["BeaconBlock"]).toJson(
- data.blindedBlock
- ),
- blinded_blob_sidecars: ssz.deneb.BlindedBlobSidecars.toJson(data.blindedBlobSidecars),
- }),
- fromJson: (data: {blinded_block: unknown; blinded_blob_sidecars: unknown}) => ({
- blindedBlock: ssz.allForksBlinded[fork].BeaconBlock.fromJson(data.blinded_block),
- blindedBlobSidecars: ssz.deneb.BlindedBlobSidecars.fromJson(data.blinded_blob_sidecars),
+ kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs),
+ blobs: ssz.deneb.Blobs.fromJson(data.blobs),
}),
};
}
diff --git a/packages/api/test/unit/beacon/oapiSpec.test.ts b/packages/api/test/unit/beacon/oapiSpec.test.ts
index c1abd32cb591..1a300eba6f36 100644
--- a/packages/api/test/unit/beacon/oapiSpec.test.ts
+++ b/packages/api/test/unit/beacon/oapiSpec.test.ts
@@ -6,7 +6,7 @@ import {OpenApiFile} from "../../utils/parseOpenApiSpec.js";
import {routes} from "../../../src/beacon/index.js";
import {ReqSerializers} from "../../../src/utils/types.js";
import {Schema} from "../../../src/utils/schema.js";
-import {runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js";
+import {IgnoredProperty, runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js";
import {fetchOpenApiSpec} from "../../utils/fetchOpenApiSpec.js";
// Import all testData and merge below
import {testData as beaconTestData} from "./testData/beacon.js";
@@ -23,7 +23,7 @@ import {testData as validatorTestData} from "./testData/validator.js";
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
-const version = "v2.3.0";
+const version = "v2.4.2";
const openApiFile: OpenApiFile = {
url: `https://github.com/ethereum/beacon-APIs/releases/download/${version}/beacon-node-oapi.json`,
filepath: path.join(__dirname, "../../../oapi-schemas/beacon-node-oapi.json"),
@@ -84,11 +84,105 @@ const testDatas = {
...validatorTestData,
};
+const ignoredOperations = [
+ /* missing route */
+ /* https://github.com/ChainSafe/lodestar/issues/5694 */
+ "getSyncCommitteeRewards",
+ "getBlockRewards",
+ "getAttestationsRewards",
+ "getDepositSnapshot", // Won't fix for now, see https://github.com/ChainSafe/lodestar/issues/5697
+ "getBlindedBlock", // https://github.com/ChainSafe/lodestar/issues/5699
+ "getNextWithdrawals", // https://github.com/ChainSafe/lodestar/issues/5696
+ "getDebugForkChoice", // https://github.com/ChainSafe/lodestar/issues/5700
+ /* https://github.com/ChainSafe/lodestar/issues/6080 */
+ "getLightClientBootstrap",
+ "getLightClientUpdatesByRange",
+ "getLightClientFinalityUpdate",
+ "getLightClientOptimisticUpdate",
+ "getPoolBLSToExecutionChanges",
+ "submitPoolBLSToExecutionChange",
+];
+
+const ignoredProperties: Record = {
+ /*
+ https://github.com/ChainSafe/lodestar/issues/5693
+ missing finalized
+ */
+ getStateRoot: {response: ["finalized"]},
+ getStateFork: {response: ["finalized"]},
+ getStateFinalityCheckpoints: {response: ["finalized"]},
+ getStateValidators: {response: ["finalized"]},
+ getStateValidator: {response: ["finalized"]},
+ getStateValidatorBalances: {response: ["finalized"]},
+ getEpochCommittees: {response: ["finalized"]},
+ getEpochSyncCommittees: {response: ["finalized"]},
+ getStateRandao: {response: ["finalized"]},
+ getBlockHeaders: {response: ["finalized"]},
+ getBlockHeader: {response: ["finalized"]},
+ getBlockV2: {response: ["finalized"]},
+ getBlockRoot: {response: ["finalized"]},
+ getBlockAttestations: {response: ["finalized"]},
+ getStateV2: {response: ["finalized"]},
+
+ /*
+ https://github.com/ChainSafe/lodestar/issues/6168
+ /query/syncing_status - must be integer
+ */
+ getHealth: {request: ["query.syncing_status"]},
+
+ /**
+ * https://github.com/ChainSafe/lodestar/issues/6185
+ * - must have required property 'query'
+ */
+ getBlobSidecars: {request: ["query"]},
+
+ /*
+ https://github.com/ChainSafe/lodestar/issues/4638
+ /query - must have required property 'skip_randao_verification'
+ */
+ produceBlockV2: {request: ["query.skip_randao_verification"]},
+ produceBlindedBlock: {request: ["query.skip_randao_verification"]},
+};
+
const openApiJson = await fetchOpenApiSpec(openApiFile);
-runTestCheckAgainstSpec(openApiJson, routesData, reqSerializers, returnTypes, testDatas, {
- // TODO: Investigate why schema validation fails otherwise
- routesDropOneOf: ["produceBlockV2", "produceBlindedBlock", "publishBlindedBlock"],
-});
+runTestCheckAgainstSpec(
+ openApiJson,
+ routesData,
+ reqSerializers,
+ returnTypes,
+ testDatas,
+ {
+ // TODO: Investigate why schema validation fails otherwise (see https://github.com/ChainSafe/lodestar/issues/6187)
+ routesDropOneOf: [
+ "produceBlockV2",
+ "produceBlockV3",
+ "produceBlindedBlock",
+ "publishBlindedBlock",
+ "publishBlindedBlockV2",
+ ],
+ },
+ ignoredOperations,
+ ignoredProperties
+);
+
+const ignoredTopics = [
+ /*
+ https://github.com/ChainSafe/lodestar/issues/6167
+ eventTestData[bls_to_execution_change] does not match spec's example
+ */
+ "bls_to_execution_change",
+ /*
+ https://github.com/ChainSafe/lodestar/issues/6170
+ Error: Invalid slot=0 fork=phase0 for lightclient fork types
+ */
+ "light_client_finality_update",
+ "light_client_optimistic_update",
+ /*
+ https://github.com/ethereum/beacon-APIs/pull/379
+ SyntaxError: Unexpected non-whitespace character after JSON at position 629 (line 1 column 630)
+ */
+ "payload_attributes",
+];
// eventstream types are defined as comments in the description of "examples".
// The function runTestCheckAgainstSpec() can't handle those, so the custom code before:
@@ -113,7 +207,9 @@ describe("eventstream event data", () => {
const eventSerdes = routes.events.getEventSerdes(config);
const knownTopics = new Set(Object.values(routes.events.eventTypes));
- for (const [topic, {value}] of Object.entries(eventstreamExamples ?? {})) {
+ for (const [topic, {value}] of Object.entries(eventstreamExamples ?? {}).filter(
+ ([topic]) => !ignoredTopics.includes(topic)
+ )) {
it(topic, () => {
if (!knownTopics.has(topic)) {
throw Error(`topic ${topic} not implemented`);
@@ -130,7 +226,6 @@ describe("eventstream event data", () => {
if (testEvent == null) {
throw Error(`No eventTestData for ${topic}`);
}
-
const testEventJson = eventSerdes.toJson({
type: topic as routes.events.EventType,
message: testEvent,
diff --git a/packages/api/test/unit/beacon/testData/validator.ts b/packages/api/test/unit/beacon/testData/validator.ts
index b827bad0be90..2688f2080eba 100644
--- a/packages/api/test/unit/beacon/testData/validator.ts
+++ b/packages/api/test/unit/beacon/testData/validator.ts
@@ -1,5 +1,5 @@
import {ForkName} from "@lodestar/params";
-import {ssz} from "@lodestar/types";
+import {ssz, ProducedBlockSource} from "@lodestar/types";
import {Api} from "../../../../src/beacon/routes/validator.js";
import {GenericServerTestCases} from "../../../utils/genericServerTest.js";
@@ -50,7 +50,13 @@ export const testData: GenericServerTestCases = {
randaoReveal,
graffiti,
undefined,
- {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined},
+ {
+ feeRecipient,
+ builderSelection: undefined,
+ strictFeeRecipientCheck: undefined,
+ blindedLocal: undefined,
+ builderBoostFactor: 100n,
+ },
] as unknown as GenericServerTestCases["produceBlock"]["args"],
res: {data: ssz.phase0.BeaconBlock.defaultValue()},
},
@@ -60,7 +66,13 @@ export const testData: GenericServerTestCases = {
randaoReveal,
graffiti,
undefined,
- {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined},
+ {
+ feeRecipient,
+ builderSelection: undefined,
+ strictFeeRecipientCheck: undefined,
+ blindedLocal: undefined,
+ builderBoostFactor: 100n,
+ },
] as unknown as GenericServerTestCases["produceBlockV2"]["args"],
res: {
data: ssz.altair.BeaconBlock.defaultValue(),
@@ -75,7 +87,13 @@ export const testData: GenericServerTestCases = {
randaoReveal,
graffiti,
true,
- {feeRecipient, builderSelection: undefined, strictFeeRecipientCheck: undefined},
+ {
+ feeRecipient,
+ builderSelection: undefined,
+ strictFeeRecipientCheck: undefined,
+ blindedLocal: undefined,
+ builderBoostFactor: 100n,
+ },
],
res: {
data: ssz.altair.BeaconBlock.defaultValue(),
@@ -83,6 +101,7 @@ export const testData: GenericServerTestCases = {
executionPayloadValue: ssz.Wei.defaultValue(),
consensusBlockValue: ssz.Gwei.defaultValue(),
executionPayloadBlinded: false,
+ executionPayloadSource: ProducedBlockSource.engine,
},
},
produceBlindedBlock: {
@@ -91,7 +110,13 @@ export const testData: GenericServerTestCases = {
randaoReveal,
graffiti,
undefined,
- {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined},
+ {
+ feeRecipient,
+ builderSelection: undefined,
+ strictFeeRecipientCheck: undefined,
+ blindedLocal: undefined,
+ builderBoostFactor: 100n,
+ },
] as unknown as GenericServerTestCases["produceBlindedBlock"]["args"],
res: {
data: ssz.bellatrix.BlindedBeaconBlock.defaultValue(),
diff --git a/packages/api/test/unit/keymanager/testData.ts b/packages/api/test/unit/keymanager/testData.ts
index a4fc72fc8e2d..2c66610c8733 100644
--- a/packages/api/test/unit/keymanager/testData.ts
+++ b/packages/api/test/unit/keymanager/testData.ts
@@ -13,6 +13,7 @@ const pubkeyRand = "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff
const ethaddressRand = "0xabcf8e0d4e9587369b2301d0790347320302cc09";
const graffitiRandUtf8 = "636861696e736166652f6c6f64657374";
const gasLimitRand = 30_000_000;
+const builderBoostFactorRand = BigInt(100);
export const testData: GenericServerTestCases = {
listKeys: {
@@ -99,4 +100,16 @@ export const testData: GenericServerTestCases = {
args: [pubkeyRand, 1],
res: {data: ssz.phase0.SignedVoluntaryExit.defaultValue()},
},
+ getBuilderBoostFactor: {
+ args: [pubkeyRand],
+ res: {data: {pubkey: pubkeyRand, builderBoostFactor: builderBoostFactorRand}},
+ },
+ setBuilderBoostFactor: {
+ args: [pubkeyRand, builderBoostFactorRand],
+ res: undefined,
+ },
+ deleteBuilderBoostFactor: {
+ args: [pubkeyRand],
+ res: undefined,
+ },
};
diff --git a/packages/api/test/utils/checkAgainstSpec.ts b/packages/api/test/utils/checkAgainstSpec.ts
index 01e7df255db2..ed65279bca22 100644
--- a/packages/api/test/utils/checkAgainstSpec.ts
+++ b/packages/api/test/utils/checkAgainstSpec.ts
@@ -1,16 +1,16 @@
import Ajv, {ErrorObject} from "ajv";
import {expect, describe, beforeAll, it} from "vitest";
import {ReqGeneric, ReqSerializer, ReturnTypes, RouteDef} from "../../src/utils/types.js";
-import {applyRecursively, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from "./parseOpenApiSpec.js";
+import {applyRecursively, JsonSchema, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from "./parseOpenApiSpec.js";
import {GenericServerTestCases} from "./genericServerTest.js";
const ajv = new Ajv({
- // strict: true,
- // strictSchema: true,
+ strict: true,
+ strictTypes: false, // TODO Enable once beacon-APIs is fixed. See https://github.com/ChainSafe/lodestar/issues/6206
allErrors: true,
});
-// TODO: Still necessary?
+// Ensure embedded schema 'example' does not fail validation
ajv.addKeyword({
keyword: "example",
validate: () => true,
@@ -19,17 +19,69 @@ ajv.addKeyword({
ajv.addFormat("hex", /^0x[a-fA-F0-9]+$/);
+/**
+ * A set of properties that will be ignored during tests execution.
+ * This allows for a black-list mechanism to have a test pass while some part of the spec is not yet implemented.
+ *
+ * Properties can be nested using dot notation, following JSONPath semantic.
+ *
+ * Example:
+ * - query
+ * - query.skip_randao_verification
+ */
+export type IgnoredProperty = {
+ /**
+ * Properties to ignore in the request schema
+ */
+ request?: string[];
+ /**
+ * Properties to ignore in the response schema
+ */
+ response?: string[];
+};
+
+/**
+ * Recursively remove a property from a schema
+ *
+ * @param schema Schema to remove a property from
+ * @param property JSONPath like property to remove from the schema
+ */
+function deleteNested(schema: JsonSchema | undefined, property: string): void {
+ const properties = schema?.properties;
+ if (property.includes(".")) {
+ // Extract first segment, keep the rest as dotted
+ const [key, ...rest] = property.split(".");
+ deleteNested(properties?.[key], rest.join("."));
+ } else {
+ // Remove property from 'required'
+ if (schema?.required) {
+ schema.required = schema.required?.filter((e) => property !== e);
+ }
+ // Remove property from 'properties'
+ delete properties?.[property];
+ }
+}
+
export function runTestCheckAgainstSpec(
openApiJson: OpenApiJson,
routesData: Record,
reqSerializers: Record>,
returnTypes: Record[string]>,
testDatas: Record[string]>,
- opts?: ParseOpenApiSpecOpts
+ opts?: ParseOpenApiSpecOpts,
+ ignoredOperations: string[] = [],
+ ignoredProperties: Record = {}
): void {
const openApiSpec = parseOpenApiSpec(openApiJson, opts);
for (const [operationId, routeSpec] of openApiSpec.entries()) {
+ const isIgnored = ignoredOperations.some((id) => id === operationId);
+ if (isIgnored) {
+ continue;
+ }
+
+ const ignoredProperty = ignoredProperties[operationId];
+
describe(operationId, () => {
const {requestSchema, responseOkSchema} = routeSpec;
const routeId = operationId;
@@ -68,7 +120,15 @@ export function runTestCheckAgainstSpec(
stringifyProperties((reqJson as ReqGeneric).params ?? {});
stringifyProperties((reqJson as ReqGeneric).query ?? {});
- // Validate response
+ const ignoredProperties = ignoredProperty?.request;
+ if (ignoredProperties) {
+ // Remove ignored properties from schema validation
+ for (const property of ignoredProperties) {
+ deleteNested(routeSpec.requestSchema, property);
+ }
+ }
+
+ // Validate request
validateSchema(routeSpec.requestSchema, reqJson, "request");
});
}
@@ -87,6 +147,13 @@ export function runTestCheckAgainstSpec(
}
}
+ const ignoredProperties = ignoredProperty?.response;
+ if (ignoredProperties) {
+ // Remove ignored properties from schema validation
+ for (const property of ignoredProperties) {
+ deleteNested(routeSpec.responseOkSchema, property);
+ }
+ }
// Validate response
validateSchema(responseOkSchema, resJson, "response");
});
diff --git a/packages/api/test/utils/parseOpenApiSpec.ts b/packages/api/test/utils/parseOpenApiSpec.ts
index 5faf0082012d..84b024e5950e 100644
--- a/packages/api/test/utils/parseOpenApiSpec.ts
+++ b/packages/api/test/utils/parseOpenApiSpec.ts
@@ -11,7 +11,7 @@ type RouteUrl = string;
/** "get" | "post" */
type HttpMethod = string;
-type JsonSchema = {
+export type JsonSchema = {
type: "object";
properties?: Record;
required?: string[];
diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json
index b29169777549..68ff28c7d9e2 100644
--- a/packages/beacon-node/package.json
+++ b/packages/beacon-node/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
- "version": "1.13.0",
+ "version": "1.14.0",
"type": "module",
"exports": {
".": {
@@ -79,8 +79,8 @@
"test": "yarn test:unit && yarn test:e2e",
"test:unit:minimal": "vitest --run --segfaultRetry 3 --dir test/unit/ --coverage",
"test:unit:mainnet": "LODESTAR_PRESET=mainnet nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit-mainnet/**/*.test.ts'",
- "test:unit": "yarn test:unit:minimal && yarn test:unit:mainnet",
- "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 --poolOptions.threads.singleThread --dir test/e2e",
+ "test:unit": "wrapper() { yarn test:unit:minimal $@ && yarn test:unit:mainnet $@; }; wrapper",
+ "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 --poolOptions.threads.singleThread true --dir test/e2e",
"test:sim": "mocha 'test/sim/**/*.test.ts'",
"test:sim:merge-interop": "mocha 'test/sim/merge-interop.test.ts'",
"test:sim:mergemock": "mocha 'test/sim/mergemock.test.ts'",
@@ -119,18 +119,18 @@
"@libp2p/peer-id-factory": "^3.0.4",
"@libp2p/prometheus-metrics": "^2.0.7",
"@libp2p/tcp": "8.0.8",
- "@lodestar/api": "^1.13.0",
- "@lodestar/config": "^1.13.0",
- "@lodestar/db": "^1.13.0",
- "@lodestar/fork-choice": "^1.13.0",
- "@lodestar/light-client": "^1.13.0",
- "@lodestar/logger": "^1.13.0",
- "@lodestar/params": "^1.13.0",
- "@lodestar/reqresp": "^1.13.0",
- "@lodestar/state-transition": "^1.13.0",
- "@lodestar/types": "^1.13.0",
- "@lodestar/utils": "^1.13.0",
- "@lodestar/validator": "^1.13.0",
+ "@lodestar/api": "^1.14.0",
+ "@lodestar/config": "^1.14.0",
+ "@lodestar/db": "^1.14.0",
+ "@lodestar/fork-choice": "^1.14.0",
+ "@lodestar/light-client": "^1.14.0",
+ "@lodestar/logger": "^1.14.0",
+ "@lodestar/params": "^1.14.0",
+ "@lodestar/reqresp": "^1.14.0",
+ "@lodestar/state-transition": "^1.14.0",
+ "@lodestar/types": "^1.14.0",
+ "@lodestar/utils": "^1.14.0",
+ "@lodestar/validator": "^1.14.0",
"@multiformats/multiaddr": "^12.1.3",
"@types/datastore-level": "^3.0.0",
"buffer-xor": "^2.0.2",
@@ -145,7 +145,7 @@
"jwt-simple": "0.5.6",
"libp2p": "0.46.12",
"multiformats": "^11.0.1",
- "prom-client": "^14.2.0",
+ "prom-client": "^15.1.0",
"qs": "^6.11.1",
"snappyjs": "^0.7.0",
"strict-event-emitter-types": "^2.0.0",
diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts
index c54e040ceb06..89565426426e 100644
--- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts
+++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts
@@ -1,17 +1,13 @@
import {fromHexString, toHexString} from "@chainsafe/ssz";
import {routes, ServerApi, ResponseFormat} from "@lodestar/api";
-import {
- computeTimeAtSlot,
- parseSignedBlindedBlockOrContents,
- reconstructFullBlockOrContents,
- DataAvailableStatus,
-} from "@lodestar/state-transition";
+import {computeTimeAtSlot, reconstructFullBlockOrContents} from "@lodestar/state-transition";
import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params";
import {sleep, toHex} from "@lodestar/utils";
import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types";
import {BlockSource, getBlockInput, ImportBlockOpts, BlockInput} from "../../../../chain/blocks/types.js";
import {promiseAllMaybeAsync} from "../../../../util/promises.js";
import {isOptimisticBlock} from "../../../../util/forkChoice.js";
+import {computeBlobSidecars} from "../../../../util/blobs.js";
import {BlockError, BlockErrorCode} from "../../../../chain/errors/index.js";
import {OpSource} from "../../../../metrics/validatorMonitor.js";
import {NetworkEvent} from "../../../../network/index.js";
@@ -46,22 +42,23 @@ export function getBeaconBlockApi({
opts: PublishBlockOpts = {}
) => {
const seenTimestampSec = Date.now() / 1000;
- let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, signedBlobs: deneb.SignedBlobSidecars;
+ let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, blobSidecars: deneb.BlobSidecars;
if (isSignedBlockContents(signedBlockOrContents)) {
- ({signedBlock, signedBlobSidecars: signedBlobs} = signedBlockOrContents);
+ ({signedBlock} = signedBlockOrContents);
+ blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents);
blockForImport = getBlockInput.postDeneb(
config,
signedBlock,
BlockSource.api,
- signedBlobs.map((sblob) => sblob.message),
+ blobSidecars,
// don't bundle any bytes for block and blobs
null,
- signedBlobs.map(() => null)
+ blobSidecars.map(() => null)
);
} else {
signedBlock = signedBlockOrContents;
- signedBlobs = [];
+ blobSidecars = [];
// TODO: Once API supports submitting data as SSZ, replace null with blockBytes
blockForImport = getBlockInput.preDeneb(config, signedBlock, BlockSource.api, null);
}
@@ -121,19 +118,13 @@ export function getBeaconBlockApi({
}
try {
- await verifyBlocksInEpoch.call(
- chain as BeaconChain,
- parentBlock,
- [blockForImport],
- [DataAvailableStatus.available],
- {
- ...opts,
- verifyOnly: true,
- skipVerifyBlockSignatures: true,
- skipVerifyExecutionPayload: true,
- seenTimestampSec,
- }
- );
+ await verifyBlocksInEpoch.call(chain as BeaconChain, parentBlock, [blockForImport], {
+ ...opts,
+ verifyOnly: true,
+ skipVerifyBlockSignatures: true,
+ skipVerifyExecutionPayload: true,
+ seenTimestampSec,
+ });
} catch (error) {
chain.logger.error("Consensus checks failed while publishing the block", valLogMeta, error as Error);
chain.persistInvalidSszValue(
@@ -195,18 +186,15 @@ export function getBeaconBlockApi({
}
throw e;
}),
- ...signedBlobs.map((signedBlob) => () => network.publishBlobSidecar(signedBlob)),
+ ...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)),
];
await promiseAllMaybeAsync(publishPromises);
};
const publishBlindedBlock: ServerApi["publishBlindedBlock"] = async (
- signedBlindedBlockOrContents,
+ signedBlindedBlock,
opts: PublishBlockOpts = {}
) => {
- const {signedBlindedBlock, signedBlindedBlobSidecars} =
- parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents);
-
const slot = signedBlindedBlock.message.slot;
const blockRoot = toHex(
chain.config
@@ -217,27 +205,31 @@ export function getBeaconBlockApi({
// Either the payload/blobs are cached from i) engine locally or ii) they are from the builder
//
// executionPayload can be null or a real payload in locally produced so check for presence of root
- const source = chain.producedBlockRoot.has(blockRoot) ? ProducedBlockSource.engine : ProducedBlockSource.builder;
-
- const executionPayload = chain.producedBlockRoot.get(blockRoot) ?? null;
- const blobSidecars = executionPayload
- ? chain.producedBlobSidecarsCache.get(toHex(executionPayload.blockHash))
- : undefined;
- const blobs = blobSidecars ? blobSidecars.map((blobSidecar) => blobSidecar.blob) : null;
-
- chain.logger.debug("Assembling blinded block for publishing", {source, blockRoot, slot});
+ const executionPayload = chain.producedBlockRoot.get(blockRoot);
+ if (executionPayload !== undefined) {
+ const source = ProducedBlockSource.engine;
+ chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source});
+
+ const contents = executionPayload
+ ? chain.producedContentsCache.get(toHex(executionPayload.blockHash)) ?? null
+ : null;
+ const signedBlockOrContents = reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents});
+
+ chain.logger.info("Publishing assembled block", {blockRoot, slot, source});
+ return publishBlock(signedBlockOrContents, opts);
+ } else {
+ const source = ProducedBlockSource.builder;
+ chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source});
- const signedBlockOrContents =
- source === ProducedBlockSource.engine
- ? reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs})
- : await reconstructBuilderBlockOrContents(chain, signedBlindedBlockOrContents);
+ const signedBlockOrContents = await reconstructBuilderBlockOrContents(chain, signedBlindedBlock);
- // the full block is published by relay and it's possible that the block is already known to us
- // by gossip
- //
- // see: https://github.com/ChainSafe/lodestar/issues/5404
- chain.logger.info("Publishing assembled block", {blockRoot, slot, source});
- return publishBlock(signedBlockOrContents, {...opts, ignoreIfKnown: true});
+ // the full block is published by relay and it's possible that the block is already known to us
+ // by gossip
+ //
+ // see: https://github.com/ChainSafe/lodestar/issues/5404
+ chain.logger.info("Publishing assembled block", {blockRoot, slot, source});
+ return publishBlock(signedBlockOrContents, {...opts, ignoreIfKnown: true});
+ }
};
return {
@@ -431,13 +423,13 @@ export function getBeaconBlockApi({
async function reconstructBuilderBlockOrContents(
chain: ApiModules["chain"],
- signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents
+ signedBlindedBlock: allForks.SignedBlindedBeaconBlock
): Promise {
const executionBuilder = chain.executionBuilder;
if (!executionBuilder) {
throw Error("exeutionBuilder required to publish SignedBlindedBeaconBlock");
}
- const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlockOrContents);
+ const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlock);
return signedBlockOrContents;
}
diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts
index 8f92fa483908..839c1df7c463 100644
--- a/packages/beacon-node/src/api/impl/validator/index.ts
+++ b/packages/beacon-node/src/api/impl/validator/index.ts
@@ -9,7 +9,6 @@ import {
computeEpochAtSlot,
getCurrentSlot,
beaconBlockToBlinded,
- blobSidecarsToBlinded,
} from "@lodestar/state-transition";
import {
GENESIS_SLOT,
@@ -20,6 +19,7 @@ import {
isForkExecution,
ForkSeq,
} from "@lodestar/params";
+import {MAX_BUILDER_BOOST_FACTOR} from "@lodestar/validator";
import {
Root,
Slot,
@@ -31,11 +31,11 @@ import {
allForks,
BLSSignature,
isBlindedBeaconBlock,
- isBlindedBlockContents,
+ isBlockContents,
phase0,
} from "@lodestar/types";
import {ExecutionStatus} from "@lodestar/fork-choice";
-import {toHex, racePromisesWithCutoff, RaceEvent} from "@lodestar/utils";
+import {toHex, racePromisesWithCutoff, RaceEvent, gweiToWei} from "@lodestar/utils";
import {AttestationError, AttestationErrorCode, GossipAction, SyncCommitteeError} from "../../../chain/errors/index.js";
import {validateApiAggregateAndProof} from "../../../chain/validation/index.js";
import {ZERO_HASH} from "../../../constants/index.js";
@@ -280,7 +280,7 @@ export function getValidatorApi({
);
}
- const produceBlindedBlockOrContents = async function produceBlindedBlockOrContents(
+ const produceBuilderBlindedBlock = async function produceBuilderBlindedBlock(
slot: Slot,
randaoReveal: BLSSignature,
graffiti: string,
@@ -288,7 +288,12 @@ export function getValidatorApi({
{
skipHeadChecksAndUpdate,
}: Omit & {skipHeadChecksAndUpdate?: boolean} = {}
- ): Promise {
+ ): Promise {
+ const version = config.getForkName(slot);
+ if (!isForkExecution(version)) {
+ throw Error(`Invalid fork=${version} for produceBuilderBlindedBlock`);
+ }
+
const source = ProducedBlockSource.builder;
metrics?.blockProductionRequests.inc({source});
@@ -329,31 +334,17 @@ export function getValidatorApi({
root: toHexString(config.getBlindedForkTypes(slot).BeaconBlock.hashTreeRoot(block)),
});
- const version = config.getForkName(block.slot);
if (chain.opts.persistProducedBlocks) {
void chain.persistBlock(block, "produced_builder_block");
}
- if (isForkBlobs(version)) {
- const blockHash = toHex((block as bellatrix.BlindedBeaconBlock).body.executionPayloadHeader.blockHash);
- const blindedBlobSidecars = chain.producedBlindedBlobSidecarsCache.get(blockHash);
- if (blindedBlobSidecars === undefined) {
- throw Error("blobSidecars missing in cache");
- }
- return {
- data: {blindedBlock: block, blindedBlobSidecars} as allForks.BlindedBlockContents,
- version,
- executionPayloadValue,
- consensusBlockValue,
- };
- } else {
- return {data: block, version, executionPayloadValue, consensusBlockValue};
- }
+
+ return {data: block, version, executionPayloadValue, consensusBlockValue};
} finally {
if (timer) timer({source});
}
};
- const produceFullBlockOrContents = async function produceFullBlockOrContents(
+ const produceEngineFullBlockOrContents = async function produceEngineFullBlockOrContents(
slot: Slot,
randaoReveal: BLSSignature,
graffiti: string,
@@ -362,7 +353,7 @@ export function getValidatorApi({
strictFeeRecipientCheck,
skipHeadChecksAndUpdate,
}: Omit & {skipHeadChecksAndUpdate?: boolean} = {}
- ): Promise {
+ ): Promise {
const source = ProducedBlockSource.engine;
metrics?.blockProductionRequests.inc({source});
@@ -380,7 +371,7 @@ export function getValidatorApi({
let timer;
try {
timer = metrics?.blockProductionTime.startTimer();
- const {block, executionPayloadValue, consensusBlockValue} = await chain.produceBlock({
+ const {block, executionPayloadValue, consensusBlockValue, shouldOverrideBuilder} = await chain.produceBlock({
slot,
randaoReveal,
graffiti: toGraffitiBuffer(graffiti || ""),
@@ -407,218 +398,269 @@ export function getValidatorApi({
}
if (isForkBlobs(version)) {
const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash);
- const blobSidecars = chain.producedBlobSidecarsCache.get(blockHash);
- if (blobSidecars === undefined) {
- throw Error("blobSidecars missing in cache");
+ const contents = chain.producedContentsCache.get(blockHash);
+ if (contents === undefined) {
+ throw Error("contents missing in cache");
}
+
return {
- data: {block, blobSidecars} as allForks.BlockContents,
+ data: {block, ...contents} as allForks.BlockContents,
version,
executionPayloadValue,
consensusBlockValue,
+ shouldOverrideBuilder,
};
} else {
- return {data: block, version, executionPayloadValue, consensusBlockValue};
+ return {data: block, version, executionPayloadValue, consensusBlockValue, shouldOverrideBuilder};
}
} finally {
if (timer) timer({source});
}
};
- const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3(
- slot,
- randaoReveal,
- graffiti,
- // TODO deneb: skip randao verification
- _skipRandaoVerification?: boolean,
- {feeRecipient, builderSelection, strictFeeRecipientCheck}: routes.validator.ExtraProduceBlockOps = {}
- ) {
- notWhileSyncing();
- await waitForSlot(slot); // Must never request for a future slot > currentSlot
-
- // Process the queued attestations in the forkchoice for correct head estimation
- // forkChoice.updateTime() might have already been called by the onSlot clock
- // handler, in which case this should just return.
- chain.forkChoice.updateTime(slot);
- chain.recomputeForkChoiceHead();
-
- const fork = config.getForkName(slot);
- // set some sensible opts
- builderSelection = builderSelection ?? routes.validator.BuilderSelection.MaxProfit;
- const isBuilderEnabled =
- ForkSeq[fork] >= ForkSeq.bellatrix &&
- chain.executionBuilder !== undefined &&
- builderSelection !== routes.validator.BuilderSelection.ExecutionOnly;
-
- logger.verbose("Assembling block with produceBlockV3 ", {
- fork,
- builderSelection,
+ const produceEngineOrBuilderBlock: ServerApi["produceBlockV3"] =
+ async function produceEngineOrBuilderBlock(
slot,
- isBuilderEnabled,
- strictFeeRecipientCheck,
- });
- // Start calls for building execution and builder blocks
- const blindedBlockPromise = isBuilderEnabled
- ? // can't do fee recipient checks as builder bid doesn't return feeRecipient as of now
- produceBlindedBlockOrContents(slot, randaoReveal, graffiti, {
- feeRecipient,
- // skip checking and recomputing head in these individual produce calls
- skipHeadChecksAndUpdate: true,
- }).catch((e) => {
- logger.error("produceBlindedBlockOrContents failed to produce block", {slot}, e);
- return null;
- })
- : null;
+ randaoReveal,
+ graffiti,
+ // TODO deneb: skip randao verification
+ _skipRandaoVerification?: boolean,
+ {
+ feeRecipient,
+ builderSelection,
+ builderBoostFactor,
+ strictFeeRecipientCheck,
+ }: routes.validator.ExtraProduceBlockOps = {}
+ ) {
+ notWhileSyncing();
+ await waitForSlot(slot); // Must never request for a future slot > currentSlot
- const fullBlockPromise =
- // At any point either the builder or execution or both flows should be active.
- //
- // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager
- // configurations could cause a validator pubkey to have builder disabled with builder selection builder only
- // (TODO: independently make sure such an options update is not successful for a validator pubkey)
- //
- // So if builder is disabled ignore builder selection of builderonly if caused by user mistake
- !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly
- ? // TODO deneb: builderSelection needs to be figured out if to be done beacon side
- // || builderSelection !== BuilderSelection.BuilderOnly
- produceFullBlockOrContents(slot, randaoReveal, graffiti, {
+ // Process the queued attestations in the forkchoice for correct head estimation
+ // forkChoice.updateTime() might have already been called by the onSlot clock
+ // handler, in which case this should just return.
+ chain.forkChoice.updateTime(slot);
+ chain.recomputeForkChoiceHead();
+
+ const fork = config.getForkName(slot);
+ // set some sensible opts
+ // builderSelection will be deprecated and will run in mode MaxProfit if builder is enabled
+ // and the actual selection will be determined using builderBoostFactor passed by the validator
+ builderSelection = builderSelection ?? routes.validator.BuilderSelection.MaxProfit;
+ builderBoostFactor = builderBoostFactor ?? BigInt(100);
+ if (builderBoostFactor > MAX_BUILDER_BOOST_FACTOR) {
+ throw new ApiError(400, `Invalid builderBoostFactor=${builderBoostFactor} > MAX_BUILDER_BOOST_FACTOR`);
+ }
+
+ const isBuilderEnabled =
+ ForkSeq[fork] >= ForkSeq.bellatrix &&
+ chain.executionBuilder !== undefined &&
+ builderSelection !== routes.validator.BuilderSelection.ExecutionOnly;
+
+ logger.verbose("Assembling block with produceEngineOrBuilderBlock ", {
+ fork,
+ builderSelection,
+ slot,
+ isBuilderEnabled,
+ strictFeeRecipientCheck,
+ // winston logger doesn't like bigint
+ builderBoostFactor: `${builderBoostFactor}`,
+ });
+ // Start calls for building execution and builder blocks
+ const blindedBlockPromise = isBuilderEnabled
+ ? // can't do fee recipient checks as builder bid doesn't return feeRecipient as of now
+ produceBuilderBlindedBlock(slot, randaoReveal, graffiti, {
feeRecipient,
- strictFeeRecipientCheck,
// skip checking and recomputing head in these individual produce calls
skipHeadChecksAndUpdate: true,
}).catch((e) => {
- logger.error("produceFullBlockOrContents failed to produce block", {slot}, e);
+ logger.error("produceBuilderBlindedBlock failed to produce block", {slot}, e);
return null;
})
: null;
- let blindedBlock, fullBlock;
- if (blindedBlockPromise !== null && fullBlockPromise !== null) {
- // reference index of promises in the race
- const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine];
- [blindedBlock, fullBlock] = await racePromisesWithCutoff<
- routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockOrContentsRes | null
- >(
- [blindedBlockPromise, fullBlockPromise],
- BLOCK_PRODUCTION_RACE_CUTOFF_MS,
- BLOCK_PRODUCTION_RACE_TIMEOUT_MS,
- // Callback to log the race events for better debugging capability
- (event: RaceEvent, delayMs: number, index?: number) => {
- const eventRef = index !== undefined ? {source: promisesOrder[index]} : {};
- logger.verbose("Block production race (builder vs execution)", {
- event,
- ...eventRef,
- delayMs,
- cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS,
- timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS,
- slot,
- });
+ const fullBlockPromise =
+ // At any point either the builder or execution or both flows should be active.
+ //
+ // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager
+ // configurations could cause a validator pubkey to have builder disabled with builder selection builder only
+ // (TODO: independently make sure such an options update is not successful for a validator pubkey)
+ //
+ // So if the builder is disabled, ignore a builder-only selection if it was caused by user mistake
+ !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly
+ ? // TODO deneb: builderSelection needs to be figured out if to be done beacon side
+ // || builderSelection !== BuilderSelection.BuilderOnly
+ produceEngineFullBlockOrContents(slot, randaoReveal, graffiti, {
+ feeRecipient,
+ strictFeeRecipientCheck,
+ // skip checking and recomputing head in these individual produce calls
+ skipHeadChecksAndUpdate: true,
+ }).catch((e) => {
+ logger.error("produceEngineFullBlockOrContents failed to produce block", {slot}, e);
+ return null;
+ })
+ : null;
+
+ let blindedBlock, fullBlock;
+ if (blindedBlockPromise !== null && fullBlockPromise !== null) {
+ // reference index of promises in the race
+ const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine];
+ [blindedBlock, fullBlock] = await racePromisesWithCutoff<
+ | ((routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockRes) & {
+ shouldOverrideBuilder?: boolean;
+ })
+ | null
+ >(
+ [blindedBlockPromise, fullBlockPromise],
+ BLOCK_PRODUCTION_RACE_CUTOFF_MS,
+ BLOCK_PRODUCTION_RACE_TIMEOUT_MS,
+ // Callback to log the race events for better debugging capability
+ (event: RaceEvent, delayMs: number, index?: number) => {
+ const eventRef = index !== undefined ? {source: promisesOrder[index]} : {};
+ logger.verbose("Block production race (builder vs execution)", {
+ event,
+ ...eventRef,
+ delayMs,
+ cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS,
+ timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS,
+ slot,
+ });
+ }
+ );
+ if (blindedBlock instanceof Error) {
+ // error here means race cutoff exceeded
+ logger.error("Failed to produce builder block", {slot}, blindedBlock);
+ blindedBlock = null;
}
- );
- if (blindedBlock instanceof Error) {
- // error here means race cutoff exceeded
- logger.error("Failed to produce builder block", {slot}, blindedBlock);
- blindedBlock = null;
- }
- if (fullBlock instanceof Error) {
- logger.error("Failed to produce execution block", {slot}, fullBlock);
+ if (fullBlock instanceof Error) {
+ logger.error("Failed to produce execution block", {slot}, fullBlock);
+ fullBlock = null;
+ }
+ } else if (blindedBlockPromise !== null && fullBlockPromise === null) {
+ blindedBlock = await blindedBlockPromise;
fullBlock = null;
+ } else if (blindedBlockPromise === null && fullBlockPromise !== null) {
+ blindedBlock = null;
+ fullBlock = await fullBlockPromise;
+ } else {
+ throw Error(
+ `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}`
+ );
}
- } else if (blindedBlockPromise !== null && fullBlockPromise === null) {
- blindedBlock = await blindedBlockPromise;
- fullBlock = null;
- } else if (blindedBlockPromise === null && fullBlockPromise !== null) {
- blindedBlock = null;
- fullBlock = await fullBlockPromise;
- } else {
- throw Error(
- `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}`
- );
- }
-
- const builderPayloadValue = blindedBlock?.executionPayloadValue ?? BigInt(0);
- const enginePayloadValue = fullBlock?.executionPayloadValue ?? BigInt(0);
- const consensusBlockValueBuilder = blindedBlock?.consensusBlockValue ?? BigInt(0);
- const consensusBlockValueEngine = fullBlock?.consensusBlockValue ?? BigInt(0);
- const blockValueBuilder = builderPayloadValue + consensusBlockValueBuilder;
- const blockValueEngine = enginePayloadValue + consensusBlockValueEngine;
-
- let selectedSource: ProducedBlockSource | null = null;
-
- if (fullBlock && blindedBlock) {
- switch (builderSelection) {
- case routes.validator.BuilderSelection.MaxProfit: {
- if (blockValueEngine >= blockValueBuilder) {
- selectedSource = ProducedBlockSource.engine;
- } else {
- selectedSource = ProducedBlockSource.builder;
+ const builderPayloadValue = blindedBlock?.executionPayloadValue ?? BigInt(0);
+ const enginePayloadValue = fullBlock?.executionPayloadValue ?? BigInt(0);
+ const consensusBlockValueBuilder = blindedBlock?.consensusBlockValue ?? BigInt(0);
+ const consensusBlockValueEngine = fullBlock?.consensusBlockValue ?? BigInt(0);
+
+ const blockValueBuilder = builderPayloadValue + gweiToWei(consensusBlockValueBuilder); // Total block value is in wei
+ const blockValueEngine = enginePayloadValue + gweiToWei(consensusBlockValueEngine); // Total block value is in wei
+
+ let executionPayloadSource: ProducedBlockSource | null = null;
+ const shouldOverrideBuilder = fullBlock?.shouldOverrideBuilder ?? false;
+
+ // handle the builder override case separately
+ if (shouldOverrideBuilder === true) {
+ executionPayloadSource = ProducedBlockSource.engine;
+ logger.info("Selected engine block as censorship suspected in builder blocks", {
+ // winston logger doesn't like bigint
+ enginePayloadValue: `${enginePayloadValue}`,
+ consensusBlockValueEngine: `${consensusBlockValueEngine}`,
+ blockValueEngine: `${blockValueEngine}`,
+ shouldOverrideBuilder,
+ slot,
+ });
+ } else if (fullBlock && blindedBlock) {
+ switch (builderSelection) {
+ case routes.validator.BuilderSelection.MaxProfit: {
+ if (
+ // explicitly handle the two special values mentioned in spec for builder preferred / engine preferred
+ builderBoostFactor !== MAX_BUILDER_BOOST_FACTOR &&
+ (builderBoostFactor === BigInt(0) ||
+ blockValueEngine >= (blockValueBuilder * builderBoostFactor) / BigInt(100))
+ ) {
+ executionPayloadSource = ProducedBlockSource.engine;
+ } else {
+ executionPayloadSource = ProducedBlockSource.builder;
+ }
+ break;
}
- break;
- }
- case routes.validator.BuilderSelection.ExecutionOnly: {
- selectedSource = ProducedBlockSource.engine;
- break;
- }
+ case routes.validator.BuilderSelection.ExecutionOnly: {
+ executionPayloadSource = ProducedBlockSource.engine;
+ break;
+ }
- // For everything else just select the builder
- default: {
- selectedSource = ProducedBlockSource.builder;
+ // For everything else just select the builder
+ default: {
+ executionPayloadSource = ProducedBlockSource.builder;
+ }
}
+ logger.info(`Selected executionPayloadSource=${executionPayloadSource} block`, {
+ builderSelection,
+ // winston logger doesn't like bigint
+ builderBoostFactor: `${builderBoostFactor}`,
+ enginePayloadValue: `${enginePayloadValue}`,
+ builderPayloadValue: `${builderPayloadValue}`,
+ consensusBlockValueEngine: `${consensusBlockValueEngine}`,
+ consensusBlockValueBuilder: `${consensusBlockValueBuilder}`,
+ blockValueEngine: `${blockValueEngine}`,
+ blockValueBuilder: `${blockValueBuilder}`,
+ shouldOverrideBuilder,
+ slot,
+ });
+ } else if (fullBlock && !blindedBlock) {
+ executionPayloadSource = ProducedBlockSource.engine;
+ logger.info("Selected engine block: no builder block produced", {
+ // winston logger doesn't like bigint
+ enginePayloadValue: `${enginePayloadValue}`,
+ consensusBlockValueEngine: `${consensusBlockValueEngine}`,
+ blockValueEngine: `${blockValueEngine}`,
+ shouldOverrideBuilder,
+ slot,
+ });
+ } else if (blindedBlock && !fullBlock) {
+ executionPayloadSource = ProducedBlockSource.builder;
+ logger.info("Selected builder block: no engine block produced", {
+ // winston logger doesn't like bigint
+ builderPayloadValue: `${builderPayloadValue}`,
+ consensusBlockValueBuilder: `${consensusBlockValueBuilder}`,
+ blockValueBuilder: `${blockValueBuilder}`,
+ shouldOverrideBuilder,
+ slot,
+ });
}
- logger.verbose(`Selected ${selectedSource} block`, {
- builderSelection,
- // winston logger doesn't like bigint
- enginePayloadValue: `${enginePayloadValue}`,
- builderPayloadValue: `${builderPayloadValue}`,
- consensusBlockValueEngine: `${consensusBlockValueEngine}`,
- consensusBlockValueBuilder: `${consensusBlockValueBuilder}`,
- blockValueEngine: `${blockValueEngine}`,
- blockValueBuilder: `${blockValueBuilder}`,
- slot,
- });
- } else if (fullBlock && !blindedBlock) {
- selectedSource = ProducedBlockSource.engine;
- logger.verbose("Selected engine block: no builder block produced", {
- // winston logger doesn't like bigint
- enginePayloadValue: `${enginePayloadValue}`,
- consensusBlockValueEngine: `${consensusBlockValueEngine}`,
- blockValueEngine: `${blockValueEngine}`,
- slot,
- });
- } else if (blindedBlock && !fullBlock) {
- selectedSource = ProducedBlockSource.builder;
- logger.verbose("Selected builder block: no engine block produced", {
- // winston logger doesn't like bigint
- builderPayloadValue: `${builderPayloadValue}`,
- consensusBlockValueBuilder: `${consensusBlockValueBuilder}`,
- blockValueBuilder: `${blockValueBuilder}`,
- slot,
- });
- }
- if (selectedSource === null) {
- throw Error(`Failed to produce engine or builder block for slot=${slot}`);
- }
+ if (executionPayloadSource === null) {
+ throw Error(`Failed to produce engine or builder block for slot=${slot}`);
+ }
- if (selectedSource === ProducedBlockSource.engine) {
- return {...fullBlock, executionPayloadBlinded: false} as routes.validator.ProduceBlockOrContentsRes & {
- executionPayloadBlinded: false;
- };
- } else {
- return {...blindedBlock, executionPayloadBlinded: true} as routes.validator.ProduceBlindedBlockOrContentsRes & {
- executionPayloadBlinded: true;
- };
- }
- };
+ if (executionPayloadSource === ProducedBlockSource.engine) {
+ return {
+ ...fullBlock,
+ executionPayloadBlinded: false,
+ executionPayloadSource,
+ } as routes.validator.ProduceBlockOrContentsRes & {
+ executionPayloadBlinded: false;
+ executionPayloadSource: ProducedBlockSource;
+ };
+ } else {
+ return {
+ ...blindedBlock,
+ executionPayloadBlinded: true,
+ executionPayloadSource,
+ } as routes.validator.ProduceBlindedBlockRes & {
+ executionPayloadBlinded: true;
+ executionPayloadSource: ProducedBlockSource;
+ };
+ }
+ };
const produceBlock: ServerApi["produceBlock"] = async function produceBlock(
slot,
randaoReveal,
graffiti
) {
- const producedData = await produceFullBlockOrContents(slot, randaoReveal, graffiti);
+ const producedData = await produceEngineFullBlockOrContents(slot, randaoReveal, graffiti);
if (isForkBlobs(producedData.version)) {
throw Error(`Invalid call to produceBlock for deneb+ fork=${producedData.version}`);
} else {
@@ -628,45 +670,85 @@ export function getValidatorApi({
}
};
- const produceBlindedBlock: ServerApi["produceBlindedBlock"] =
- async function produceBlindedBlock(slot, randaoReveal, graffiti) {
- const producedData = await produceBlockV3(slot, randaoReveal, graffiti);
- let blindedProducedData: routes.validator.ProduceBlindedBlockOrContentsRes;
-
- if (isForkBlobs(producedData.version)) {
- if (isBlindedBlockContents(producedData.data as allForks.FullOrBlindedBlockContents)) {
- blindedProducedData = producedData as routes.validator.ProduceBlindedBlockOrContentsRes;
- } else {
- //
- const {block, blobSidecars} = producedData.data as allForks.BlockContents;
- const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]);
- const blindedBlobSidecars = blobSidecarsToBlinded(blobSidecars);
+ const produceEngineOrBuilderBlindedBlock: ServerApi["produceBlindedBlock"] =
+ async function produceEngineOrBuilderBlindedBlock(slot, randaoReveal, graffiti) {
+ const {data, executionPayloadValue, consensusBlockValue, version} = await produceEngineOrBuilderBlock(
+ slot,
+ randaoReveal,
+ graffiti
+ );
+ if (!isForkExecution(version)) {
+ throw Error(`Invalid fork=${version} for produceEngineOrBuilderBlindedBlock`);
+ }
+ const executionPayloadBlinded = true;
+
+ if (isBlockContents(data)) {
+ const {block} = data;
+ const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]);
+ return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version};
+ } else if (isBlindedBeaconBlock(data)) {
+ return {executionPayloadValue, consensusBlockValue, data, executionPayloadBlinded, version};
+ } else {
+ const blindedBlock = beaconBlockToBlinded(config, data as allForks.AllForksExecution["BeaconBlock"]);
+ return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version};
+ }
+ };
- blindedProducedData = {
- ...producedData,
- data: {blindedBlock, blindedBlobSidecars},
- } as routes.validator.ProduceBlindedBlockOrContentsRes;
- }
+ const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3(
+ slot,
+ randaoReveal,
+ graffiti,
+ skipRandaoVerification?: boolean,
+ opts: routes.validator.ExtraProduceBlockOps = {}
+ ) {
+ const produceBlockEngineOrBuilderRes = await produceEngineOrBuilderBlock(
+ slot,
+ randaoReveal,
+ graffiti,
+ skipRandaoVerification,
+ opts
+ );
+
+ if (opts.blindedLocal === true && ForkSeq[produceBlockEngineOrBuilderRes.version] >= ForkSeq.bellatrix) {
+ if (produceBlockEngineOrBuilderRes.executionPayloadBlinded) {
+ return produceBlockEngineOrBuilderRes;
} else {
- if (isBlindedBeaconBlock(producedData.data)) {
- blindedProducedData = producedData as routes.validator.ProduceBlindedBlockOrContentsRes;
- } else {
- const block = producedData.data;
+ if (isBlockContents(produceBlockEngineOrBuilderRes.data)) {
+ const {block} = produceBlockEngineOrBuilderRes.data;
const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]);
- blindedProducedData = {
- ...producedData,
+ return {
+ ...produceBlockEngineOrBuilderRes,
+ data: blindedBlock,
+ executionPayloadBlinded: true,
+ } as routes.validator.ProduceBlindedBlockRes & {
+ executionPayloadBlinded: true;
+ executionPayloadSource: ProducedBlockSource;
+ };
+ } else {
+ const blindedBlock = beaconBlockToBlinded(
+ config,
+ produceBlockEngineOrBuilderRes.data as allForks.AllForksExecution["BeaconBlock"]
+ );
+ return {
+ ...produceBlockEngineOrBuilderRes,
data: blindedBlock,
- } as routes.validator.ProduceBlindedBlockOrContentsRes;
+ executionPayloadBlinded: true,
+ } as routes.validator.ProduceBlindedBlockRes & {
+ executionPayloadBlinded: true;
+ executionPayloadSource: ProducedBlockSource;
+ };
}
}
- return blindedProducedData;
- };
+ } else {
+ return produceBlockEngineOrBuilderRes;
+ }
+ };
return {
produceBlock,
- produceBlockV2: produceFullBlockOrContents,
+ produceBlockV2: produceEngineFullBlockOrContents,
produceBlockV3,
- produceBlindedBlock,
+ produceBlindedBlock: produceEngineOrBuilderBlindedBlock,
async produceAttestationData(committeeIndex, slot) {
notWhileSyncing();
diff --git a/packages/beacon-node/src/api/rest/activeSockets.ts b/packages/beacon-node/src/api/rest/activeSockets.ts
index ba8a35c80119..9f1b0f1a78a3 100644
--- a/packages/beacon-node/src/api/rest/activeSockets.ts
+++ b/packages/beacon-node/src/api/rest/activeSockets.ts
@@ -1,12 +1,11 @@
import http, {Server} from "node:http";
import {Socket} from "node:net";
-import {waitFor} from "@lodestar/utils";
-import {IGauge} from "../../metrics/index.js";
+import {Gauge, GaugeExtra, waitFor} from "@lodestar/utils";
export type SocketMetrics = {
- activeSockets: IGauge;
- socketsBytesRead: IGauge;
- socketsBytesWritten: IGauge;
+ activeSockets: GaugeExtra;
+ socketsBytesRead: Gauge;
+ socketsBytesWritten: Gauge;
};
// Use relatively short timeout to speed up shutdown
diff --git a/packages/beacon-node/src/api/rest/base.ts b/packages/beacon-node/src/api/rest/base.ts
index 4503bfe20e47..3ddb5354a897 100644
--- a/packages/beacon-node/src/api/rest/base.ts
+++ b/packages/beacon-node/src/api/rest/base.ts
@@ -3,9 +3,8 @@ import fastify, {FastifyInstance} from "fastify";
import fastifyCors from "@fastify/cors";
import bearerAuthPlugin from "@fastify/bearer-auth";
import {RouteConfig} from "@lodestar/api/beacon/server";
-import {ErrorAborted, Logger} from "@lodestar/utils";
+import {ErrorAborted, Gauge, Histogram, Logger} from "@lodestar/utils";
import {isLocalhostIP} from "../../util/ip.js";
-import {IGauge, IHistogram} from "../../metrics/index.js";
import {ApiError, NodeIsSyncing} from "../impl/errors.js";
import {HttpActiveSocketsTracker, SocketMetrics} from "./activeSockets.js";
@@ -25,9 +24,9 @@ export type RestApiServerModules = {
};
export type RestApiServerMetrics = SocketMetrics & {
- requests: IGauge<"operationId">;
- responseTime: IHistogram<"operationId">;
- errors: IGauge<"operationId">;
+ requests: Gauge<{operationId: string}>;
+ responseTime: Histogram<{operationId: string}>;
+ errors: Gauge<{operationId: string}>;
};
/**
@@ -90,6 +89,11 @@ export class RestApiServer {
metrics?.requests.inc({operationId});
});
+ server.addHook("preHandler", async (req, _res) => {
+ const {operationId} = req.routeConfig as RouteConfig;
+ this.logger.debug(`Exec ${req.id as string} ${req.ip} ${operationId}`);
+ });
+
// Log after response
server.addHook("onResponse", async (req, res) => {
const {operationId} = req.routeConfig as RouteConfig;
diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts
index feaddfbad39d..12b43359fa4e 100644
--- a/packages/beacon-node/src/chain/blocks/importBlock.ts
+++ b/packages/beacon-node/src/chain/blocks/importBlock.ts
@@ -7,7 +7,6 @@ import {
computeStartSlotAtEpoch,
isStateValidatorsNodesPopulated,
RootCache,
- kzgCommitmentToVersionedHash,
} from "@lodestar/state-transition";
import {routes} from "@lodestar/api";
import {ForkChoiceError, ForkChoiceErrorCode, EpochDifference, AncestorStatus} from "@lodestar/fork-choice";
@@ -16,6 +15,7 @@ import {ZERO_HASH_HEX} from "../../constants/index.js";
import {toCheckpointHex} from "../stateCache/index.js";
import {isOptimisticBlock} from "../../util/forkChoice.js";
import {isQueueErrorAborted} from "../../util/queue/index.js";
+import {kzgCommitmentToVersionedHash} from "../../util/blobs.js";
import {ChainEvent, ReorgEventData} from "../emitter.js";
import {REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC} from "../reprocess.js";
import type {BeaconChain} from "../chain.js";
diff --git a/packages/beacon-node/src/chain/blocks/index.ts b/packages/beacon-node/src/chain/blocks/index.ts
index 569fd0771022..8f4c7fa5f0f1 100644
--- a/packages/beacon-node/src/chain/blocks/index.ts
+++ b/packages/beacon-node/src/chain/blocks/index.ts
@@ -58,11 +58,7 @@ export async function processBlocks(
}
try {
- const {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock} = verifyBlocksSanityChecks(
- this,
- blocks,
- opts
- );
+ const {relevantBlocks, parentSlots, parentBlock} = verifyBlocksSanityChecks(this, blocks, opts);
// No relevant blocks, skip verifyBlocksInEpoch()
if (relevantBlocks.length === 0 || parentBlock === null) {
@@ -72,13 +68,8 @@ export async function processBlocks(
// Fully verify a block to be imported immediately after. Does not produce any side-effects besides adding intermediate
// states in the state cache through regen.
- const {postStates, proposerBalanceDeltas, segmentExecStatus} = await verifyBlocksInEpoch.call(
- this,
- parentBlock,
- relevantBlocks,
- dataAvailabilityStatuses,
- opts
- );
+ const {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus} =
+ await verifyBlocksInEpoch.call(this, parentBlock, relevantBlocks, opts);
// If segmentExecStatus has lvhForkchoice then, the entire segment should be invalid
// and we need to further propagate
diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts
index 5f1ac8833578..aff5a64c9929 100644
--- a/packages/beacon-node/src/chain/blocks/types.ts
+++ b/packages/beacon-node/src/chain/blocks/types.ts
@@ -1,14 +1,13 @@
-import {toHexString} from "@chainsafe/ssz";
import {CachedBeaconStateAllForks, computeEpochAtSlot, DataAvailableStatus} from "@lodestar/state-transition";
import {MaybeValidExecutionStatus} from "@lodestar/fork-choice";
-import {allForks, deneb, Slot, RootHex} from "@lodestar/types";
+import {allForks, deneb, Slot} from "@lodestar/types";
import {ForkSeq, MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS} from "@lodestar/params";
import {ChainForkConfig} from "@lodestar/config";
-import {pruneSetToMax} from "@lodestar/utils";
export enum BlockInputType {
preDeneb = "preDeneb",
postDeneb = "postDeneb",
+ blobsPromise = "blobsPromise",
}
/** Enum to represent where blocks come from */
@@ -19,9 +18,18 @@ export enum BlockSource {
byRoot = "req_resp_by_root",
}
+export enum GossipedInputType {
+ block = "block",
+ blob = "blob",
+}
+
+export type BlobsCache = Map;
+export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]};
+
export type BlockInput = {block: allForks.SignedBeaconBlock; source: BlockSource; blockBytes: Uint8Array | null} & (
| {type: BlockInputType.preDeneb}
- | {type: BlockInputType.postDeneb; blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]}
+ | ({type: BlockInputType.postDeneb} & BlockInputBlobs)
+ | {type: BlockInputType.blobsPromise; blobsCache: BlobsCache; availabilityPromise: Promise}
);
export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clockSlot: Slot): boolean {
@@ -32,125 +40,7 @@ export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clo
);
}
-export enum GossipedInputType {
- block = "block",
- blob = "blob",
-}
-type GossipedBlockInput =
- | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null}
- | {type: GossipedInputType.blob; signedBlob: deneb.SignedBlobSidecar; blobBytes: Uint8Array | null};
-type BlockInputCacheType = {
- block?: allForks.SignedBeaconBlock;
- blockBytes?: Uint8Array | null;
- blobs: Map;
- blobsBytes: Map;
-};
-
-const MAX_GOSSIPINPUT_CACHE = 5;
-// ssz.deneb.BlobSidecars.elementType.fixedSize;
-const BLOBSIDECAR_FIXED_SIZE = 131256;
-
export const getBlockInput = {
- blockInputCache: new Map(),
-
- getGossipBlockInput(
- config: ChainForkConfig,
- gossipedInput: GossipedBlockInput
- ):
- | {blockInput: BlockInput; blockInputMeta: {pending: null; haveBlobs: number; expectedBlobs: number}}
- | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}}
- | {blockInput: null; blockInputMeta: {pending: GossipedInputType.blob; haveBlobs: number; expectedBlobs: number}} {
- let blockHex;
- let blockCache;
-
- if (gossipedInput.type === GossipedInputType.block) {
- const {signedBlock, blockBytes} = gossipedInput;
-
- blockHex = toHexString(
- config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message)
- );
- blockCache = this.blockInputCache.get(blockHex) ?? {
- blobs: new Map(),
- blobsBytes: new Map(),
- };
-
- blockCache.block = signedBlock;
- blockCache.blockBytes = blockBytes;
- } else {
- const {signedBlob, blobBytes} = gossipedInput;
- blockHex = toHexString(signedBlob.message.blockRoot);
- blockCache = this.blockInputCache.get(blockHex);
-
- // If a new entry is going to be inserted, prune out old ones
- if (blockCache === undefined) {
- pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE);
- blockCache = {blobs: new Map(), blobsBytes: new Map()};
- }
-
- // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions
- blockCache.blobs.set(signedBlob.message.index, signedBlob.message);
- // easily splice out the unsigned message as blob is a fixed length type
- blockCache.blobsBytes.set(signedBlob.message.index, blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null);
- }
-
- this.blockInputCache.set(blockHex, blockCache);
- const {block: signedBlock, blockBytes} = blockCache;
-
- if (signedBlock !== undefined) {
- // block is available, check if all blobs have shown up
- const {slot, body} = signedBlock.message;
- const {blobKzgCommitments} = body as deneb.BeaconBlockBody;
- const blockInfo = `blockHex=${blockHex}, slot=${slot}`;
-
- if (blobKzgCommitments.length < blockCache.blobs.size) {
- throw Error(
- `Received more blobs=${blockCache.blobs.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}`
- );
- }
- if (blobKzgCommitments.length === blockCache.blobs.size) {
- const blobSidecars = [];
- const blobsBytes = [];
-
- for (let index = 0; index < blobKzgCommitments.length; index++) {
- const blobSidecar = blockCache.blobs.get(index);
- if (blobSidecar === undefined) {
- throw Error(`Missing blobSidecar at index=${index} for ${blockInfo}`);
- }
- blobSidecars.push(blobSidecar);
- blobsBytes.push(blockCache.blobsBytes.get(index) ?? null);
- }
-
- return {
- // TODO freetheblobs: collate and add serialized data for the postDeneb blockinput
- blockInput: getBlockInput.postDeneb(
- config,
- signedBlock,
- BlockSource.gossip,
- blobSidecars,
- blockBytes ?? null,
- blobsBytes
- ),
- blockInputMeta: {pending: null, haveBlobs: blockCache.blobs.size, expectedBlobs: blobKzgCommitments.length},
- };
- } else {
- return {
- blockInput: null,
- blockInputMeta: {
- pending: GossipedInputType.blob,
- haveBlobs: blockCache.blobs.size,
- expectedBlobs: blobKzgCommitments.length,
- },
- };
- }
- } else {
- // will need to wait for the block to showup
- return {
- blockInput: null,
- blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blockCache.blobs.size, expectedBlobs: null},
- };
- }
- },
-
preDeneb(
config: ChainForkConfig,
block: allForks.SignedBeaconBlock,
@@ -188,6 +78,27 @@ export const getBlockInput = {
blobsBytes,
};
},
+
+ blobsPromise(
+ config: ChainForkConfig,
+ block: allForks.SignedBeaconBlock,
+ source: BlockSource,
+ blobsCache: BlobsCache,
+ blockBytes: Uint8Array | null,
+ availabilityPromise: Promise
+ ): BlockInput {
+ if (config.getForkSeq(block.message.slot) < ForkSeq.deneb) {
+ throw Error(`Pre Deneb block slot ${block.message.slot}`);
+ }
+ return {
+ type: BlockInputType.blobsPromise,
+ block,
+ source,
+ blobsCache,
+ blockBytes,
+ availabilityPromise,
+ };
+ },
};
export enum AttestationImportOpt {
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts
index 72db1d801b48..94a42a39a6ae 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts
@@ -5,7 +5,7 @@ import {
isStateValidatorsNodesPopulated,
DataAvailableStatus,
} from "@lodestar/state-transition";
-import {bellatrix} from "@lodestar/types";
+import {bellatrix, deneb} from "@lodestar/types";
import {ForkName} from "@lodestar/params";
import {ProtoBlock, ExecutionStatus} from "@lodestar/fork-choice";
import {ChainForkConfig} from "@lodestar/config";
@@ -14,13 +14,14 @@ import {BlockError, BlockErrorCode} from "../errors/index.js";
import {BlockProcessOpts} from "../options.js";
import {RegenCaller} from "../regen/index.js";
import type {BeaconChain} from "../chain.js";
-import {BlockInput, ImportBlockOpts} from "./types.js";
+import {BlockInput, ImportBlockOpts, BlockInputType} from "./types.js";
import {POS_PANDA_MERGE_TRANSITION_BANNER} from "./utils/pandaMergeTransitionBanner.js";
import {CAPELLA_OWL_BANNER} from "./utils/ownBanner.js";
import {DENEB_BLOWFISH_BANNER} from "./utils/blowfishBanner.js";
import {verifyBlocksStateTransitionOnly} from "./verifyBlocksStateTransitionOnly.js";
import {verifyBlocksSignatures} from "./verifyBlocksSignatures.js";
import {verifyBlocksExecutionPayload, SegmentExecStatus} from "./verifyBlocksExecutionPayloads.js";
+import {verifyBlocksDataAvailability} from "./verifyBlocksDataAvailability.js";
import {writeBlockInputToDb} from "./writeBlockInputToDb.js";
/**
@@ -38,12 +39,12 @@ export async function verifyBlocksInEpoch(
this: BeaconChain,
parentBlock: ProtoBlock,
blocksInput: BlockInput[],
- dataAvailabilityStatuses: DataAvailableStatus[],
opts: BlockProcessOpts & ImportBlockOpts
): Promise<{
postStates: CachedBeaconStateAllForks[];
proposerBalanceDeltas: number[];
segmentExecStatus: SegmentExecStatus;
+ dataAvailabilityStatuses: DataAvailableStatus[];
}> {
const blocks = blocksInput.map(({block}) => block);
if (blocks.length === 0) {
@@ -88,7 +89,12 @@ export async function verifyBlocksInEpoch(
try {
// batch all I/O operations to reduce overhead
- const [segmentExecStatus, {postStates, proposerBalanceDeltas}] = await Promise.all([
+ const [
+ segmentExecStatus,
+ {dataAvailabilityStatuses, availableTime},
+ {postStates, proposerBalanceDeltas, verifyStateTime},
+ {verifySignaturesTime},
+ ] = await Promise.all([
// Execution payloads
opts.skipVerifyExecutionPayload !== true
? verifyBlocksExecutionPayload(this, parentBlock, blocks, preState0, abortController.signal, opts)
@@ -98,12 +104,16 @@ export async function verifyBlocksInEpoch(
mergeBlockFound: null,
} as SegmentExecStatus),
+ // data availability for the blobs
+ verifyBlocksDataAvailability(this, blocksInput, opts),
+
// Run state transition only
// TODO: Ensure it yields to allow flushing to workers and engine API
verifyBlocksStateTransitionOnly(
preState0,
blocksInput,
- dataAvailabilityStatuses,
+ // hack availability for state transition eval as availability is separately determined
+ blocks.map(() => DataAvailableStatus.available),
this.logger,
this.metrics,
abortController.signal,
@@ -113,7 +123,7 @@ export async function verifyBlocksInEpoch(
// All signatures at once
opts.skipVerifyBlockSignatures !== true
? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts)
- : Promise.resolve(),
+ : Promise.resolve({verifySignaturesTime: Date.now()}),
// ideally we want to only persist blocks after verifying them however the reality is there are
// rarely invalid blocks we'll batch all I/O operation here to reduce the overhead if there's
@@ -151,7 +161,35 @@ export async function verifyBlocksInEpoch(
}
}
- return {postStates, proposerBalanceDeltas, segmentExecStatus};
+ if (segmentExecStatus.execAborted === null) {
+ const {executionStatuses, executionTime} = segmentExecStatus;
+ if (
+ blocksInput.length === 1 &&
+ // gossip blocks have seenTimestampSec
+ opts.seenTimestampSec !== undefined &&
+ blocksInput[0].type !== BlockInputType.preDeneb &&
+ executionStatuses[0] === ExecutionStatus.Valid
+ ) {
+ // Find the max time when the block was actually verified
+ const fullyVerifiedTime = Math.max(executionTime, verifyStateTime, verifySignaturesTime);
+ const recvTofullyVerifedTime = fullyVerifiedTime / 1000 - opts.seenTimestampSec;
+ this.metrics?.gossipBlock.receivedToFullyVerifiedTime.observe(recvTofullyVerifedTime);
+
+ const verifiedToBlobsAvailabiltyTime = Math.max(availableTime - fullyVerifiedTime, 0) / 1000;
+ const numBlobs = (blocksInput[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length;
+
+ this.metrics?.gossipBlock.verifiedToBlobsAvailabiltyTime.observe({numBlobs}, verifiedToBlobsAvailabiltyTime);
+ this.logger.verbose("Verified blockInput fully with blobs availability", {
+ slot: blocksInput[0].block.message.slot,
+ recvTofullyVerifedTime,
+ verifiedToBlobsAvailabiltyTime,
+ type: blocksInput[0].type,
+ numBlobs,
+ });
+ }
+ }
+
+ return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus};
} finally {
abortController.abort();
}
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts
new file mode 100644
index 000000000000..9c45469d56dd
--- /dev/null
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts
@@ -0,0 +1,126 @@
+import {computeTimeAtSlot, DataAvailableStatus} from "@lodestar/state-transition";
+import {ChainForkConfig} from "@lodestar/config";
+import {deneb, UintNum64} from "@lodestar/types";
+import {Logger} from "@lodestar/utils";
+import {BlockError, BlockErrorCode} from "../errors/index.js";
+import {validateBlobSidecars} from "../validation/blobSidecar.js";
+import {Metrics} from "../../metrics/metrics.js";
+import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js";
+
+// Proposer boost is no longer available once 3s of the slot have elapsed, so stop
+// waiting then: throw the availability error and let unknown-block/blob-by-root sync pull the data
+const BLOB_AVAILABILITY_TIMEOUT = 3_000;
+
+/**
+ * Verifies that blob data is available for each BlockInput before import.
+ *
+ * - preDeneb blocks have no blobs to check
+ * - postDeneb blocks are validated against the block's kzg commitments
+ *   (kzg proof checks may be skipped if sidecars were already individually verified)
+ * - blobsPromise blocks wait for gossip blobs to arrive, up to a cutoff of
+ *   BLOB_AVAILABILITY_TIMEOUT past the slot start; on timeout a
+ *   DATA_UNAVAILABLE BlockError is thrown so that unknown-block/blob sync
+ *   can be triggered to pull the missing data by root
+ * - also records blob-availability timing metrics for single gossip blocks
+ */
+export async function verifyBlocksDataAvailability(
+ chain: {config: ChainForkConfig; genesisTime: UintNum64; logger: Logger; metrics: Metrics | null},
+ blocks: BlockInput[],
+ opts: ImportBlockOpts
+): Promise<{dataAvailabilityStatuses: DataAvailableStatus[]; availableTime: number}> {
+ if (blocks.length === 0) {
+ throw Error("Empty partiallyVerifiedBlocks");
+ }
+
+ const dataAvailabilityStatuses: DataAvailableStatus[] = [];
+ const seenTime = opts.seenTimestampSec !== undefined ? opts.seenTimestampSec * 1000 : Date.now();
+
+ for (const blockInput of blocks) {
+ // Validate status of only not yet finalized blocks; we don't yet need to propagate the status
+ // as it is not used upstream anywhere
+ const dataAvailabilityStatus = await maybeValidateBlobs(chain, blockInput, opts);
+ dataAvailabilityStatuses.push(dataAvailabilityStatus);
+ }
+
+ const availableTime = blocks[blocks.length - 1].type === BlockInputType.blobsPromise ? Date.now() : seenTime;
+ if (blocks.length === 1 && opts.seenTimestampSec !== undefined && blocks[0].type !== BlockInputType.preDeneb) {
+ const recvToAvailableTime = availableTime / 1000 - opts.seenTimestampSec;
+ const numBlobs = (blocks[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length;
+
+ chain.metrics?.gossipBlock.receivedToBlobsAvailabilityTime.observe({numBlobs}, recvToAvailableTime);
+ chain.logger.verbose("Verified blobs availability", {
+ slot: blocks[0].block.message.slot,
+ recvToAvailableTime,
+ type: blocks[0].type,
+ });
+ }
+
+ return {dataAvailabilityStatuses, availableTime};
+}
+
+async function maybeValidateBlobs(
+ chain: {config: ChainForkConfig; genesisTime: UintNum64},
+ blockInput: BlockInput,
+ opts: ImportBlockOpts
+): Promise {
+ switch (blockInput.type) {
+ case BlockInputType.preDeneb:
+ return DataAvailableStatus.preDeneb;
+
+ case BlockInputType.postDeneb:
+ if (opts.validBlobSidecars === BlobSidecarValidation.Full) {
+ return DataAvailableStatus.available;
+ }
+
+ // eslint-disable-next-line no-fallthrough
+ case BlockInputType.blobsPromise: {
+ // run full validation
+ const {block} = blockInput;
+ const blockSlot = block.message.slot;
+
+ const blobsData =
+ blockInput.type === BlockInputType.postDeneb
+ ? blockInput
+ : await raceWithCutoff(chain, blockInput, blockInput.availabilityPromise);
+ const {blobs} = blobsData;
+
+ const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body;
+ const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message);
+
+ // if the blob sidecars have been individually verified then we can skip the kzg proof check
+ // but other checks to match blobs with block data still need to be performed
+ const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual;
+ validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck});
+
+ return DataAvailableStatus.available;
+ }
+ }
+}
+
+/**
+ * Wait for blobs to become available with a cutoff time. If fails then throw DATA_UNAVAILABLE error
+ * which may try unknownblock/blobs fill (by root).
+ */
+async function raceWithCutoff(
+ chain: {config: ChainForkConfig; genesisTime: UintNum64},
+ blockInput: BlockInput,
+ availabilityPromise: Promise
+): Promise {
+ const {block} = blockInput;
+ const blockSlot = block.message.slot;
+
+ const cutoffTime = Math.max(
+ computeTimeAtSlot(chain.config, blockSlot, chain.genesisTime) * 1000 + BLOB_AVAILABILITY_TIMEOUT - Date.now(),
+ 0
+ );
+ const cutoffTimeout = new Promise((_resolve, reject) => setTimeout(reject, cutoffTime));
+
+ try {
+ await Promise.race([availabilityPromise, cutoffTimeout]);
+ } catch (e) {
+ // throw unavailable so that the unknownblock/blobs can be triggered to pull the block
+ throw new BlockError(block, {code: BlockErrorCode.DATA_UNAVAILABLE});
+ }
+ // we can only be here if availabilityPromise has resolved else an error will be thrown
+ return availabilityPromise;
+}
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts
index 7f4edd14c618..5dbe104c9541 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts
@@ -5,7 +5,6 @@ import {
isExecutionBlockBodyType,
isMergeTransitionBlock as isMergeTransitionBlockFn,
isExecutionEnabled,
- kzgCommitmentToVersionedHash,
} from "@lodestar/state-transition";
import {bellatrix, allForks, Slot, deneb} from "@lodestar/types";
import {
@@ -24,6 +23,7 @@ import {ForkSeq, SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params";
import {IExecutionEngine} from "../../execution/engine/interface.js";
import {BlockError, BlockErrorCode} from "../errors/index.js";
import {IClock} from "../../util/clock.js";
+import {kzgCommitmentToVersionedHash} from "../../util/blobs.js";
import {BlockProcessOpts} from "../options.js";
import {ExecutionPayloadStatus} from "../../execution/engine/interface.js";
import {IEth1ForBlockProduction} from "../../eth1/index.js";
@@ -45,6 +45,7 @@ export type SegmentExecStatus =
| {
execAborted: null;
executionStatuses: MaybeValidExecutionStatus[];
+ executionTime: number;
mergeBlockFound: bellatrix.BeaconBlock | null;
}
| {execAborted: ExecAbortType; invalidSegmentLVH?: LVHInvalidResponse; mergeBlockFound: null};
@@ -243,8 +244,9 @@ export async function verifyBlocksExecutionPayload(
}
}
- if (blocks.length === 1 && opts.seenTimestampSec !== undefined) {
- const recvToVerifiedExecPayload = Date.now() / 1000 - opts.seenTimestampSec;
+ const executionTime = Date.now();
+ if (blocks.length === 1 && opts.seenTimestampSec !== undefined && executionStatuses[0] === ExecutionStatus.Valid) {
+ const recvToVerifiedExecPayload = executionTime / 1000 - opts.seenTimestampSec;
chain.metrics?.gossipBlock.receivedToExecutionPayloadVerification.observe(recvToVerifiedExecPayload);
chain.logger.verbose("Verified execution payload", {
slot: blocks[0].message.slot,
@@ -255,6 +257,7 @@ export async function verifyBlocksExecutionPayload(
return {
execAborted: null,
executionStatuses,
+ executionTime,
mergeBlockFound,
};
}
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts
index 9fb7d04f1ed8..e62355a4889d 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts
@@ -1,12 +1,11 @@
-import {computeStartSlotAtEpoch, DataAvailableStatus} from "@lodestar/state-transition";
+import {computeStartSlotAtEpoch} from "@lodestar/state-transition";
import {ChainForkConfig} from "@lodestar/config";
import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice";
-import {Slot, deneb} from "@lodestar/types";
+import {Slot} from "@lodestar/types";
import {toHexString} from "@lodestar/utils";
import {IClock} from "../../util/clock.js";
import {BlockError, BlockErrorCode} from "../errors/index.js";
-import {validateBlobSidecars} from "../validation/blobSidecar.js";
-import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js";
+import {BlockInput, ImportBlockOpts} from "./types.js";
/**
* Verifies some early cheap sanity checks on the block before running the full state transition.
@@ -26,7 +25,6 @@ export function verifyBlocksSanityChecks(
opts: ImportBlockOpts
): {
relevantBlocks: BlockInput[];
- dataAvailabilityStatuses: DataAvailableStatus[];
parentSlots: Slot[];
parentBlock: ProtoBlock | null;
} {
@@ -35,7 +33,6 @@ export function verifyBlocksSanityChecks(
}
const relevantBlocks: BlockInput[] = [];
- const dataAvailabilityStatuses: DataAvailableStatus[] = [];
const parentSlots: Slot[] = [];
let parentBlock: ProtoBlock | null = null;
@@ -64,10 +61,6 @@ export function verifyBlocksSanityChecks(
}
}
- // Validate status of only not yet finalized blocks, we don't need yet to propogate the status
- // as it is not used upstream anywhere
- const dataAvailabilityStatus = maybeValidateBlobs(chain.config, blockInput, opts);
-
let parentBlockSlot: Slot;
if (relevantBlocks.length > 0) {
@@ -105,7 +98,6 @@ export function verifyBlocksSanityChecks(
// Block is relevant
relevantBlocks.push(blockInput);
- dataAvailabilityStatuses.push(dataAvailabilityStatus);
parentSlots.push(parentBlockSlot);
}
@@ -115,35 +107,5 @@ export function verifyBlocksSanityChecks(
throw Error(`Internal error, parentBlock should not be null for relevantBlocks=${relevantBlocks.length}`);
}
- return {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock};
-}
-
-function maybeValidateBlobs(
- config: ChainForkConfig,
- blockInput: BlockInput,
- opts: ImportBlockOpts
-): DataAvailableStatus {
- switch (blockInput.type) {
- case BlockInputType.postDeneb: {
- if (opts.validBlobSidecars === BlobSidecarValidation.Full) {
- return DataAvailableStatus.available;
- }
-
- // run full validation
- const {block, blobs} = blockInput;
- const blockSlot = block.message.slot;
- const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body;
- const beaconBlockRoot = config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message);
-
- // if the blob siddecars have been individually verified then we can skip kzg proof check
- // but other checks to match blobs with block data still need to be performed
- const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual;
- validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck});
-
- return DataAvailableStatus.available;
- }
-
- case BlockInputType.preDeneb:
- return DataAvailableStatus.preDeneb;
- }
+ return {relevantBlocks, parentSlots, parentBlock};
}
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
index fbbef969b696..14ad46a35c1e 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts
@@ -20,7 +20,7 @@ export async function verifyBlocksSignatures(
preState0: CachedBeaconStateAllForks,
blocks: allForks.SignedBeaconBlock[],
opts: ImportBlockOpts
-): Promise {
+): Promise<{verifySignaturesTime: number}> {
const isValidPromises: Promise[] = [];
// Verifies signatures after running state transition, so all SyncCommittee signed roots are known at this point.
@@ -46,17 +46,20 @@ export async function verifyBlocksSignatures(
}
}
- if (blocks.length === 1 && opts.seenTimestampSec !== undefined) {
- const recvToSigVer = Date.now() / 1000 - opts.seenTimestampSec;
- metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer);
- logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer});
- }
-
// `rejectFirstInvalidResolveAllValid()` returns on isValid result with its index
const res = await rejectFirstInvalidResolveAllValid(isValidPromises);
if (!res.allValid) {
throw new BlockError(blocks[res.index], {code: BlockErrorCode.INVALID_SIGNATURE, state: preState0});
}
+
+ const verifySignaturesTime = Date.now();
+ if (blocks.length === 1 && opts.seenTimestampSec !== undefined) {
+ const recvToSigVer = verifySignaturesTime / 1000 - opts.seenTimestampSec;
+ metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer);
+ logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer});
+ }
+
+ return {verifySignaturesTime};
}
type AllValidRes = {allValid: true} | {allValid: false; index: number};
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
index 709ad0c02b27..7d15d4e4f6ce 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
@@ -3,6 +3,7 @@ import {
stateTransition,
ExecutionPayloadStatus,
DataAvailableStatus,
+ StateHashTreeRootSource,
} from "@lodestar/state-transition";
import {ErrorAborted, Logger, sleep} from "@lodestar/utils";
import {Metrics} from "../../metrics/index.js";
@@ -27,7 +28,7 @@ export async function verifyBlocksStateTransitionOnly(
metrics: Metrics | null,
signal: AbortSignal,
opts: BlockProcessOpts & ImportBlockOpts
-): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]}> {
+): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]; verifyStateTime: number}> {
const postStates: CachedBeaconStateAllForks[] = [];
const proposerBalanceDeltas: number[] = [];
@@ -57,7 +58,9 @@ export async function verifyBlocksStateTransitionOnly(
metrics
);
- const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({source: "block_transition"});
+ const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({
+ source: StateHashTreeRootSource.blockTransition,
+ });
const stateRoot = postState.hashTreeRoot();
hashTreeRootTimer?.();
@@ -90,12 +93,13 @@ export async function verifyBlocksStateTransitionOnly(
}
}
+ const verifyStateTime = Date.now();
if (blocks.length === 1 && opts.seenTimestampSec !== undefined) {
const slot = blocks[0].block.message.slot;
- const recvToTransition = Date.now() / 1000 - opts.seenTimestampSec;
+ const recvToTransition = verifyStateTime / 1000 - opts.seenTimestampSec;
metrics?.gossipBlock.receivedToStateTransition.observe(recvToTransition);
- logger.verbose("Transitioned gossip block", {slot, recvToTransition});
+ logger.verbose("Verified block state transition", {slot, recvToTransition});
}
- return {postStates, proposerBalanceDeltas};
+ return {postStates, proposerBalanceDeltas, verifyStateTime};
}
diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts
index 0603ed7e7f7e..0b94d32b84ec 100644
--- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts
+++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts
@@ -13,7 +13,7 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI
const fnPromises: Promise[] = [];
for (const blockInput of blocksInput) {
- const {block, blockBytes, type} = blockInput;
+ const {block, blockBytes} = blockInput;
const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message);
const blockRootHex = toHex(blockRoot);
if (blockBytes) {
@@ -29,8 +29,13 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI
root: blockRootHex,
});
- if (type === BlockInputType.postDeneb) {
- const {blobs: blobSidecars} = blockInput;
+ if (blockInput.type === BlockInputType.postDeneb || blockInput.type === BlockInputType.blobsPromise) {
+ const blobSidecars =
+ blockInput.type == BlockInputType.postDeneb
+ ? blockInput.blobs
+ : // At this point of import blobs are available and can be safely awaited
+ (await blockInput.availabilityPromise).blobs;
+
// NOTE: Old blobs are pruned on archive
fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars}));
this.logger.debug("Persisted blobSidecars to hot DB", {
diff --git a/packages/beacon-node/src/chain/bls/index.ts b/packages/beacon-node/src/chain/bls/index.ts
index 3ee72ac66cbd..f9898b13776b 100644
--- a/packages/beacon-node/src/chain/bls/index.ts
+++ b/packages/beacon-node/src/chain/bls/index.ts
@@ -1,4 +1,4 @@
export type {IBlsVerifier} from "./interface.js";
-export type {BlsMultiThreadWorkerPoolModules} from "./multithread/index.js";
+export type {BlsMultiThreadWorkerPoolModules, JobQueueItemType} from "./multithread/index.js";
export {BlsMultiThreadWorkerPool} from "./multithread/index.js";
export {BlsSingleThreadVerifier} from "./singleThread.js";
diff --git a/packages/beacon-node/src/chain/bls/multithread/index.ts b/packages/beacon-node/src/chain/bls/multithread/index.ts
index 9b0006566253..235ec1536be7 100644
--- a/packages/beacon-node/src/chain/bls/multithread/index.ts
+++ b/packages/beacon-node/src/chain/bls/multithread/index.ts
@@ -41,6 +41,8 @@ export type BlsMultiThreadWorkerPoolOptions = {
blsVerifyAllMultiThread?: boolean;
};
+export type {JobQueueItemType};
+
// 1 worker for the main thread
const blsPoolSize = Math.max(defaultPoolSize - 1, 1);
diff --git a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts
index 4ae05cdab913..8b5c63df2eeb 100644
--- a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts
+++ b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts
@@ -56,7 +56,7 @@ export function jobItemWorkReq(job: JobQueueItem, format: PointFormat, metrics:
opts: job.opts,
sets: job.sets.map((set) => ({
// this can throw, handled in the consumer code
- publicKey: getAggregatedPubkey(set).toBytes(format),
+ publicKey: getAggregatedPubkey(set, metrics).toBytes(format),
signature: set.signature,
message: set.signingRoot,
})),
diff --git a/packages/beacon-node/src/chain/bls/utils.ts b/packages/beacon-node/src/chain/bls/utils.ts
index 0b1010de27f6..4a3a027f31ac 100644
--- a/packages/beacon-node/src/chain/bls/utils.ts
+++ b/packages/beacon-node/src/chain/bls/utils.ts
@@ -1,14 +1,19 @@
import type {PublicKey} from "@chainsafe/bls/types";
import bls from "@chainsafe/bls";
import {ISignatureSet, SignatureSetType} from "@lodestar/state-transition";
+import {Metrics} from "../../metrics/metrics.js";
-export function getAggregatedPubkey(signatureSet: ISignatureSet): PublicKey {
+export function getAggregatedPubkey(signatureSet: ISignatureSet, metrics: Metrics | null = null): PublicKey {
switch (signatureSet.type) {
case SignatureSetType.single:
return signatureSet.pubkey;
- case SignatureSetType.aggregate:
- return bls.PublicKey.aggregate(signatureSet.pubkeys);
+ case SignatureSetType.aggregate: {
+ const timer = metrics?.blsThreadPool.pubkeysAggregationMainThreadDuration.startTimer();
+ const pubkeys = bls.PublicKey.aggregate(signatureSet.pubkeys);
+ timer?.();
+ return pubkeys;
+ }
default:
throw Error("Unknown signature set type");
diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts
index 45cda3d94bc9..ac2f97128c16 100644
--- a/packages/beacon-node/src/chain/chain.ts
+++ b/packages/beacon-node/src/chain/chain.ts
@@ -79,6 +79,7 @@ import {BlockInput} from "./blocks/types.js";
import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js";
import {ShufflingCache} from "./shufflingCache.js";
import {StateContextCache} from "./stateCache/stateContextCache.js";
+import {SeenGossipBlockInput} from "./seenCache/index.js";
import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js";
/**
@@ -87,7 +88,6 @@ import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js
* allow some margin if the node overloads.
*/
const DEFAULT_MAX_CACHED_PRODUCED_ROOTS = 4;
-const DEFAULT_MAX_CACHED_BLOB_SIDECARS = 4;
export class BeaconChain implements IBeaconChain {
readonly genesisTime: UintNum64;
@@ -125,6 +125,7 @@ export class BeaconChain implements IBeaconChain {
readonly seenSyncCommitteeMessages = new SeenSyncCommitteeMessages();
readonly seenContributionAndProof: SeenContributionAndProof;
readonly seenAttestationDatas: SeenAttestationDatas;
+ readonly seenGossipBlockInput = new SeenGossipBlockInput();
// Seen cache for liveness checks
readonly seenBlockAttesters = new SeenBlockAttesters();
@@ -136,8 +137,7 @@ export class BeaconChain implements IBeaconChain {
readonly checkpointBalancesCache: CheckpointBalancesCache;
readonly shufflingCache: ShufflingCache;
/** Map keyed by executionPayload.blockHash of the block for those blobs */
- readonly producedBlobSidecarsCache = new Map();
- readonly producedBlindedBlobSidecarsCache = new Map();
+ readonly producedContentsCache = new Map();
// Cache payload from the local execution so that produceBlindedBlock or produceBlockV3 and
// send and get signed/published blinded versions which beacon can assemble into full before
@@ -470,22 +470,32 @@ export class BeaconChain implements IBeaconChain {
return data && {block: data, executionOptimistic: false};
}
- produceBlock(
- blockAttributes: BlockAttributes
- ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> {
+ produceBlock(blockAttributes: BlockAttributes): Promise<{
+ block: allForks.BeaconBlock;
+ executionPayloadValue: Wei;
+ consensusBlockValue: Gwei;
+ shouldOverrideBuilder?: boolean;
+ }> {
return this.produceBlockWrapper(BlockType.Full, blockAttributes);
}
- produceBlindedBlock(
- blockAttributes: BlockAttributes
- ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> {
+ produceBlindedBlock(blockAttributes: BlockAttributes): Promise<{
+ block: allForks.BlindedBeaconBlock;
+ executionPayloadValue: Wei;
+ consensusBlockValue: Gwei;
+ }> {
return this.produceBlockWrapper(BlockType.Blinded, blockAttributes);
}
async produceBlockWrapper(
blockType: T,
{randaoReveal, graffiti, slot, feeRecipient}: BlockAttributes
- ): Promise<{block: AssembledBlockType; executionPayloadValue: Wei; consensusBlockValue: Gwei}> {
+ ): Promise<{
+ block: AssembledBlockType;
+ executionPayloadValue: Wei;
+ consensusBlockValue: Gwei;
+ shouldOverrideBuilder?: boolean;
+ }> {
const head = this.forkChoice.getHead();
const state = await this.regen.getBlockSlotState(
head.blockRoot,
@@ -497,16 +507,21 @@ export class BeaconChain implements IBeaconChain {
const proposerIndex = state.epochCtx.getBeaconProposer(slot);
const proposerPubKey = state.epochCtx.index2pubkey[proposerIndex].toBytes();
- const {body, blobs, executionPayloadValue} = await produceBlockBody.call(this, blockType, state, {
- randaoReveal,
- graffiti,
- slot,
- feeRecipient,
- parentSlot: slot - 1,
- parentBlockRoot,
- proposerIndex,
- proposerPubKey,
- });
+ const {body, blobs, executionPayloadValue, shouldOverrideBuilder} = await produceBlockBody.call(
+ this,
+ blockType,
+ state,
+ {
+ randaoReveal,
+ graffiti,
+ slot,
+ feeRecipient,
+ parentSlot: slot - 1,
+ parentBlockRoot,
+ proposerIndex,
+ proposerPubKey,
+ }
+ );
// The hashtree root computed here for debug log will get cached and hence won't introduce additional delays
const bodyRoot =
@@ -552,35 +567,12 @@ export class BeaconChain implements IBeaconChain {
// publishing the blinded block's full version
if (blobs.type === BlobsResultType.produced) {
// body is of full type here
- const blockHash = blobs.blockHash;
- const blobSidecars = blobs.blobSidecars.map((blobSidecar) => ({
- ...blobSidecar,
- blockRoot,
- slot,
- blockParentRoot: parentBlockRoot,
- proposerIndex,
- }));
-
- this.producedBlobSidecarsCache.set(blockHash, blobSidecars);
- this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size);
- } else if (blobs.type === BlobsResultType.blinded) {
- // body is of blinded type here
- const blockHash = blobs.blockHash;
- const blindedBlobSidecars = blobs.blobSidecars.map((blindedBlobSidecar) => ({
- ...blindedBlobSidecar,
- blockRoot,
- slot,
- blockParentRoot: parentBlockRoot,
- proposerIndex,
- }));
-
- this.producedBlindedBlobSidecarsCache.set(blockHash, blindedBlobSidecars);
- this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set(
- this.producedBlindedBlobSidecarsCache.size
- );
+ const {blockHash, contents} = blobs;
+ this.producedContentsCache.set(blockHash, contents);
+ this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size);
}
- return {block, executionPayloadValue, consensusBlockValue: proposerReward};
+ return {block, executionPayloadValue, consensusBlockValue: proposerReward, shouldOverrideBuilder};
}
/**
@@ -593,14 +585,14 @@ export class BeaconChain implements IBeaconChain {
* kzg_aggregated_proof=compute_proof_from_blobs(blobs),
* )
*/
- getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars {
+ getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents {
const blockHash = toHex(beaconBlock.body.executionPayload.blockHash);
- const blobSidecars = this.producedBlobSidecarsCache.get(blockHash);
- if (!blobSidecars) {
- throw Error(`No blobSidecars for executionPayload.blockHash ${blockHash}`);
+ const contents = this.producedContentsCache.get(blockHash);
+ if (!contents) {
+ throw Error(`No contents for executionPayload.blockHash ${blockHash}`);
}
- return blobSidecars;
+ return contents;
}
async processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise {
@@ -882,19 +874,8 @@ export class BeaconChain implements IBeaconChain {
this.metrics?.blockProductionCaches.producedBlindedBlockRoot.set(this.producedBlindedBlockRoot.size);
if (this.config.getForkSeq(slot) >= ForkSeq.deneb) {
- pruneSetToMax(
- this.producedBlobSidecarsCache,
- this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS
- );
- this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size);
-
- pruneSetToMax(
- this.producedBlindedBlobSidecarsCache,
- this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS
- );
- this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set(
- this.producedBlindedBlobSidecarsCache.size
- );
+ pruneSetToMax(this.producedContentsCache, this.opts.maxCachedProducedRoots ?? DEFAULT_MAX_CACHED_PRODUCED_ROOTS);
+ this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size);
}
const metrics = this.metrics;
@@ -938,15 +919,20 @@ export class BeaconChain implements IBeaconChain {
this.logger.verbose("Fork choice justified", {epoch: cp.epoch, root: cp.rootHex});
}
- private onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): void {
+ private async onForkChoiceFinalized(this: BeaconChain, cp: CheckpointWithHex): Promise {
this.logger.verbose("Fork choice finalized", {epoch: cp.epoch, root: cp.rootHex});
this.seenBlockProposers.prune(computeStartSlotAtEpoch(cp.epoch));
// TODO: Improve using regen here
- const headState = this.regen.getStateSync(this.forkChoice.getHead().stateRoot);
- const finalizedState = this.regen.getCheckpointStateSync(cp);
+ const {blockRoot, stateRoot, slot} = this.forkChoice.getHead();
+ const headState = this.regen.getStateSync(stateRoot);
+ const headBlock = await this.db.block.get(fromHexString(blockRoot));
+ if (headBlock == null) {
+ throw Error(`Head block ${slot} ${headBlock} is not available in database`);
+ }
+
if (headState) {
- this.opPool.pruneAll(headState, finalizedState);
+ this.opPool.pruneAll(headBlock, headState);
}
}
diff --git a/packages/beacon-node/src/chain/errors/blobSidecarError.ts b/packages/beacon-node/src/chain/errors/blobSidecarError.ts
index e242cbcb11ba..f38aa883002c 100644
--- a/packages/beacon-node/src/chain/errors/blobSidecarError.ts
+++ b/packages/beacon-node/src/chain/errors/blobSidecarError.ts
@@ -21,6 +21,7 @@ export enum BlobSidecarErrorCode {
PARENT_UNKNOWN = "BLOB_SIDECAR_ERROR_PARENT_UNKNOWN",
NOT_LATER_THAN_PARENT = "BLOB_SIDECAR_ERROR_NOT_LATER_THAN_PARENT",
PROPOSAL_SIGNATURE_INVALID = "BLOB_SIDECAR_ERROR_PROPOSAL_SIGNATURE_INVALID",
+ INCLUSION_PROOF_INVALID = "BLOB_SIDECAR_ERROR_INCLUSION_PROOF_INVALID",
INCORRECT_PROPOSER = "BLOB_SIDECAR_ERROR_INCORRECT_PROPOSER",
}
@@ -37,6 +38,7 @@ export type BlobSidecarErrorType =
| {code: BlobSidecarErrorCode.PARENT_UNKNOWN; parentRoot: RootHex}
| {code: BlobSidecarErrorCode.NOT_LATER_THAN_PARENT; parentSlot: Slot; slot: Slot}
| {code: BlobSidecarErrorCode.PROPOSAL_SIGNATURE_INVALID}
+ | {code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID; slot: Slot; blobIdx: number}
| {code: BlobSidecarErrorCode.INCORRECT_PROPOSER; proposerIndex: ValidatorIndex};
export class BlobSidecarGossipError extends GossipActionError {}
diff --git a/packages/beacon-node/src/chain/errors/blockError.ts b/packages/beacon-node/src/chain/errors/blockError.ts
index ee06927a4fc1..6ab15275934e 100644
--- a/packages/beacon-node/src/chain/errors/blockError.ts
+++ b/packages/beacon-node/src/chain/errors/blockError.ts
@@ -63,6 +63,8 @@ export enum BlockErrorCode {
/** The attestation head block is too far behind the attestation slot, causing many skip slots.
This is deemed a DoS risk */
TOO_MANY_SKIPPED_SLOTS = "TOO_MANY_SKIPPED_SLOTS",
+ /** The blobs are unavailable */
+ DATA_UNAVAILABLE = "BLOCK_ERROR_DATA_UNAVAILABLE",
}
type ExecutionErrorStatus = Exclude<
@@ -103,7 +105,8 @@ export type BlockErrorType =
| {code: BlockErrorCode.TOO_MUCH_GAS_USED; gasUsed: number; gasLimit: number}
| {code: BlockErrorCode.SAME_PARENT_HASH; blockHash: RootHex}
| {code: BlockErrorCode.TRANSACTIONS_TOO_BIG; size: number; max: number}
- | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string};
+ | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string}
+ | {code: BlockErrorCode.DATA_UNAVAILABLE};
export class BlockGossipError extends GossipActionError {}
diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts
index 62355e334f61..a2f7fba34093 100644
--- a/packages/beacon-node/src/chain/interface.ts
+++ b/packages/beacon-node/src/chain/interface.ts
@@ -49,6 +49,7 @@ import {CheckpointBalancesCache} from "./balancesCache.js";
import {IChainOptions} from "./options.js";
import {AssembledBlockType, BlockAttributes, BlockType} from "./produceBlock/produceBlockBody.js";
import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js";
+import {SeenGossipBlockInput} from "./seenCache/index.js";
import {ShufflingCache} from "./shufflingCache.js";
export {BlockType, type AssembledBlockType};
@@ -102,14 +103,14 @@ export interface IBeaconChain {
readonly seenSyncCommitteeMessages: SeenSyncCommitteeMessages;
readonly seenContributionAndProof: SeenContributionAndProof;
readonly seenAttestationDatas: SeenAttestationDatas;
+ readonly seenGossipBlockInput: SeenGossipBlockInput;
// Seen cache for liveness checks
readonly seenBlockAttesters: SeenBlockAttesters;
readonly beaconProposerCache: BeaconProposerCache;
readonly checkpointBalancesCache: CheckpointBalancesCache;
- readonly producedBlobSidecarsCache: Map;
+ readonly producedContentsCache: Map;
readonly producedBlockRoot: Map;
- readonly producedBlindedBlobSidecarsCache: Map;
readonly shufflingCache: ShufflingCache;
readonly producedBlindedBlockRoot: Set;
readonly opts: IChainOptions;
@@ -151,14 +152,19 @@ export interface IBeaconChain {
*/
getBlockByRoot(root: RootHex): Promise<{block: allForks.SignedBeaconBlock; executionOptimistic: boolean} | null>;
- getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars;
-
- produceBlock(
- blockAttributes: BlockAttributes
- ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>;
- produceBlindedBlock(
- blockAttributes: BlockAttributes
- ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>;
+ getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents;
+
+ produceBlock(blockAttributes: BlockAttributes): Promise<{
+ block: allForks.BeaconBlock;
+ executionPayloadValue: Wei;
+ consensusBlockValue: Gwei;
+ shouldOverrideBuilder?: boolean;
+ }>;
+ produceBlindedBlock(blockAttributes: BlockAttributes): Promise<{
+ block: allForks.BlindedBeaconBlock;
+ executionPayloadValue: Wei;
+ consensusBlockValue: Gwei;
+ }>;
/** Process a block until complete */
processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise;
diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts
index cee8d0614c30..1fdee886ff1d 100644
--- a/packages/beacon-node/src/chain/opPools/opPool.ts
+++ b/packages/beacon-node/src/chain/opPools/opPool.ts
@@ -13,12 +13,14 @@ import {
MAX_BLS_TO_EXECUTION_CHANGES,
BLS_WITHDRAWAL_PREFIX,
MAX_ATTESTER_SLASHINGS,
+ ForkSeq,
} from "@lodestar/params";
-import {Epoch, phase0, capella, ssz, ValidatorIndex} from "@lodestar/types";
+import {Epoch, phase0, capella, ssz, ValidatorIndex, allForks} from "@lodestar/types";
import {IBeaconDb} from "../../db/index.js";
import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js";
import {BlockType} from "../interface.js";
import {Metrics} from "../../metrics/metrics.js";
+import {BlockProductionStep} from "../produceBlock/produceBlockBody.js";
import {isValidBlsToExecutionChangeForBlockInclusion} from "./utils.js";
type HexRoot = string;
@@ -178,7 +180,7 @@ export class OpPool {
] {
const {config} = state;
const stateEpoch = computeEpochAtSlot(state.slot);
- const stateFork = config.getForkName(state.slot);
+ const stateFork = config.getForkSeq(state.slot);
const toBeSlashedIndices = new Set();
const proposerSlashings: phase0.ProposerSlashing[] = [];
@@ -201,7 +203,7 @@ export class OpPool {
}
}
endProposerSlashing?.({
- step: "proposerSlashing",
+ step: BlockProductionStep.proposerSlashing,
});
const endAttesterSlashings = stepsMetrics?.startTimer();
@@ -235,7 +237,7 @@ export class OpPool {
}
}
endAttesterSlashings?.({
- step: "attesterSlashings",
+ step: BlockProductionStep.attesterSlashings,
});
const endVoluntaryExits = stepsMetrics?.startTimer();
@@ -247,7 +249,10 @@ export class OpPool {
// Signature validation is skipped in `isValidVoluntaryExit(,,false)` since it was already validated in gossip
// However we must make sure that the signature fork is the same, or it will become invalid if included through
// a future fork.
- stateFork === config.getForkName(computeStartSlotAtEpoch(voluntaryExit.message.epoch))
+ isVoluntaryExitSignatureIncludable(
+ stateFork,
+ config.getForkSeq(computeStartSlotAtEpoch(voluntaryExit.message.epoch))
+ )
) {
voluntaryExits.push(voluntaryExit);
if (voluntaryExits.length >= MAX_VOLUNTARY_EXITS) {
@@ -256,7 +261,7 @@ export class OpPool {
}
}
endVoluntaryExits?.({
- step: "voluntaryExits",
+ step: BlockProductionStep.voluntaryExits,
});
const endBlsToExecutionChanges = stepsMetrics?.startTimer();
@@ -270,7 +275,7 @@ export class OpPool {
}
}
endBlsToExecutionChanges?.({
- step: "blsToExecutionChanges",
+ step: BlockProductionStep.blsToExecutionChanges,
});
return [attesterSlashings, proposerSlashings, voluntaryExits, blsToExecutionChanges];
@@ -299,11 +304,11 @@ export class OpPool {
/**
* Prune all types of transactions given the latest head state
*/
- pruneAll(headState: CachedBeaconStateAllForks, finalizedState: CachedBeaconStateAllForks | null): void {
+ pruneAll(headBlock: allForks.SignedBeaconBlock, headState: CachedBeaconStateAllForks): void {
this.pruneAttesterSlashings(headState);
this.pruneProposerSlashings(headState);
this.pruneVoluntaryExits(headState);
- this.pruneBlsToExecutionChanges(headState, finalizedState);
+ this.pruneBlsToExecutionChanges(headBlock, headState);
}
/**
@@ -368,19 +373,28 @@ export class OpPool {
}
/**
- * Call after finalizing
- * Prune blsToExecutionChanges for validators which have been set with withdrawal
- * credentials
+ * Prune BLS to execution changes that have been applied to the state more than 1 block ago.
+ * In the worse case where head block is reorged, the same BlsToExecutionChange message can be re-added
+ * to opPool once gossipsub seen cache TTL passes.
*/
private pruneBlsToExecutionChanges(
- headState: CachedBeaconStateAllForks,
- finalizedState: CachedBeaconStateAllForks | null
+ headBlock: allForks.SignedBeaconBlock,
+ headState: CachedBeaconStateAllForks
): void {
+ const {config} = headState;
+ const recentBlsToExecutionChanges =
+ config.getForkSeq(headBlock.message.slot) >= ForkSeq.capella
+ ? (headBlock as capella.SignedBeaconBlock).message.body.blsToExecutionChanges
+ : [];
+
+ const recentBlsToExecutionChangeIndexes = new Set(
+ recentBlsToExecutionChanges.map((blsToExecutionChange) => blsToExecutionChange.message.validatorIndex)
+ );
+
for (const [key, blsToExecutionChange] of this.blsToExecutionChanges.entries()) {
- // TODO CAPELLA: We need the finalizedState to safely prune BlsToExecutionChanges. Finalized state may not be
- // available in the cache, so it can be null. Once there's a head only prunning strategy, change
- if (finalizedState !== null) {
- const validator = finalizedState.validators.getReadonly(blsToExecutionChange.data.message.validatorIndex);
+ const {validatorIndex} = blsToExecutionChange.data.message;
+ if (!recentBlsToExecutionChangeIndexes.has(validatorIndex)) {
+ const validator = headState.validators.getReadonly(validatorIndex);
if (validator.withdrawalCredentials[0] !== BLS_WITHDRAWAL_PREFIX) {
this.blsToExecutionChanges.delete(key);
}
@@ -389,6 +403,19 @@ export class OpPool {
}
}
+/**
+ * Returns true if a pre-validated signature is still valid to be included in a specific block's fork
+ */
+function isVoluntaryExitSignatureIncludable(stateFork: ForkSeq, voluntaryExitFork: ForkSeq): boolean {
+ if (stateFork >= ForkSeq.deneb) {
+ // Exists are perpetually valid https://eips.ethereum.org/EIPS/eip-7044
+ return true;
+ } else {
+ // Can only include exits from the current and previous fork
+ return voluntaryExitFork === stateFork || voluntaryExitFork === stateFork - 1;
+ }
+}
+
function isSlashableAtEpoch(validator: phase0.Validator, epoch: Epoch): boolean {
return !validator.slashed && validator.activationEpoch <= epoch && epoch < validator.withdrawableEpoch;
}
diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts
index ce8e720cd766..60658b69ca98 100644
--- a/packages/beacon-node/src/chain/prepareNextSlot.ts
+++ b/packages/beacon-node/src/chain/prepareNextSlot.ts
@@ -1,4 +1,9 @@
-import {computeEpochAtSlot, isExecutionStateType, computeTimeAtSlot} from "@lodestar/state-transition";
+import {
+ computeEpochAtSlot,
+ isExecutionStateType,
+ computeTimeAtSlot,
+ StateHashTreeRootSource,
+} from "@lodestar/state-transition";
import {ChainForkConfig} from "@lodestar/config";
import {ForkSeq, SLOTS_PER_EPOCH, ForkExecution} from "@lodestar/params";
import {Slot} from "@lodestar/types";
@@ -92,6 +97,9 @@ export class PrepareNextSlotScheduler {
headRoot,
isEpochTransition,
});
+ const precomputeEpochTransitionTimer = isEpochTransition
+ ? this.metrics?.precomputeNextEpochTransition.duration.startTimer()
+ : null;
// No need to wait for this or the clock drift
// Pre Bellatrix: we only do precompute state transition for the last slot of epoch
// For Bellatrix, we always do the `processSlots()` to prepare payload for the next slot
@@ -106,7 +114,9 @@ export class PrepareNextSlotScheduler {
// cache HashObjects for faster hashTreeRoot() later, especially for computeNewStateRoot() if we need to produce a block at slot 0 of epoch
// see https://github.com/ChainSafe/lodestar/issues/6194
- const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({source: "prepare_next_slot"});
+ const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({
+ source: StateHashTreeRootSource.prepareNextSlot,
+ });
prepareState.hashTreeRoot();
hashTreeRootTimer?.();
@@ -126,6 +136,8 @@ export class PrepareNextSlotScheduler {
prepareSlot,
previousHits,
});
+
+ precomputeEpochTransitionTimer?.();
}
if (isExecutionStateType(prepareState)) {
diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
index f5d02dbf9b6f..ccc0595d0db6 100644
--- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
+++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
@@ -2,6 +2,7 @@ import {
CachedBeaconStateAllForks,
DataAvailableStatus,
ExecutionPayloadStatus,
+ StateHashTreeRootSource,
stateTransition,
} from "@lodestar/state-transition";
import {allForks, Gwei, Root} from "@lodestar/types";
@@ -44,7 +45,9 @@ export function computeNewStateRoot(
const {attestations, syncAggregate, slashing} = postState.proposerRewards;
const proposerReward = BigInt(attestations + syncAggregate + slashing);
- const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({source: "compute_new_state_root"});
+ const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({
+ source: StateHashTreeRootSource.computeNewStateRoot,
+ });
const newStateRoot = postState.hashTreeRoot();
hashTreeRootTimer?.();
diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts
index 1c522c54a93d..0b6ff7b1316b 100644
--- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts
+++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts
@@ -35,17 +35,30 @@ import {PayloadId, IExecutionEngine, IExecutionBuilder, PayloadAttributes} from
import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js";
import {IEth1ForBlockProduction} from "../../eth1/index.js";
import {numToQuantity} from "../../eth1/provider/utils.js";
-import {
- validateBlobsAndKzgCommitments,
- validateBlindedBlobsAndKzgCommitments,
-} from "./validateBlobsAndKzgCommitments.js";
+import {validateBlobsAndKzgCommitments} from "./validateBlobsAndKzgCommitments.js";
// Time to provide the EL to generate a payload from new payload id
const PAYLOAD_GENERATION_TIME_MS = 500;
-enum PayloadPreparationType {
+
+export enum PayloadPreparationType {
Fresh = "Fresh",
Cached = "Cached",
Reorged = "Reorged",
+ Blinded = "Blinded",
+}
+
+/**
+ * Block production steps tracked in metrics
+ */
+export enum BlockProductionStep {
+ proposerSlashing = "proposerSlashing",
+ attesterSlashings = "attesterSlashings",
+ voluntaryExits = "voluntaryExits",
+ blsToExecutionChanges = "blsToExecutionChanges",
+ attestations = "attestations",
+ eth1DataAndDeposits = "eth1DataAndDeposits",
+ syncAggregate = "syncAggregate",
+ executionPayload = "executionPayload",
}
export type BlockAttributes = {
@@ -74,8 +87,8 @@ export enum BlobsResultType {
export type BlobsResult =
| {type: BlobsResultType.preDeneb}
- | {type: BlobsResultType.produced; blobSidecars: deneb.BlobSidecars; blockHash: RootHex}
- | {type: BlobsResultType.blinded; blobSidecars: deneb.BlindedBlobSidecars; blockHash: RootHex};
+ | {type: BlobsResultType.produced; contents: deneb.Contents; blockHash: RootHex}
+ | {type: BlobsResultType.blinded};
export async function produceBlockBody(
this: BeaconChain,
@@ -96,12 +109,20 @@ export async function produceBlockBody(
proposerIndex: ValidatorIndex;
proposerPubKey: BLSPubkey;
}
-): Promise<{body: AssembledBodyType; blobs: BlobsResult; executionPayloadValue: Wei}> {
+): Promise<{
+ body: AssembledBodyType;
+ blobs: BlobsResult;
+ executionPayloadValue: Wei;
+ shouldOverrideBuilder?: boolean;
+}> {
// Type-safe for blobs variable. Translate 'null' value into 'preDeneb' enum
// TODO: Not ideal, but better than just using null.
// TODO: Does not guarantee that preDeneb enum goes with a preDeneb block
let blobsResult: BlobsResult;
let executionPayloadValue: Wei;
+ // even though shouldOverrideBuilder is relevant for the engine response, for simplicity of typing
+ // we just return it undefined for the builder which anyway doesn't get consumed downstream
+ let shouldOverrideBuilder: boolean | undefined;
const fork = currentState.config.getForkName(blockSlot);
const logMeta: Record = {
@@ -134,13 +155,13 @@ export async function produceBlockBody(
const endAttestations = stepsMetrics?.startTimer();
const attestations = this.aggregatedAttestationPool.getAttestationsForBlock(this.forkChoice, currentState);
endAttestations?.({
- step: "attestations",
+ step: BlockProductionStep.attestations,
});
const endEth1DataAndDeposits = stepsMetrics?.startTimer();
const {eth1Data, deposits} = await this.eth1.getEth1DataAndDeposits(currentState);
endEth1DataAndDeposits?.({
- step: "eth1DataAndDeposits",
+ step: BlockProductionStep.eth1DataAndDeposits,
});
const blockBody: phase0.BeaconBlockBody = {
@@ -165,7 +186,7 @@ export async function produceBlockBody(
(blockBody as altair.BeaconBlockBody).syncAggregate = syncAggregate;
}
endSyncAggregate?.({
- step: "syncAggregate",
+ step: BlockProductionStep.syncAggregate,
});
Object.assign(logMeta, {
@@ -221,7 +242,7 @@ export async function produceBlockBody(
executionPayloadValue = builderRes.executionPayloadValue;
const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime);
- const prepType = "blinded";
+ const prepType = PayloadPreparationType.Blinded;
this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime);
this.logger.verbose("Fetched execution payload header from builder", {
slot: blockSlot,
@@ -231,35 +252,14 @@ export async function produceBlockBody(
});
if (ForkSeq[fork] >= ForkSeq.deneb) {
- const {blindedBlobsBundle} = builderRes;
- if (blindedBlobsBundle === undefined) {
- throw Error(`Invalid builder getHeader response for fork=${fork}, missing blindedBlobsBundle`);
+ const {blobKzgCommitments} = builderRes;
+ if (blobKzgCommitments === undefined) {
+ throw Error(`Invalid builder getHeader response for fork=${fork}, missing blobKzgCommitments`);
}
- // validate blindedBlobsBundle
- if (this.opts.sanityCheckExecutionEngineBlobs) {
- validateBlindedBlobsAndKzgCommitments(builderRes.header, blindedBlobsBundle);
- }
-
- (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blindedBlobsBundle.commitments;
- const blockHash = toHex(builderRes.header.blockHash);
-
- const blobSidecars = Array.from({length: blindedBlobsBundle.blobRoots.length}, (_v, index) => {
- const blobRoot = blindedBlobsBundle.blobRoots[index];
- const commitment = blindedBlobsBundle.commitments[index];
- const proof = blindedBlobsBundle.proofs[index];
- const blindedBlobSidecar = {
- index,
- blobRoot,
- kzgProof: proof,
- kzgCommitment: commitment,
- };
- // Other fields will be injected after postState is calculated
- return blindedBlobSidecar;
- }) as deneb.BlindedBlobSidecars;
- blobsResult = {type: BlobsResultType.blinded, blobSidecars, blockHash};
-
- Object.assign(logMeta, {blobs: blindedBlobsBundle.commitments.length});
+ (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blobKzgCommitments;
+ blobsResult = {type: BlobsResultType.blinded};
+ Object.assign(logMeta, {blobs: blobKzgCommitments.length});
} else {
blobsResult = {type: BlobsResultType.preDeneb};
}
@@ -303,9 +303,11 @@ export async function produceBlockBody(
const engineRes = await this.executionEngine.getPayload(fork, payloadId);
const {executionPayload, blobsBundle} = engineRes;
+ shouldOverrideBuilder = engineRes.shouldOverrideBuilder;
+
(blockBody as allForks.ExecutionBlockBody).executionPayload = executionPayload;
executionPayloadValue = engineRes.executionPayloadValue;
- Object.assign(logMeta, {transactions: executionPayload.transactions.length});
+ Object.assign(logMeta, {transactions: executionPayload.transactions.length, shouldOverrideBuilder});
const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime);
this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime);
@@ -332,23 +334,10 @@ export async function produceBlockBody(
(blockBody as deneb.BeaconBlockBody).blobKzgCommitments = blobsBundle.commitments;
const blockHash = toHex(executionPayload.blockHash);
+ const contents = {kzgProofs: blobsBundle.proofs, blobs: blobsBundle.blobs};
+ blobsResult = {type: BlobsResultType.produced, contents, blockHash};
- const blobSidecars = Array.from({length: blobsBundle.blobs.length}, (_v, index) => {
- const blob = blobsBundle.blobs[index];
- const commitment = blobsBundle.commitments[index];
- const proof = blobsBundle.proofs[index];
- const blobSidecar = {
- index,
- blob,
- kzgProof: proof,
- kzgCommitment: commitment,
- };
- // Other fields will be injected after postState is calculated
- return blobSidecar;
- }) as deneb.BlobSidecars;
- blobsResult = {type: BlobsResultType.produced, blobSidecars, blockHash};
-
- Object.assign(logMeta, {blobs: blobSidecars.length});
+ Object.assign(logMeta, {blobs: blobsBundle.commitments.length});
} else {
blobsResult = {type: BlobsResultType.preDeneb};
}
@@ -380,7 +369,7 @@ export async function produceBlockBody(
executionPayloadValue = BigInt(0);
}
endExecutionPayload?.({
- step: "executionPayload",
+ step: BlockProductionStep.executionPayload,
});
if (ForkSeq[fork] >= ForkSeq.capella) {
@@ -401,7 +390,7 @@ export async function produceBlockBody(
Object.assign(logMeta, {executionPayloadValue});
this.logger.verbose("Produced beacon block body", logMeta);
- return {body: blockBody as AssembledBodyType, blobs: blobsResult, executionPayloadValue};
+ return {body: blockBody as AssembledBodyType, blobs: blobsResult, executionPayloadValue, shouldOverrideBuilder};
}
/**
@@ -502,7 +491,7 @@ async function prepareExecutionPayloadHeader(
): Promise<{
header: allForks.ExecutionPayloadHeader;
executionPayloadValue: Wei;
- blindedBlobsBundle?: deneb.BlindedBlobsBundle;
+ blobKzgCommitments?: deneb.BlobKzgCommitments;
}> {
if (!chain.executionBuilder) {
throw Error("executionBuilder required");
diff --git a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts
index 0d00d0c8bd72..54e90672d189 100644
--- a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts
+++ b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts
@@ -1,4 +1,4 @@
-import {allForks, deneb} from "@lodestar/types";
+import {allForks} from "@lodestar/types";
import {BlobsBundle} from "../../execution/index.js";
/**
@@ -13,15 +13,3 @@ export function validateBlobsAndKzgCommitments(payload: allForks.ExecutionPayloa
);
}
}
-
-export function validateBlindedBlobsAndKzgCommitments(
- payload: allForks.ExecutionPayloadHeader,
- blindedBlobsBundle: deneb.BlindedBlobsBundle
-): void {
- // sanity-check that the KZG commitments match the blobs (as produced by the execution engine)
- if (blindedBlobsBundle.blobRoots.length !== blindedBlobsBundle.commitments.length) {
- throw Error(
- `BlindedBlobs bundle blobs len ${blindedBlobsBundle.blobRoots.length} != commitments len ${blindedBlobsBundle.commitments.length}`
- );
- }
-}
diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts
index 5305502c8c05..dfda56cc1eea 100644
--- a/packages/beacon-node/src/chain/regen/queued.ts
+++ b/packages/beacon-node/src/chain/regen/queued.ts
@@ -221,7 +221,7 @@ export class QueuedStateRegenerator implements IStateRegenerator {
private jobQueueProcessor = async (regenRequest: RegenRequest): Promise => {
const metricsLabels = {
caller: regenRequest.args[regenRequest.args.length - 1] as RegenCaller,
- entrypoint: regenRequest.key,
+ entrypoint: regenRequest.key as RegenFnName,
};
let timer;
try {
diff --git a/packages/beacon-node/src/chain/reprocess.ts b/packages/beacon-node/src/chain/reprocess.ts
index 3ab6056fb3af..4c91ef07ff69 100644
--- a/packages/beacon-node/src/chain/reprocess.ts
+++ b/packages/beacon-node/src/chain/reprocess.ts
@@ -11,7 +11,7 @@ export const REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC = 2;
/**
* Reprocess status for metrics
*/
-enum ReprocessStatus {
+export enum ReprocessStatus {
/**
* There are too many attestations that have unknown block root.
*/
@@ -140,7 +140,10 @@ export class ReprocessController {
for (const awaitingPromise of awaitingPromisesByRoot.values()) {
const {resolve, addedTimeMs} = awaitingPromise;
resolve(false);
- this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set((now - addedTimeMs) / 1000);
+ this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set(
+ {reason: ReprocessStatus.expired},
+ (now - addedTimeMs) / 1000
+ );
this.metrics?.reprocessApiAttestations.reject.inc({reason: ReprocessStatus.expired});
}
diff --git a/packages/beacon-node/src/chain/seenCache/index.ts b/packages/beacon-node/src/chain/seenCache/index.ts
index f354a37f93ee..250e6581c312 100644
--- a/packages/beacon-node/src/chain/seenCache/index.ts
+++ b/packages/beacon-node/src/chain/seenCache/index.ts
@@ -2,3 +2,4 @@ export {SeenAggregators, SeenAttesters} from "./seenAttesters.js";
export {SeenBlockProposers} from "./seenBlockProposers.js";
export {SeenSyncCommitteeMessages} from "./seenCommittee.js";
export {SeenContributionAndProof} from "./seenCommitteeContribution.js";
+export {SeenGossipBlockInput} from "./seenGossipBlockInput.js";
diff --git a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts
index ded54a5b4a54..a19476497e9f 100644
--- a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts
+++ b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts
@@ -17,7 +17,7 @@ export type AttestationDataCacheEntry = {
subnet: number;
};
-enum RejectReason {
+export enum RejectReason {
// attestation data reaches MAX_CACHE_SIZE_PER_SLOT
reached_limit = "reached_limit",
// attestation data is too old
diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts
new file mode 100644
index 000000000000..8b767975c112
--- /dev/null
+++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts
@@ -0,0 +1,170 @@
+import {toHexString} from "@chainsafe/ssz";
+import {deneb, RootHex, ssz, allForks} from "@lodestar/types";
+import {ChainForkConfig} from "@lodestar/config";
+import {pruneSetToMax} from "@lodestar/utils";
+import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params";
+
+import {
+ BlockInput,
+ getBlockInput,
+ BlockSource,
+ BlockInputBlobs,
+ BlobsCache,
+ GossipedInputType,
+} from "../blocks/types.js";
+
+type GossipedBlockInput =
+ | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null}
+ | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null};
+
+type BlockInputCacheType = {
+ block?: allForks.SignedBeaconBlock;
+ blockBytes?: Uint8Array | null;
+ blobsCache: BlobsCache;
+ // promise and its callback cached for delayed resolution
+ availabilityPromise: Promise;
+ resolveAvailability: (blobs: BlockInputBlobs) => void;
+};
+
+const MAX_GOSSIPINPUT_CACHE = 5;
+
+/**
+ * SeenGossipBlockInput tracks and caches the live blobs and blocks on the network to solve data availability
+ * for the blockInput. If no block has been seen yet for some already seen blobs, it responds with null, but
+ * on the first block or the subsequent blobs it responds with blobs promise till all blobs become available.
+ *
+ * One can start processing block on blobs promise blockInput response and can await on the promise before
+ * fully importing the block. The blobs promise gets resolved as soon as all blobs corresponding to that
+ * block are seen by SeenGossipBlockInput
+ */
+export class SeenGossipBlockInput {
+ private blockInputCache = new Map();
+
+ prune(): void {
+ pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE);
+ }
+
+ getGossipBlockInput(
+ config: ChainForkConfig,
+ gossipedInput: GossipedBlockInput
+ ):
+ | {
+ blockInput: BlockInput;
+ blockInputMeta: {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number};
+ }
+ | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}} {
+ let blockHex;
+ let blockCache;
+
+ if (gossipedInput.type === GossipedInputType.block) {
+ const {signedBlock, blockBytes} = gossipedInput;
+
+ blockHex = toHexString(
+ config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message)
+ );
+ blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry();
+
+ blockCache.block = signedBlock;
+ blockCache.blockBytes = blockBytes;
+ } else {
+ const {blobSidecar, blobBytes} = gossipedInput;
+ const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message);
+ blockHex = toHexString(blockRoot);
+ blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry();
+
+ // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions
+ blockCache.blobsCache.set(blobSidecar.index, {
+ blobSidecar,
+ // easily splice out the unsigned message as blob is a fixed length type
+ blobBytes: blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null,
+ });
+ }
+
+ if (!this.blockInputCache.has(blockHex)) {
+ this.blockInputCache.set(blockHex, blockCache);
+ }
+ const {block: signedBlock, blockBytes, blobsCache, availabilityPromise, resolveAvailability} = blockCache;
+
+ if (signedBlock !== undefined) {
+ // block is available, check if all blobs have shown up
+ const {slot, body} = signedBlock.message;
+ const {blobKzgCommitments} = body as deneb.BeaconBlockBody;
+ const blockInfo = `blockHex=${blockHex}, slot=${slot}`;
+
+ if (blobKzgCommitments.length < blobsCache.size) {
+ throw Error(
+ `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}`
+ );
+ }
+
+ if (blobKzgCommitments.length === blobsCache.size) {
+ const allBlobs = getBlockInputBlobs(blobsCache);
+ resolveAvailability(allBlobs);
+ const {blobs, blobsBytes} = allBlobs;
+ return {
+ blockInput: getBlockInput.postDeneb(
+ config,
+ signedBlock,
+ BlockSource.gossip,
+ blobs,
+ blockBytes ?? null,
+ blobsBytes
+ ),
+ blockInputMeta: {pending: null, haveBlobs: blobs.length, expectedBlobs: blobKzgCommitments.length},
+ };
+ } else {
+ return {
+ blockInput: getBlockInput.blobsPromise(
+ config,
+ signedBlock,
+ BlockSource.gossip,
+ blobsCache,
+ blockBytes ?? null,
+ availabilityPromise
+ ),
+ blockInputMeta: {
+ pending: GossipedInputType.blob,
+ haveBlobs: blobsCache.size,
+ expectedBlobs: blobKzgCommitments.length,
+ },
+ };
+ }
+ } else {
+ // will need to wait for the block to showup
+ return {
+ blockInput: null,
+ blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null},
+ };
+ }
+ }
+}
+
+function getEmptyBlockInputCacheEntry(): BlockInputCacheType {
+ // Capture both the promise and its callbacks.
+ // It is not spec'ed but in tests in Firefox and NodeJS the promise constructor is run immediately
+ let resolveAvailability: ((blobs: BlockInputBlobs) => void) | null = null;
+ const availabilityPromise = new Promise((resolveCB) => {
+ resolveAvailability = resolveCB;
+ });
+ if (resolveAvailability === null) {
+ throw Error("Promise Constructor was not executed immediately");
+ }
+ const blobsCache = new Map();
+ return {availabilityPromise, resolveAvailability, blobsCache};
+}
+
+function getBlockInputBlobs(blobsCache: BlobsCache): BlockInputBlobs {
+ const blobs = [];
+ const blobsBytes = [];
+
+ for (let index = 0; index < blobsCache.size; index++) {
+ const blobCache = blobsCache.get(index);
+ if (blobCache === undefined) {
+ throw Error(`Missing blobSidecar at index=${index}`);
+ }
+ const {blobSidecar, blobBytes} = blobCache;
+ blobs.push(blobSidecar);
+ blobsBytes.push(blobBytes);
+ }
+ return {blobs, blobsBytes};
+}
diff --git a/packages/beacon-node/src/chain/shufflingCache.ts b/packages/beacon-node/src/chain/shufflingCache.ts
index c8468f3b6db5..23177142d846 100644
--- a/packages/beacon-node/src/chain/shufflingCache.ts
+++ b/packages/beacon-node/src/chain/shufflingCache.ts
@@ -167,6 +167,23 @@ export class ShufflingCache {
}
}
+ /**
+ * Same as get() but synchronous.
+ */
+ getSync(shufflingEpoch: Epoch, decisionRootHex: RootHex): EpochShuffling | null {
+ const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionRootHex);
+ if (cacheItem === undefined) {
+ return null;
+ }
+
+ if (isShufflingCacheItem(cacheItem)) {
+ return cacheItem.shuffling;
+ }
+
+ // ignore promise
+ return null;
+ }
+
private add(shufflingEpoch: Epoch, decisionBlock: RootHex, cacheItem: CacheItem): void {
this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).set(decisionBlock, cacheItem);
pruneSetToMax(this.itemsByDecisionRootByEpoch, this.maxEpochs);
diff --git a/packages/beacon-node/src/chain/stateCache/datastore/db.ts b/packages/beacon-node/src/chain/stateCache/datastore/db.ts
new file mode 100644
index 000000000000..fef38a7f8dd2
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/datastore/db.ts
@@ -0,0 +1,38 @@
+import {CachedBeaconStateAllForks} from "@lodestar/state-transition";
+import {phase0, ssz} from "@lodestar/types";
+import {IBeaconDb} from "../../../db/interface.js";
+import {CPStateDatastore, DatastoreKey} from "./types.js";
+
+/**
+ * Implementation of CPStateDatastore using db.
+ */
+export class DbCPStateDatastore implements CPStateDatastore {
+ constructor(private readonly db: IBeaconDb) {}
+
+ async write(cpKey: phase0.Checkpoint, state: CachedBeaconStateAllForks): Promise {
+ const serializedCheckpoint = checkpointToDatastoreKey(cpKey);
+ const stateBytes = state.serialize();
+ await this.db.checkpointState.putBinary(serializedCheckpoint, stateBytes);
+ return serializedCheckpoint;
+ }
+
+ async remove(serializedCheckpoint: DatastoreKey): Promise {
+ await this.db.checkpointState.delete(serializedCheckpoint);
+ }
+
+ async read(serializedCheckpoint: DatastoreKey): Promise {
+ return this.db.checkpointState.getBinary(serializedCheckpoint);
+ }
+
+ async readKeys(): Promise {
+ return this.db.checkpointState.keys();
+ }
+}
+
+export function datastoreKeyToCheckpoint(key: DatastoreKey): phase0.Checkpoint {
+ return ssz.phase0.Checkpoint.deserialize(key);
+}
+
+export function checkpointToDatastoreKey(cp: phase0.Checkpoint): DatastoreKey {
+ return ssz.phase0.Checkpoint.serialize(cp);
+}
diff --git a/packages/beacon-node/src/chain/stateCache/datastore/index.ts b/packages/beacon-node/src/chain/stateCache/datastore/index.ts
new file mode 100644
index 000000000000..c37de5292a38
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/datastore/index.ts
@@ -0,0 +1,2 @@
+export * from "./types.js";
+export * from "./db.js";
diff --git a/packages/beacon-node/src/chain/stateCache/datastore/types.ts b/packages/beacon-node/src/chain/stateCache/datastore/types.ts
new file mode 100644
index 000000000000..66ea67f93500
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/datastore/types.ts
@@ -0,0 +1,13 @@
+import {CachedBeaconStateAllForks} from "@lodestar/state-transition";
+import {phase0} from "@lodestar/types";
+
+// With db implementation, persistedKey is serialized data of a checkpoint
+export type DatastoreKey = Uint8Array;
+
+// Make this generic to support testing
+export interface CPStateDatastore {
+ write: (cpKey: phase0.Checkpoint, state: CachedBeaconStateAllForks) => Promise;
+ remove: (key: DatastoreKey) => Promise;
+ read: (key: DatastoreKey) => Promise;
+ readKeys: () => Promise;
+}
diff --git a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts
new file mode 100644
index 000000000000..854983101c04
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts
@@ -0,0 +1,181 @@
+import {toHexString} from "@chainsafe/ssz";
+import {RootHex} from "@lodestar/types";
+import {CachedBeaconStateAllForks} from "@lodestar/state-transition";
+import {routes} from "@lodestar/api";
+import {Metrics} from "../../metrics/index.js";
+import {LinkedList} from "../../util/array.js";
+import {MapTracker} from "./mapMetrics.js";
+import {BlockStateCache} from "./types.js";
+
+export type FIFOBlockStateCacheOpts = {
+ maxBlockStates?: number;
+};
+
+/**
+ * Regen state if there's a reorg distance > 32 slots.
+ */
+export const DEFAULT_MAX_BLOCK_STATES = 32;
+
+/**
+ * New implementation of BlockStateCache that keeps the most recent n states consistently
+ * - Maintain a linked list (FIFO) with special handling for head state, which is always the first item in the list
+ * - Prune per add() instead of per checkpoint so it only keeps n historical states consistently, prune from tail
+ * - No need to prune per finalized checkpoint
+ *
+ * Given this block tree with Block 11 as head:
+ * ```
+ Block 10
+ |
+ +-----+-----+
+ | |
+ Block 11 Block 12
+ ^ |
+ | |
+ head Block 13
+ * ```
+ * The maintained key order would be: 11 -> 13 -> 12 -> 10, and state 10 will be pruned first.
+ */
+export class FIFOBlockStateCache implements BlockStateCache {
+ /**
+ * Max number of states allowed in the cache
+ */
+ readonly maxStates: number;
+
+ private readonly cache: MapTracker;
+ /**
+ * Key order to implement FIFO cache
+ */
+ private readonly keyOrder: LinkedList;
+ private readonly metrics: Metrics["stateCache"] | null | undefined;
+
+ constructor(opts: FIFOBlockStateCacheOpts, {metrics}: {metrics?: Metrics | null}) {
+ this.maxStates = opts.maxBlockStates ?? DEFAULT_MAX_BLOCK_STATES;
+ this.cache = new MapTracker(metrics?.stateCache);
+ if (metrics) {
+ this.metrics = metrics.stateCache;
+ metrics.stateCache.size.addCollect(() => metrics.stateCache.size.set(this.cache.size));
+ }
+ this.keyOrder = new LinkedList();
+ }
+
+ /**
+ * Set a state as head, happens when importing a block and head block is changed.
+ */
+ setHeadState(item: CachedBeaconStateAllForks | null): void {
+ if (item !== null) {
+ this.add(item, true);
+ }
+ }
+
+ /**
+ * Get a state from this cache given a state root hex.
+ */
+ get(rootHex: RootHex): CachedBeaconStateAllForks | null {
+ this.metrics?.lookups.inc();
+ const item = this.cache.get(rootHex);
+ if (!item) {
+ return null;
+ }
+
+ this.metrics?.hits.inc();
+ this.metrics?.stateClonedCount.observe(item.clonedCount);
+
+ return item;
+ }
+
+ /**
+ * Add a state to this cache.
+ * @param isHead if true, move it to the head of the list. Otherwise add to the 2nd position.
+ * In importBlock() steps, normally it'll call add() with isHead = false first. Then call setHeadState() to set the head.
+ */
+ add(item: CachedBeaconStateAllForks, isHead = false): void {
+ const key = toHexString(item.hashTreeRoot());
+ if (this.cache.get(key) != null) {
+ if (!this.keyOrder.has(key)) {
+ throw Error(`State exists but key not found in keyOrder: ${key}`);
+ }
+ if (isHead) {
+ this.keyOrder.moveToHead(key);
+ } else {
+ this.keyOrder.moveToSecond(key);
+ }
+ // same size, no prune
+ return;
+ }
+
+ // new state
+ this.metrics?.adds.inc();
+ this.cache.set(key, item);
+ if (isHead) {
+ this.keyOrder.unshift(key);
+ } else {
+ // insert after head
+ const head = this.keyOrder.first();
+ if (head == null) {
+ // should not happen, however handle just in case
+ this.keyOrder.unshift(key);
+ } else {
+ this.keyOrder.insertAfter(head, key);
+ }
+ }
+ this.prune(key);
+ }
+
+ get size(): number {
+ return this.cache.size;
+ }
+
+ /**
+ * Prune the cache from tail to keep the most recent n states consistently.
+ * The tail of the list is the oldest state, in case regen adds back the same state,
+ * it should stay next to head so that it won't be pruned right away.
+ * The FIFO cache helps with this.
+ */
+ prune(lastAddedKey: string): void {
+ while (this.keyOrder.length > this.maxStates) {
+ const key = this.keyOrder.last();
+ // it does not make sense to prune the last added state
+ // this only happens when max state is 1 in a short period of time
+ if (key === lastAddedKey) {
+ break;
+ }
+ if (!key) {
+ // should not happen
+ throw new Error("No key");
+ }
+ this.keyOrder.pop();
+ this.cache.delete(key);
+ }
+ }
+
+ /**
+ * No need for this implementation
+ * This is only to conform to the old api
+ */
+ deleteAllBeforeEpoch(): void {}
+
+ /**
+ * ONLY FOR DEBUGGING PURPOSES. For lodestar debug API.
+ */
+ clear(): void {
+ this.cache.clear();
+ }
+
+ /** ONLY FOR DEBUGGING PURPOSES. For lodestar debug API */
+ dumpSummary(): routes.lodestar.StateCacheItem[] {
+ return Array.from(this.cache.entries()).map(([key, state]) => ({
+ slot: state.slot,
+ root: toHexString(state.hashTreeRoot()),
+ reads: this.cache.readCount.get(key) ?? 0,
+ lastRead: this.cache.lastRead.get(key) ?? 0,
+ checkpointState: false,
+ }));
+ }
+
+ /**
+ * For unit test only.
+ */
+ dumpKeyOrder(): string[] {
+ return this.keyOrder.toArray();
+ }
+}
diff --git a/packages/beacon-node/src/chain/stateCache/index.ts b/packages/beacon-node/src/chain/stateCache/index.ts
index 69fb34a77e4c..b16d87c3fa0d 100644
--- a/packages/beacon-node/src/chain/stateCache/index.ts
+++ b/packages/beacon-node/src/chain/stateCache/index.ts
@@ -1,2 +1,3 @@
export * from "./stateContextCache.js";
export * from "./stateContextCheckpointsCache.js";
+export * from "./fifoBlockStateCache.js";
diff --git a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts
index eb52755bfc00..bb33323015d4 100644
--- a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts
+++ b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts
@@ -1,8 +1,8 @@
-import {IAvgMinMax} from "../../metrics/index.js";
+import {AvgMinMax} from "@lodestar/utils";
type MapTrackerMetrics = {
- reads: IAvgMinMax;
- secondsSinceLastRead: IAvgMinMax;
+ reads: AvgMinMax;
+ secondsSinceLastRead: AvgMinMax;
};
export class MapTracker extends Map {
diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts
new file mode 100644
index 000000000000..8ad5c5098118
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts
@@ -0,0 +1,645 @@
+import {fromHexString, toHexString} from "@chainsafe/ssz";
+import {phase0, Epoch, RootHex} from "@lodestar/types";
+import {CachedBeaconStateAllForks, computeStartSlotAtEpoch, getBlockRootAtSlot} from "@lodestar/state-transition";
+import {Logger, MapDef} from "@lodestar/utils";
+import {routes} from "@lodestar/api";
+import {loadCachedBeaconState} from "@lodestar/state-transition";
+import {Metrics} from "../../metrics/index.js";
+import {IClock} from "../../util/clock.js";
+import {ShufflingCache} from "../shufflingCache.js";
+import {MapTracker} from "./mapMetrics.js";
+import {CheckpointHex, CheckpointStateCache, CacheItemType} from "./types.js";
+import {CPStateDatastore, DatastoreKey, datastoreKeyToCheckpoint} from "./datastore/index.js";
+
+type GetHeadStateFn = () => CachedBeaconStateAllForks;
+
+type PersistentCheckpointStateCacheModules = {
+ metrics?: Metrics | null;
+ logger: Logger;
+ clock?: IClock | null;
+ shufflingCache: ShufflingCache;
+ datastore: CPStateDatastore;
+ getHeadState?: GetHeadStateFn;
+};
+
+type PersistentCheckpointStateCacheOpts = {
+ // Keep max n states in memory, persist the rest to disk
+ maxCPStateEpochsInMemory?: number;
+};
+
+/** checkpoint serialized as a string */
+type CacheKey = string;
+
+type InMemoryCacheItem = {
+ type: CacheItemType.inMemory;
+ state: CachedBeaconStateAllForks;
+ // if a cp state is reloaded from disk, it'll keep track of persistedKey to allow us to remove it from disk later
+ // it also helps not to persist it again
+ persistedKey?: DatastoreKey;
+};
+
+type PersistedCacheItem = {
+ type: CacheItemType.persisted;
+ value: DatastoreKey;
+};
+
+type CacheItem = InMemoryCacheItem | PersistedCacheItem;
+
+type LoadedStateBytesData = {persistedKey: DatastoreKey; stateBytes: Uint8Array};
+
+/**
+ * Before n-historical states, lodestar keeps mostly 3 states in memory with 1 finalized state
+ * Since Jan 2024, lodestar stores the finalized state in disk and keeps up to 2 epochs in memory
+ */
+export const DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY = 2;
+
+/**
+ * An implementation of CheckpointStateCache that keep up to n epoch checkpoint states in memory and persist the rest to disk
+ * - If it's more than `maxEpochsInMemory` epochs old, it will persist n last epochs to disk based on the view of the block
+ * - Once a chain gets finalized we'll prune all states from memory and disk for epochs < finalizedEpoch
+ * - In get*() apis if shouldReload is true, it will reload from disk. The reload() api is expensive and should only be called in some important flows:
+ * - Get state for block processing
+ * - updateHeadState
+ * - as with any cache, the state could be evicted from memory at any time, so we should always check if the state is in memory or not
+ * - Each time we process a state, we only persist exactly 1 checkpoint state per epoch based on the view of block and prune all others. The persisted
+ * checkpoint state could be finalized and used later in archive task, it's also used to regen states.
+ * - When we process multiple states in the same epoch, we could persist different checkpoint states of the same epoch because each block could have its
+ * own view. See unit test of this file `packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts` for more details.
+ *
+ * The below diagram shows Previous Root Checkpoint State is persisted for epoch (n-2) and Current Root Checkpoint State is persisted for epoch (n-1)
+ * while at epoch (n) and (n+1) we have both of them in memory
+ *
+ * ╔════════════════════════════════════╗═══════════════╗
+ * ║ persisted to db or fs ║ in memory ║
+ * ║ reload if needed ║ ║
+ * ║ -----------------------------------║---------------║
+ * ║ epoch: (n-2) (n-1) ║ n (n+1) ║
+ * ║ |-------|-------|----║--|-------|----║
+ * ║ ^ ^ ║ ^ ^ ║
+ * ║ ║ ^ ^ ║
+ * ╚════════════════════════════════════╝═══════════════╝
+ *
+ * The "in memory" checkpoint states are similar to the old implementation: we have both Previous Root Checkpoint State and Current Root Checkpoint State per epoch.
+ * However in the "persisted to db or fs" part, we usually only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later
+ * based on the view of blocks.
+ */
+export class PersistentCheckpointStateCache implements CheckpointStateCache {
+ private readonly cache: MapTracker<CacheKey, CacheItem>;
+ /** Epoch -> Set<hex root> */
+ private readonly epochIndex = new MapDef<Epoch, Set<RootHex>>(() => new Set());
+ private readonly metrics: Metrics["cpStateCache"] | null | undefined;
+ private readonly logger: Logger;
+ private readonly clock: IClock | null | undefined;
+ private preComputedCheckpoint: string | null = null;
+ private preComputedCheckpointHits: number | null = null;
+ private readonly maxEpochsInMemory: number;
+ private readonly datastore: CPStateDatastore;
+ private readonly shufflingCache: ShufflingCache;
+ private readonly getHeadState?: GetHeadStateFn;
+
+ constructor(
+ {metrics, logger, clock, shufflingCache, datastore, getHeadState}: PersistentCheckpointStateCacheModules,
+ opts: PersistentCheckpointStateCacheOpts
+ ) {
+ this.cache = new MapTracker(metrics?.cpStateCache);
+ if (metrics) {
+ this.metrics = metrics.cpStateCache;
+ metrics.cpStateCache.size.addCollect(() => {
+ let persistCount = 0;
+ let inMemoryCount = 0;
+ const memoryEpochs = new Set<Epoch>();
+ const persistentEpochs = new Set<Epoch>();
+ for (const [key, cacheItem] of this.cache.entries()) {
+ const {epoch} = fromCacheKey(key);
+ if (isPersistedCacheItem(cacheItem)) {
+ persistCount++;
+ persistentEpochs.add(epoch);
+ } else {
+ inMemoryCount++;
+ memoryEpochs.add(epoch);
+ }
+ }
+ metrics.cpStateCache.size.set({type: CacheItemType.persisted}, persistCount);
+ metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, inMemoryCount);
+ metrics.cpStateCache.epochSize.set({type: CacheItemType.persisted}, persistentEpochs.size);
+ metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, memoryEpochs.size);
+ });
+ }
+ this.logger = logger;
+ this.clock = clock;
+ if (opts.maxCPStateEpochsInMemory !== undefined && opts.maxCPStateEpochsInMemory < 0) {
+ throw new Error("maxEpochsInMemory must be >= 0");
+ }
+ this.maxEpochsInMemory = opts.maxCPStateEpochsInMemory ?? DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY;
+ // Specify different datastore for testing
+ this.datastore = datastore;
+ this.shufflingCache = shufflingCache;
+ this.getHeadState = getHeadState;
+ }
+
+ /**
+ * Reload checkpoint state keys from the last run.
+ */
+ async init(): Promise<void> {
+ const persistedKeys = await this.datastore.readKeys();
+ for (const persistedKey of persistedKeys) {
+ const cp = datastoreKeyToCheckpoint(persistedKey);
+ this.cache.set(toCacheKey(cp), {type: CacheItemType.persisted, value: persistedKey});
+ this.epochIndex.getOrDefault(cp.epoch).add(toHexString(cp.root));
+ }
+ this.logger.info("Loaded persisted checkpoint states from the last run", {
+ count: persistedKeys.length,
+ maxEpochsInMemory: this.maxEpochsInMemory,
+ });
+ }
+
+ /**
+ * Get a state from cache, it may reload from disk.
+ * This is an expensive api, should only be called in some important flows:
+ * - Validate a gossip block
+ * - Get block for processing
+ * - Regen head state
+ */
+ async getOrReload(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | null> {
+ const stateOrStateBytesData = await this.getStateOrLoadDb(cp);
+ if (stateOrStateBytesData === null || isCachedBeaconState(stateOrStateBytesData)) {
+ return stateOrStateBytesData;
+ }
+ const {persistedKey, stateBytes} = stateOrStateBytesData;
+ const logMeta = {persistedKey: toHexString(persistedKey)};
+ this.logger.debug("Reload: read state successful", logMeta);
+ this.metrics?.stateReloadSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0);
+ const seedState = this.findSeedStateToReload(cp) ?? this.getHeadState?.();
+ if (seedState == null) {
+ throw new Error("No seed state found for cp " + toCacheKey(cp));
+ }
+ this.metrics?.stateReloadEpochDiff.observe(Math.abs(seedState.epochCtx.epoch - cp.epoch));
+ this.logger.debug("Reload: found seed state", {...logMeta, seedSlot: seedState.slot});
+
+ try {
+ const timer = this.metrics?.stateReloadDuration.startTimer();
+ const newCachedState = loadCachedBeaconState(seedState, stateBytes, {
+ shufflingGetter: this.shufflingCache.getSync.bind(this.shufflingCache),
+ });
+ newCachedState.commit();
+ const stateRoot = toHexString(newCachedState.hashTreeRoot());
+ timer?.();
+ this.logger.debug("Reload: cached state load successful", {
+ ...logMeta,
+ stateSlot: newCachedState.slot,
+ stateRoot,
+ seedSlot: seedState.slot,
+ });
+
+ // only remove persisted state once we reload successfully
+ const cpKey = toCacheKey(cp);
+ this.cache.set(cpKey, {type: CacheItemType.inMemory, state: newCachedState, persistedKey});
+ this.epochIndex.getOrDefault(cp.epoch).add(cp.rootHex);
+ // don't prune from memory here, call it at the last 1/3 of slot 0 of an epoch
+ return newCachedState;
+ } catch (e) {
+ this.logger.debug("Reload: error loading cached state", logMeta, e as Error);
+ return null;
+ }
+ }
+
+ /**
+ * Return either state or state bytes loaded from db.
+ */
+ async getStateOrBytes(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | Uint8Array | null> {
+ const stateOrLoadedState = await this.getStateOrLoadDb(cp);
+ if (stateOrLoadedState === null || isCachedBeaconState(stateOrLoadedState)) {
+ return stateOrLoadedState;
+ }
+ return stateOrLoadedState.stateBytes;
+ }
+
+ /**
+ * Return either state or state bytes with persisted key loaded from db.
+ */
+ async getStateOrLoadDb(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | LoadedStateBytesData | null> {
+ const cpKey = toCacheKey(cp);
+ const inMemoryState = this.get(cpKey);
+ if (inMemoryState) {
+ return inMemoryState;
+ }
+
+ const cacheItem = this.cache.get(cpKey);
+ if (cacheItem === undefined) {
+ return null;
+ }
+
+ if (isInMemoryCacheItem(cacheItem)) {
+ // should not happen, in-memory state is handled above
+ throw new Error("Expected persistent key");
+ }
+
+ const persistedKey = cacheItem.value;
+ const dbReadTimer = this.metrics?.stateReloadDbReadTime.startTimer();
+ const stateBytes = await this.datastore.read(persistedKey);
+ dbReadTimer?.();
+
+ if (stateBytes === null) {
+ return null;
+ }
+ return {persistedKey, stateBytes};
+ }
+
+ /**
+ * Similar to get() api without reloading from disk
+ */
+ get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null {
+ this.metrics?.lookups.inc();
+ const cpKey = typeof cpOrKey === "string" ? cpOrKey : toCacheKey(cpOrKey);
+ const cacheItem = this.cache.get(cpKey);
+
+ if (cacheItem === undefined) {
+ return null;
+ }
+
+ this.metrics?.hits.inc();
+
+ if (cpKey === this.preComputedCheckpoint) {
+ this.preComputedCheckpointHits = (this.preComputedCheckpointHits ?? 0) + 1;
+ }
+
+ if (isInMemoryCacheItem(cacheItem)) {
+ const {state} = cacheItem;
+ this.metrics?.stateClonedCount.observe(state.clonedCount);
+ return state;
+ }
+
+ return null;
+ }
+
+ /**
+ * Add a state of a checkpoint to this cache, prune from memory if necessary.
+ */
+ add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void {
+ const cpHex = toCheckpointHex(cp);
+ const key = toCacheKey(cpHex);
+ const cacheItem = this.cache.get(key);
+ this.metrics?.adds.inc();
+ if (cacheItem !== undefined && isPersistedCacheItem(cacheItem)) {
+ const persistedKey = cacheItem.value;
+ // was persisted to disk, set back to memory
+ this.cache.set(key, {type: CacheItemType.inMemory, state, persistedKey});
+ this.logger.verbose("Added checkpoint state to memory but a persisted key existed", {
+ epoch: cp.epoch,
+ rootHex: cpHex.rootHex,
+ persistedKey: toHexString(persistedKey),
+ });
+ } else {
+ this.cache.set(key, {type: CacheItemType.inMemory, state});
+ this.logger.verbose("Added checkpoint state to memory", {epoch: cp.epoch, rootHex: cpHex.rootHex});
+ }
+ this.epochIndex.getOrDefault(cp.epoch).add(cpHex.rootHex);
+ }
+
+ /**
+ * Searches in-memory state for the latest cached state with a `root` without reload, starting with `epoch` and descending
+ */
+ getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null {
+ // sort epochs in descending order, only consider epochs lte `epoch`
+ const epochs = Array.from(this.epochIndex.keys())
+ .sort((a, b) => b - a)
+ .filter((e) => e <= maxEpoch);
+ for (const epoch of epochs) {
+ if (this.epochIndex.get(epoch)?.has(rootHex)) {
+ const inMemoryState = this.get({rootHex, epoch});
+ if (inMemoryState) {
+ return inMemoryState;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Searches state for the latest cached state with a `root`, reload if needed, starting with `epoch` and descending
+ * This is expensive api, should only be called in some important flows:
+ * - Validate a gossip block
+ * - Get block for processing
+ * - Regen head state
+ */
+ async getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise<CachedBeaconStateAllForks | null> {
+ // sort epochs in descending order, only consider epochs lte `epoch`
+ const epochs = Array.from(this.epochIndex.keys())
+ .sort((a, b) => b - a)
+ .filter((e) => e <= maxEpoch);
+ for (const epoch of epochs) {
+ if (this.epochIndex.get(epoch)?.has(rootHex)) {
+ try {
+ const state = await this.getOrReload({rootHex, epoch});
+ if (state) {
+ return state;
+ }
+ } catch (e) {
+ this.logger.debug("Error get or reload state", {epoch, rootHex}, e as Error);
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Update the precomputed checkpoint and return the number of hits for the
+ * previous one (if any).
+ */
+ updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null {
+ const previousHits = this.preComputedCheckpointHits;
+ this.preComputedCheckpoint = toCacheKey({rootHex, epoch});
+ this.preComputedCheckpointHits = 0;
+ return previousHits;
+ }
+
+ /**
+ * This is just to conform to the old implementation
+ */
+ prune(): void {
+ // do nothing
+ }
+
+ /**
+ * Prune all checkpoint states before the provided finalized epoch.
+ */
+ pruneFinalized(finalizedEpoch: Epoch): void {
+ for (const epoch of this.epochIndex.keys()) {
+ if (epoch < finalizedEpoch) {
+ this.deleteAllEpochItems(epoch).catch((e) =>
+ this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error)
+ );
+ }
+ }
+ }
+
+ /**
+ * After processing a block, prune from memory based on the view of that block.
+ * This will likely persist 1 state per epoch, at the last 1/3 of slot 0 of an epoch, although it'll be called on every last 1/3 of slot.
+ * Given the following block b was processed with b2, b1, b0 are ancestors in epoch (n-2), (n-1), n respectively
+ *
+ * epoch: (n-2) (n-1) n (n+1)
+ * |-----------|-----------|-----------|-----------|
+ * ^ ^ ^ ^
+ * | | | |
+ * block chain: b2---------->b1--------->b0-->b
+ *
+ * After processing block b, if maxEpochsInMemory is:
+ * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk
+ * - 1 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} checkpoint state to disk
+ * - 0 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} and {root: b0, epoch n} checkpoint state to disk
+ * - if any old epochs checkpoint states are persisted, no need to do it again
+ *
+ * Note that for each epoch there could be multiple checkpoint states, usually 2, one for Previous Root Checkpoint State and one for Current Root Checkpoint State.
+ * We normally only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later based on the view of the block.
+ * Other checkpoint states are pruned from memory.
+ *
+ * This design also covers the reorg scenario. Given block c in the same epoch n where c.slot > b.slot, c is not descendant of b, and c is built on top of c0
+ * instead of b0 (epoch (n - 1))
+ *
+ * epoch: (n-2) (n-1) n (n+1)
+ * |-----------|-----------|-----------|-----------|
+ * ^ ^ ^ ^ ^ ^
+ * | | | | | |
+ * block chain: b2---------->b1----->c0->b0-->b |
+ * ║ |
+ * ╚═══════════>c (reorg)
+ *
+ * After processing block c, if maxEpochsInMemory is:
+ * - 0 then we'll persist {root: c0, epoch: n} checkpoint state to disk. Note that regen should populate {root: c0, epoch: n} checkpoint state before.
+ *
+ * epoch: (n-1) n (n+1)
+ * |-------------------------------------------------------------|-------------------------------------------------------------|
+ * ^ ^ ^ ^
+ * _______ | | | |
+ * | | | | | |
+ * | db |====== reload ======> {root: b1, epoch: n-1} cp state ======> c0 block state ======> {root: c0, epoch: n} cp state =====> c block state
+ * |_______|
+ *
+ *
+ *
+ * - 1 then we'll persist {root: b1, epoch n-1} checkpoint state to disk. Note that at epoch n there is both {root: b0, epoch: n} and {root: c0, epoch: n} checkpoint states in memory
+ * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk, there are also 2 checkpoint states in memory at epoch n, same to the above (maxEpochsInMemory=1)
+ *
+ * As of Nov 2023, it takes 1.3s to 1.5s to persist a state on holesky on fast server. TODO:
+ * - improve state serialization time
+ * - or research how to only store diff against the finalized state
+ */
+ async processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise<number> {
+ let persistCount = 0;
+ // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states
+ const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b);
+ if (sortedEpochs.length <= this.maxEpochsInMemory) {
+ return 0;
+ }
+
+ for (const lowestEpoch of sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory)) {
+ const epochBoundarySlot = computeStartSlotAtEpoch(lowestEpoch);
+ const epochBoundaryRoot =
+ epochBoundarySlot === state.slot ? fromHexString(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot);
+ const epochBoundaryHex = toHexString(epochBoundaryRoot);
+
+ // for each epoch, usually there are 2 rootHex respective to the 2 checkpoint states: Previous Root Checkpoint State and Current Root Checkpoint State
+ for (const rootHex of this.epochIndex.get(lowestEpoch) ?? []) {
+ const cpKey = toCacheKey({epoch: lowestEpoch, rootHex});
+ const cacheItem = this.cache.get(cpKey);
+
+ if (cacheItem !== undefined && isInMemoryCacheItem(cacheItem)) {
+ // this is state in memory, we don't care if the checkpoint state is already persisted
+ let {persistedKey} = cacheItem;
+ const {state} = cacheItem;
+ const logMeta = {
+ stateSlot: state.slot,
+ rootHex,
+ epochBoundaryHex,
+ persistedKey: persistedKey ? toHexString(persistedKey) : "",
+ };
+
+ if (rootHex === epochBoundaryHex) {
+ if (persistedKey) {
+ // no need to persist
+ this.logger.verbose("Pruned checkpoint state from memory but no need to persist", logMeta);
+ } else {
+ // persist and do not update epochIndex
+ this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0);
+ const timer = this.metrics?.statePersistDuration.startTimer();
+ const cpPersist = {epoch: lowestEpoch, root: epochBoundaryRoot};
+ persistedKey = await this.datastore.write(cpPersist, state);
+ timer?.();
+ persistCount++;
+ this.logger.verbose("Pruned checkpoint state from memory and persisted to disk", {
+ ...logMeta,
+ persistedKey: toHexString(persistedKey),
+ });
+ }
+ // overwrite cpKey, this means the state is deleted from memory
+ this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey});
+ } else {
+ if (persistedKey) {
+ // persisted file will be eventually deleted by the archive task
+ // this also means the state is deleted from memory
+ this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey});
+ // do not update epochIndex
+ } else {
+ // delete the state from memory
+ this.cache.delete(cpKey);
+ this.epochIndex.get(lowestEpoch)?.delete(rootHex);
+ }
+ this.metrics?.statePruneFromMemoryCount.inc();
+ this.logger.verbose("Pruned checkpoint state from memory", logMeta);
+ }
+ }
+ }
+ }
+
+ return persistCount;
+ }
+
+ /**
+ * Find a seed state to reload the state of provided checkpoint. Based on the design of n-historical state:
+ *
+ * ╔════════════════════════════════════╗═══════════════╗
+ * ║ persisted to db or fs ║ in memory ║
+ * ║ reload if needed ║ ║
+ * ║ -----------------------------------║---------------║
+ * ║ epoch: (n-2) (n-1) ║ n (n+1) ║
+ * ║ |-------|-------|----║--|-------|----║
+ * ║ ^ ^ ║ ^ ^ ║
+ * ║ ║ ^ ^ ║
+ * ╚════════════════════════════════════╝═══════════════╝
+ *
+ * we always reload an epoch in the past. We'll start with epoch n then (n+1) prioritizing ones with the same view of `reloadedCp`.
+ *
+ * This could return null and we should get head state in that case.
+ */
+ findSeedStateToReload(reloadedCp: CheckpointHex): CachedBeaconStateAllForks | null {
+ const maxEpoch = Math.max(...Array.from(this.epochIndex.keys()));
+ const reloadedCpSlot = computeStartSlotAtEpoch(reloadedCp.epoch);
+ let firstState: CachedBeaconStateAllForks | null = null;
+ // no need to check epochs before `maxEpoch - this.maxEpochsInMemory + 1` because they are all persisted
+ for (let epoch = maxEpoch - this.maxEpochsInMemory + 1; epoch <= maxEpoch; epoch++) {
+ // if there's at least 1 state in memory in an epoch, just return the 1st one
+ if (firstState !== null) {
+ return firstState;
+ }
+
+ for (const rootHex of this.epochIndex.get(epoch) || []) {
+ const cpKey = toCacheKey({rootHex, epoch});
+ const cacheItem = this.cache.get(cpKey);
+ if (cacheItem === undefined) {
+ // should not happen
+ continue;
+ }
+ if (isInMemoryCacheItem(cacheItem)) {
+ const {state} = cacheItem;
+ if (firstState === null) {
+ firstState = state;
+ }
+
+ // amongst states of the same epoch, choose the one with the same view of reloadedCp
+ if (
+ reloadedCpSlot < state.slot &&
+ toHexString(getBlockRootAtSlot(state, reloadedCpSlot)) === reloadedCp.rootHex
+ ) {
+ return state;
+ }
+ }
+ }
+ }
+
+ return firstState;
+ }
+
+ clear(): void {
+ this.cache.clear();
+ this.epochIndex.clear();
+ }
+
+ /** ONLY FOR DEBUGGING PURPOSES. For lodestar debug API */
+ dumpSummary(): routes.lodestar.StateCacheItem[] {
+ return Array.from(this.cache.keys()).map((key) => {
+ const cp = fromCacheKey(key);
+ // TODO: add checkpoint key and persistent key to the summary
+ return {
+ slot: computeStartSlotAtEpoch(cp.epoch),
+ root: cp.rootHex,
+ reads: this.cache.readCount.get(key) ?? 0,
+ lastRead: this.cache.lastRead.get(key) ?? 0,
+ checkpointState: true,
+ };
+ });
+ }
+
+ /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */
+ dumpCheckpointKeys(): string[] {
+ return Array.from(this.cache.keys());
+ }
+
+ /**
+ * Delete all items of an epoch from disk and memory
+ */
+ private async deleteAllEpochItems(epoch: Epoch): Promise<void> {
+ let persistCount = 0;
+ const rootHexes = this.epochIndex.get(epoch) || [];
+ for (const rootHex of rootHexes) {
+ const key = toCacheKey({rootHex, epoch});
+ const cacheItem = this.cache.get(key);
+
+ if (cacheItem) {
+ const persistedKey = isPersistedCacheItem(cacheItem) ? cacheItem.value : cacheItem.persistedKey;
+ if (persistedKey) {
+ await this.datastore.remove(persistedKey);
+ persistCount++;
+ this.metrics?.persistedStateRemoveCount.inc();
+ }
+ }
+ this.cache.delete(key);
+ }
+ this.epochIndex.delete(epoch);
+ this.logger.verbose("Pruned finalized checkpoints states for epoch", {
+ epoch,
+ persistCount,
+ rootHexes: Array.from(rootHexes).join(","),
+ });
+ }
+}
+
+function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex {
+ return {
+ epoch: checkpoint.epoch,
+ rootHex: toHexString(checkpoint.root),
+ };
+}
+
+function toCacheKey(cp: CheckpointHex | phase0.Checkpoint): CacheKey {
+ if (isCheckpointHex(cp)) {
+ return `${cp.rootHex}_${cp.epoch}`;
+ }
+ return `${toHexString(cp.root)}_${cp.epoch}`;
+}
+
+function fromCacheKey(key: CacheKey): CheckpointHex {
+ const [rootHex, epoch] = key.split("_");
+ return {
+ rootHex,
+ epoch: Number(epoch),
+ };
+}
+
+function isCachedBeaconState(
+ stateOrBytes: CachedBeaconStateAllForks | LoadedStateBytesData
+): stateOrBytes is CachedBeaconStateAllForks {
+ return (stateOrBytes as CachedBeaconStateAllForks).slot !== undefined;
+}
+
+function isInMemoryCacheItem(cacheItem: CacheItem): cacheItem is InMemoryCacheItem {
+ return cacheItem.type === CacheItemType.inMemory;
+}
+
+function isPersistedCacheItem(cacheItem: CacheItem): cacheItem is PersistedCacheItem {
+ return cacheItem.type === CacheItemType.persisted;
+}
+
+function isCheckpointHex(cp: CheckpointHex | phase0.Checkpoint): cp is CheckpointHex {
+ return (cp as CheckpointHex).rootHex !== undefined;
+}
diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts
index 44523abf799c..3a04c4f4a258 100644
--- a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts
+++ b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts
@@ -4,15 +4,16 @@ import {CachedBeaconStateAllForks} from "@lodestar/state-transition";
import {routes} from "@lodestar/api";
import {Metrics} from "../../metrics/index.js";
import {MapTracker} from "./mapMetrics.js";
+import {BlockStateCache} from "./types.js";
const MAX_STATES = 3 * 32;
/**
- * In memory cache of CachedBeaconState
- *
- * Similar API to Repository
+ * Old implementation of StateCache
+ * - Prune per checkpoint so number of states ranges from 96 to 128
+ * - Keep a separate head state to make sure it is always available
*/
-export class StateContextCache {
+export class StateContextCache implements BlockStateCache {
/**
* Max number of states allowed in the cache
*/
diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts
index 0cb48f0e2ded..a177db9b7c87 100644
--- a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts
+++ b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts
@@ -5,6 +5,7 @@ import {MapDef} from "@lodestar/utils";
import {routes} from "@lodestar/api";
import {Metrics} from "../../metrics/index.js";
import {MapTracker} from "./mapMetrics.js";
+import {CheckpointStateCache as CheckpointStateCacheInterface, CacheItemType} from "./types.js";
export type CheckpointHex = {epoch: Epoch; rootHex: RootHex};
const MAX_EPOCHS = 10;
@@ -14,8 +15,9 @@ const MAX_EPOCHS = 10;
* belonging to checkpoint
*
* Similar API to Repository
+ * TODO: rename to MemoryCheckpointStateCache in the next PR of n-historical states
*/
-export class CheckpointStateCache {
+export class CheckpointStateCache implements CheckpointStateCacheInterface {
private readonly cache: MapTracker<string, CachedBeaconStateAllForks>;
/** Epoch -> Set<hex root> */
private readonly epochIndex = new MapDef<Epoch, Set<string>>(() => new Set());
@@ -27,11 +29,32 @@ export class CheckpointStateCache {
this.cache = new MapTracker(metrics?.cpStateCache);
if (metrics) {
this.metrics = metrics.cpStateCache;
- metrics.cpStateCache.size.addCollect(() => metrics.cpStateCache.size.set(this.cache.size));
- metrics.cpStateCache.epochSize.addCollect(() => metrics.cpStateCache.epochSize.set(this.epochIndex.size));
+ metrics.cpStateCache.size.addCollect(() =>
+ metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, this.cache.size)
+ );
+ metrics.cpStateCache.epochSize.addCollect(() =>
+ metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, this.epochIndex.size)
+ );
}
}
+ async getOrReload(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | null> {
+ return this.get(cp);
+ }
+
+ async getStateOrBytes(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | null> {
+ return this.get(cp);
+ }
+
+ async getOrReloadLatest(rootHex: string, maxEpoch: number): Promise<CachedBeaconStateAllForks | null> {
+ return this.getLatest(rootHex, maxEpoch);
+ }
+
+ async processState(): Promise<number> {
+ // do nothing, this class does not support pruning
+ return 0;
+ }
+
get(cp: CheckpointHex): CachedBeaconStateAllForks | null {
this.metrics?.lookups.inc();
const cpKey = toCheckpointKey(cp);
diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts
new file mode 100644
index 000000000000..5867d7d356c1
--- /dev/null
+++ b/packages/beacon-node/src/chain/stateCache/types.ts
@@ -0,0 +1,73 @@
+import {CachedBeaconStateAllForks} from "@lodestar/state-transition";
+import {Epoch, RootHex, phase0} from "@lodestar/types";
+import {routes} from "@lodestar/api";
+
+export type CheckpointHex = {epoch: Epoch; rootHex: RootHex};
+
+/**
+ * Lodestar currently keeps two state caches around.
+ *
+ * 1. BlockStateCache is keyed by state root, and intended to keep extremely recent states around (eg: post states from the latest blocks)
+ * These states are most likely to be useful for state transition of new blocks.
+ *
+ * 2. CheckpointStateCache is keyed by checkpoint, and intended to keep states which have just undergone an epoch transition.
+ * These states are useful for gossip verification and for avoiding an epoch transition during state transition of first-in-epoch blocks
+ */
+
+/**
+ * Store up to n recent block states.
+ *
+ * The cache key is state root
+ */
+export interface BlockStateCache {
+ get(rootHex: RootHex): CachedBeaconStateAllForks | null;
+ add(item: CachedBeaconStateAllForks): void;
+ setHeadState(item: CachedBeaconStateAllForks | null): void;
+ clear(): void;
+ size: number;
+ prune(headStateRootHex: RootHex): void;
+ deleteAllBeforeEpoch(finalizedEpoch: Epoch): void;
+ dumpSummary(): routes.lodestar.StateCacheItem[];
+}
+
+/**
+ * Store checkpoint states to preserve epoch transition, this helps lodestar run exactly 1 epoch transition per epoch in normal network conditions.
+ *
+ * There are 2 types of checkpoint states:
+ *
+ * - Previous Root Checkpoint State: where root is from previous epoch, this is added when we prepare for next slot,
+ * or to validate gossip block
+ * ```
+ * epoch: (n-2) (n-1) n (n+1)
+ * |-------|-------|-------|-------|
+ * root ---------------------^
+ * ```
+ *
+ * - Current Root Checkpoint State: this is added when we process block slot 0 of epoch n, note that this block could
+ * be skipped so we don't always have this checkpoint state
+ * ```
+ * epoch: (n-2) (n-1) n (n+1)
+ * |-------|-------|-------|-------|
+ * root ---------------------^
+ * ```
+ */
+export interface CheckpointStateCache {
+ init?: () => Promise<void>;
+ getOrReload(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | null>;
+ getStateOrBytes(cp: CheckpointHex): Promise<CachedBeaconStateAllForks | Uint8Array | null>;
+ get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null;
+ add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void;
+ getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null;
+ getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise<CachedBeaconStateAllForks | null>;
+ updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null;
+ prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void;
+ pruneFinalized(finalizedEpoch: Epoch): void;
+ processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise<number>;
+ clear(): void;
+ dumpSummary(): routes.lodestar.StateCacheItem[];
+}
+
+export enum CacheItemType {
+ persisted = "persisted",
+ inMemory = "in-memory",
+}
diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts
index 31e105911ab4..eae171631025 100644
--- a/packages/beacon-node/src/chain/validation/attestation.ts
+++ b/packages/beacon-node/src/chain/validation/attestation.ts
@@ -541,7 +541,7 @@ export function verifyHeadBlockAndTargetRoot(
targetRoot: Root,
attestationSlot: Slot,
attestationEpoch: Epoch,
- caller: string,
+ caller: RegenCaller,
maxSkipSlots?: number
): ProtoBlock {
const headBlock = verifyHeadBlockIsKnown(chain, beaconBlockRoot);
diff --git a/packages/beacon-node/src/chain/validation/blobSidecar.ts b/packages/beacon-node/src/chain/validation/blobSidecar.ts
index b5aab323c269..f1ea7bfa95c8 100644
--- a/packages/beacon-node/src/chain/validation/blobSidecar.ts
+++ b/packages/beacon-node/src/chain/validation/blobSidecar.ts
@@ -1,7 +1,7 @@
-import {ChainForkConfig} from "@lodestar/config";
-import {deneb, Root, Slot} from "@lodestar/types";
-import {toHex} from "@lodestar/utils";
-import {getBlobProposerSignatureSet, computeStartSlotAtEpoch} from "@lodestar/state-transition";
+import {deneb, Root, Slot, ssz} from "@lodestar/types";
+import {toHex, verifyMerkleBranch} from "@lodestar/utils";
+import {computeStartSlotAtEpoch, getBlockHeaderProposerSignatureSet} from "@lodestar/state-transition";
+import {KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, KZG_COMMITMENT_SUBTREE_INDEX0} from "@lodestar/params";
import {BlobSidecarGossipError, BlobSidecarErrorCode} from "../errors/blobSidecarError.js";
import {GossipAction} from "../errors/gossipValidation.js";
@@ -11,13 +11,11 @@ import {IBeaconChain} from "../interface.js";
import {RegenCaller} from "../regen/index.js";
export async function validateGossipBlobSidecar(
- config: ChainForkConfig,
chain: IBeaconChain,
- signedBlob: deneb.SignedBlobSidecar,
+ blobSidecar: deneb.BlobSidecar,
gossipIndex: number
): Promise<void> {
- const blobSidecar = signedBlob.message;
- const blobSlot = blobSidecar.slot;
+ const blobSlot = blobSidecar.signedBlockHeader.message.slot;
// [REJECT] The sidecar is for the correct topic -- i.e. sidecar.index matches the topic {index}.
if (blobSidecar.index !== gossipIndex) {
@@ -58,9 +56,10 @@ export async function validateGossipBlobSidecar(
// reboot if the `observed_block_producers` cache is empty. In that case, without this
// check, we will load the parent and state from disk only to find out later that we
// already know this block.
- const blockRoot = toHex(blobSidecar.blockRoot);
- if (chain.forkChoice.getBlockHex(blockRoot) !== null) {
- throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockRoot});
+ const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message);
+ const blockHex = toHex(blockRoot);
+ if (chain.forkChoice.getBlockHex(blockHex) !== null) {
+ throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockHex});
}
// TODO: freetheblobs - check for badblock
@@ -69,7 +68,7 @@ export async function validateGossipBlobSidecar(
// _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both
// gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is
// retrieved).
- const parentRoot = toHex(blobSidecar.blockParentRoot);
+ const parentRoot = toHex(blobSidecar.signedBlockHeader.message.parentRoot);
const parentBlock = chain.forkChoice.getBlockHex(parentRoot);
if (parentBlock === null) {
// If fork choice does *not* consider the parent to be a descendant of the finalized block,
@@ -97,18 +96,16 @@ export async function validateGossipBlobSidecar(
// getBlockSlotState also checks for whether the current finalized checkpoint is an ancestor of the block.
// As a result, we throw an IGNORE (whereas the spec says we should REJECT for this scenario).
// this is something we should change this in the future to make the code airtight to the spec.
- // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both
- // gossip and non-gossip sources) // _[REJECT]_ The blob's block's parent (defined by `sidecar.block_parent_root`) passes validation
- // The above validation will happen while importing
+ // [IGNORE] The block's parent (defined by block.parent_root) has been seen (via both gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is retrieved).
+ // [REJECT] The block's parent (defined by block.parent_root) passes validation.
const blockState = await chain.regen
- .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlob)
+ .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlock)
.catch(() => {
throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.PARENT_UNKNOWN, parentRoot});
});
- // _[REJECT]_ The proposer signature, `signed_blob_sidecar.signature`, is valid with respect to the
- // `sidecar.proposer_index` pubkey.
- const signatureSet = getBlobProposerSignatureSet(blockState, signedBlob);
+ // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey.
+ const signatureSet = getBlockHeaderProposerSignatureSet(blockState, blobSidecar.signedBlockHeader);
// Don't batch so verification is not delayed
if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) {
throw new BlobSidecarGossipError(GossipAction.REJECT, {
@@ -116,6 +113,15 @@ export async function validateGossipBlobSidecar(
});
}
+ // verify if the blob inclusion proof is correct
+ if (!validateInclusionProof(blobSidecar)) {
+ throw new BlobSidecarGossipError(GossipAction.REJECT, {
+ code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID,
+ slot: blobSidecar.signedBlockHeader.message.slot,
+ blobIdx: blobSidecar.index,
+ });
+ }
+
// _[IGNORE]_ The sidecar is the only sidecar with valid signature received for the tuple
// `(sidecar.block_root, sidecar.index)`
//
@@ -127,7 +133,7 @@ export async function validateGossipBlobSidecar(
// If the `proposer_index` cannot immediately be verified against the expected shuffling, the sidecar
// MAY be queued for later processing while proposers for the block's branch are calculated -- in such
// a case _do not_ `REJECT`, instead `IGNORE` this message.
- const proposerIndex = blobSidecar.proposerIndex;
+ const proposerIndex = blobSidecar.signedBlockHeader.message.proposerIndex;
if (blockState.epochCtx.getBeaconProposer(blobSlot) !== proposerIndex) {
throw new BlobSidecarGossipError(GossipAction.REJECT, {
code: BlobSidecarErrorCode.INCORRECT_PROPOSER,
@@ -168,16 +174,18 @@ export function validateBlobSidecars(
const proofs = [];
for (let index = 0; index < blobSidecars.length; index++) {
const blobSidecar = blobSidecars[index];
+ const blobBlockHeader = blobSidecar.signedBlockHeader.message;
+ const blobBlockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobBlockHeader);
if (
- blobSidecar.slot !== blockSlot ||
- !byteArrayEquals(blobSidecar.blockRoot, blockRoot) ||
+ blobBlockHeader.slot !== blockSlot ||
+ !byteArrayEquals(blobBlockRoot, blockRoot) ||
blobSidecar.index !== index ||
!byteArrayEquals(expectedKzgCommitments[index], blobSidecar.kzgCommitment)
) {
throw new Error(
- `Invalid blob with slot=${blobSidecar.slot} blockRoot=${toHex(blockRoot)} index=${
+ `Invalid blob with slot=${blobBlockHeader.slot} blobBlockRoot=${toHex(blobBlockRoot)} index=${
blobSidecar.index
- } for the block root=${toHex(blockRoot)} slot=${blockSlot} index=${index}`
+ } for the block blockRoot=${toHex(blockRoot)} slot=${blockSlot} index=${index}`
);
}
blobs.push(blobSidecar.blob);
@@ -207,3 +215,13 @@ function validateBlobsAndProofs(
throw Error("Invalid verifyBlobKzgProofBatch");
}
}
+
+function validateInclusionProof(blobSidecar: deneb.BlobSidecar): boolean {
+ return verifyMerkleBranch(
+ ssz.deneb.KZGCommitment.hashTreeRoot(blobSidecar.kzgCommitment),
+ blobSidecar.kzgCommitmentInclusionProof,
+ KZG_COMMITMENT_INCLUSION_PROOF_DEPTH,
+ KZG_COMMITMENT_SUBTREE_INDEX0 + blobSidecar.index,
+ blobSidecar.signedBlockHeader.message.bodyRoot
+ );
+}
diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts
index 58b99f2a37e0..07cc47fa54d8 100644
--- a/packages/beacon-node/src/db/beacon.ts
+++ b/packages/beacon-node/src/db/beacon.ts
@@ -21,6 +21,7 @@ import {
BLSToExecutionChangeRepository,
} from "./repositories/index.js";
import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js";
+import {CheckpointStateRepository} from "./repositories/checkpointState.js";
export type BeaconDbModules = {
config: ChainForkConfig;
@@ -35,6 +36,7 @@ export class BeaconDb implements IBeaconDb {
blobSidecarsArchive: BlobSidecarsArchiveRepository;
stateArchive: StateArchiveRepository;
+ checkpointState: CheckpointStateRepository;
voluntaryExit: VoluntaryExitRepository;
proposerSlashing: ProposerSlashingRepository;
@@ -67,6 +69,7 @@ export class BeaconDb implements IBeaconDb {
this.blobSidecarsArchive = new BlobSidecarsArchiveRepository(config, db);
this.stateArchive = new StateArchiveRepository(config, db);
+ this.checkpointState = new CheckpointStateRepository(config, db);
this.voluntaryExit = new VoluntaryExitRepository(config, db);
this.blsToExecutionChange = new BLSToExecutionChangeRepository(config, db);
this.proposerSlashing = new ProposerSlashingRepository(config, db);
diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts
index 1a3abfa33623..9dffd0608d52 100644
--- a/packages/beacon-node/src/db/buckets.ts
+++ b/packages/beacon-node/src/db/buckets.ts
@@ -28,6 +28,8 @@ export enum Bucket {
phase0_proposerSlashing = 14, // ValidatorIndex -> ProposerSlashing
phase0_attesterSlashing = 15, // Root -> AttesterSlashing
capella_blsToExecutionChange = 16, // ValidatorIndex -> SignedBLSToExecutionChange
+ // checkpoint states
+ allForks_checkpointState = 17, // Root -> allForks.BeaconState
// allForks_pendingBlock = 25, // Root -> SignedBeaconBlock // DEPRECATED on v0.30.0
phase0_depositEvent = 19, // depositIndex -> DepositEvent
diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts
index 58bf25c57aa7..6ffb8992f635 100644
--- a/packages/beacon-node/src/db/interface.ts
+++ b/packages/beacon-node/src/db/interface.ts
@@ -19,6 +19,7 @@ import {
BLSToExecutionChangeRepository,
} from "./repositories/index.js";
import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js";
+import {CheckpointStateRepository} from "./repositories/checkpointState.js";
/**
* The DB service manages the data layer of the beacon chain
@@ -36,6 +37,8 @@ export interface IBeaconDb {
// finalized states
stateArchive: StateArchiveRepository;
+ // checkpoint states
+ checkpointState: CheckpointStateRepository;
// op pool
voluntaryExit: VoluntaryExitRepository;
diff --git a/packages/beacon-node/src/db/repositories/blobSidecars.ts b/packages/beacon-node/src/db/repositories/blobSidecars.ts
index 576a03df9e61..e5750ed31b58 100644
--- a/packages/beacon-node/src/db/repositories/blobSidecars.ts
+++ b/packages/beacon-node/src/db/repositories/blobSidecars.ts
@@ -2,6 +2,7 @@ import {ValueOf, ContainerType} from "@chainsafe/ssz";
import {ChainForkConfig} from "@lodestar/config";
import {Db, Repository} from "@lodestar/db";
import {ssz} from "@lodestar/types";
+
import {Bucket, getBucketNameByValue} from "../buckets.js";
export const blobSidecarsWrapperSsz = new ContainerType(
@@ -14,10 +15,7 @@ export const blobSidecarsWrapperSsz = new ContainerType(
);
export type BlobSidecarsWrapper = ValueOf<typeof blobSidecarsWrapperSsz>;
-
export const BLOB_SIDECARS_IN_WRAPPER_INDEX = 44;
-// ssz.deneb.BlobSidecars.elementType.fixedSize;
-export const BLOBSIDECAR_FIXED_SIZE = 131256;
/**
* blobSidecarsWrapper by block root (= hash_tree_root(SignedBeaconBlock.message))
diff --git a/packages/beacon-node/src/db/repositories/checkpointState.ts b/packages/beacon-node/src/db/repositories/checkpointState.ts
new file mode 100644
index 000000000000..8848f4d26d3a
--- /dev/null
+++ b/packages/beacon-node/src/db/repositories/checkpointState.ts
@@ -0,0 +1,31 @@
+import {ChainForkConfig} from "@lodestar/config";
+import {Db, Repository} from "@lodestar/db";
+import {BeaconStateAllForks} from "@lodestar/state-transition";
+import {ssz} from "@lodestar/types";
+import {Bucket, getBucketNameByValue} from "../buckets.js";
+
+/**
+ * Store temporary checkpoint states.
+ * We should only put/get binary data from this repository, consumer will load it into an existing state ViewDU object.
+ */
+export class CheckpointStateRepository extends Repository<Uint8Array, BeaconStateAllForks> {
+ constructor(config: ChainForkConfig, db: Db) {
+ // Pick some type but won't be used. Casted to any because no type can match `BeaconStateAllForks`
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any
+ const type = ssz.phase0.BeaconState as any;
+ const bucket = Bucket.allForks_checkpointState;
+ super(config, db, bucket, type, getBucketNameByValue(bucket));
+ }
+
+ getId(): Uint8Array {
+ throw Error("CheckpointStateRepository does not work with value");
+ }
+
+ encodeValue(): Uint8Array {
+ throw Error("CheckpointStateRepository does not work with value");
+ }
+
+ decodeValue(): BeaconStateAllForks {
+ throw Error("CheckpointStateRepository does not work with value");
+ }
+}
diff --git a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts
index 3a1b4ddb0ce1..faa4e310e10a 100644
--- a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts
+++ b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts
@@ -1,8 +1,7 @@
import {EventEmitter} from "events";
import StrictEventEmitter from "strict-event-emitter-types";
import {fetch} from "@lodestar/api";
-import {ErrorAborted, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils";
-import {IGauge, IHistogram} from "../../metrics/interface.js";
+import {ErrorAborted, Gauge, Histogram, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils";
import {IJson, RpcPayload} from "../interface.js";
import {JwtClaim, encodeJwtToken} from "./jwt.js";
@@ -58,13 +57,13 @@ export type ReqOpts = {
};
export type JsonRpcHttpClientMetrics = {
- requestTime: IHistogram<"routeId">;
- streamTime: IHistogram<"routeId">;
- requestErrors: IGauge<"routeId">;
- requestUsedFallbackUrl: IGauge<"routeId">;
- activeRequests: IGauge<"routeId">;
- configUrlsCount: IGauge;
- retryCount: IGauge<"routeId">;
+ requestTime: Histogram<{routeId: string}>;
+ streamTime: Histogram<{routeId: string}>;
+ requestErrors: Gauge<{routeId: string}>;
+ requestUsedFallbackUrl: Gauge<{routeId: string}>;
+ activeRequests: Gauge<{routeId: string}>;
+ configUrlsCount: Gauge;
+ retryCount: Gauge<{routeId: string}>;
};
export interface IJsonRpcHttpClient {
diff --git a/packages/beacon-node/src/execution/builder/http.ts b/packages/beacon-node/src/execution/builder/http.ts
index 20b7d4751c81..c47e8471f199 100644
--- a/packages/beacon-node/src/execution/builder/http.ts
+++ b/packages/beacon-node/src/execution/builder/http.ts
@@ -1,10 +1,6 @@
import {byteArrayEquals, toHexString} from "@chainsafe/ssz";
import {allForks, bellatrix, Slot, Root, BLSPubkey, ssz, deneb, Wei} from "@lodestar/types";
-import {
- parseSignedBlindedBlockOrContents,
- parseExecutionPayloadAndBlobsBundle,
- reconstructFullBlockOrContents,
-} from "@lodestar/state-transition";
+import {parseExecutionPayloadAndBlobsBundle, reconstructFullBlockOrContents} from "@lodestar/state-transition";
import {ChainForkConfig} from "@lodestar/config";
import {Logger} from "@lodestar/logger";
import {getClient, Api as BuilderApi} from "@lodestar/api/builder";
@@ -110,26 +106,23 @@ export class ExecutionBuilderHttp implements IExecutionBuilder {
): Promise<{
header: allForks.ExecutionPayloadHeader;
executionPayloadValue: Wei;
- blindedBlobsBundle?: deneb.BlindedBlobsBundle;
+ blobKzgCommitments?: deneb.BlobKzgCommitments;
}> {
const res = await this.api.getHeader(slot, parentHash, proposerPubKey);
ApiError.assert(res, "execution.builder.getheader");
const {header, value: executionPayloadValue} = res.response.data.message;
- const {blindedBlobsBundle} = res.response.data.message as deneb.BuilderBid;
- return {header, executionPayloadValue, blindedBlobsBundle};
+ const {blobKzgCommitments} = res.response.data.message as deneb.BuilderBid;
+ return {header, executionPayloadValue, blobKzgCommitments};
}
async submitBlindedBlock(
- signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents
+ signedBlindedBlock: allForks.SignedBlindedBeaconBlock
): Promise<allForks.SignedBeaconBlockOrContents> {
- const res = await this.api.submitBlindedBlock(signedBlindedBlockOrContents);
+ const res = await this.api.submitBlindedBlock(signedBlindedBlock);
ApiError.assert(res, "execution.builder.submitBlindedBlock");
const {data} = res.response;
const {executionPayload, blobsBundle} = parseExecutionPayloadAndBlobsBundle(data);
- const {signedBlindedBlock, signedBlindedBlobSidecars} =
- parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents);
-
// some validations for execution payload
const expectedTransactionsRoot = signedBlindedBlock.message.body.executionPayloadHeader.transactionsRoot;
const actualTransactionsRoot = ssz.bellatrix.Transactions.hashTreeRoot(executionPayload.transactions);
@@ -141,7 +134,7 @@ export class ExecutionBuilderHttp implements IExecutionBuilder {
);
}
- const blobs = blobsBundle ? blobsBundle.blobs : null;
- return reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs});
+ const contents = blobsBundle ? {blobs: blobsBundle.blobs, kzgProofs: blobsBundle.proofs} : null;
+ return reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents});
}
}
diff --git a/packages/beacon-node/src/execution/builder/interface.ts b/packages/beacon-node/src/execution/builder/interface.ts
index e9a2cabb69ef..8754a3616610 100644
--- a/packages/beacon-node/src/execution/builder/interface.ts
+++ b/packages/beacon-node/src/execution/builder/interface.ts
@@ -25,9 +25,7 @@ export interface IExecutionBuilder {
): Promise<{
header: allForks.ExecutionPayloadHeader;
executionPayloadValue: Wei;
- blindedBlobsBundle?: deneb.BlindedBlobsBundle;
+ blobKzgCommitments?: deneb.BlobKzgCommitments;
}>;
- submitBlindedBlock(
- signedBlock: allForks.SignedBlindedBeaconBlockOrContents
- ): Promise<allForks.SignedBeaconBlockOrContents>;
+ submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise<allForks.SignedBeaconBlockOrContents>;
}
diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts
index 70df97ba1e4a..91ceabaf2770 100644
--- a/packages/beacon-node/src/execution/engine/http.ts
+++ b/packages/beacon-node/src/execution/engine/http.ts
@@ -363,7 +363,12 @@ export class ExecutionEngineHttp implements IExecutionEngine {
async getPayload(
fork: ForkName,
payloadId: PayloadId
- ): Promise<{executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle}> {
+ ): Promise<{
+ executionPayload: allForks.ExecutionPayload;
+ executionPayloadValue: Wei;
+ blobsBundle?: BlobsBundle;
+ shouldOverrideBuilder?: boolean;
+ }> {
const method =
ForkSeq[fork] >= ForkSeq.deneb
? "engine_getPayloadV3"
diff --git a/packages/beacon-node/src/execution/engine/interface.ts b/packages/beacon-node/src/execution/engine/interface.ts
index 9a7ee3963379..e5f612fc0965 100644
--- a/packages/beacon-node/src/execution/engine/interface.ts
+++ b/packages/beacon-node/src/execution/engine/interface.ts
@@ -136,7 +136,12 @@ export interface IExecutionEngine {
getPayload(
fork: ForkName,
payloadId: PayloadId
- ): Promise<{executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle}>;
+ ): Promise<{
+ executionPayload: allForks.ExecutionPayload;
+ executionPayloadValue: Wei;
+ blobsBundle?: BlobsBundle;
+ shouldOverrideBuilder?: boolean;
+ }>;
getPayloadBodiesByHash(blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>;
diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts
index 83a5ea3a7ed6..5779713435a5 100644
--- a/packages/beacon-node/src/execution/engine/mock.ts
+++ b/packages/beacon-node/src/execution/engine/mock.ts
@@ -1,5 +1,4 @@
import crypto from "node:crypto";
-import {kzgCommitmentToVersionedHash} from "@lodestar/state-transition";
import {bellatrix, deneb, RootHex, ssz} from "@lodestar/types";
import {fromHex, toHex} from "@lodestar/utils";
import {
@@ -12,6 +11,7 @@ import {
} from "@lodestar/params";
import {ZERO_HASH_HEX} from "../../constants/index.js";
import {ckzg} from "../../util/kzg.js";
+import {kzgCommitmentToVersionedHash} from "../../util/blobs.js";
import {quantityToNum} from "../../eth1/provider/utils.js";
import {
EngineApiRpcParamTypes,
diff --git a/packages/beacon-node/src/execution/engine/types.ts b/packages/beacon-node/src/execution/engine/types.ts
index 4f24480e0b96..72a0100f7a51 100644
--- a/packages/beacon-node/src/execution/engine/types.ts
+++ b/packages/beacon-node/src/execution/engine/types.ts
@@ -107,6 +107,7 @@ type ExecutionPayloadRpcWithValue = {
// even though CL tracks this as executionPayloadValue, EL returns this as blockValue
blockValue: QUANTITY;
blobsBundle?: BlobsBundleRpc;
+ shouldOverrideBuilder?: boolean;
};
type ExecutionPayloadResponse = ExecutionPayloadRpc | ExecutionPayloadRpcWithValue;
@@ -207,19 +208,28 @@ export function hasPayloadValue(response: ExecutionPayloadResponse): response is
export function parseExecutionPayload(
fork: ForkName,
response: ExecutionPayloadResponse
-): {executionPayload: allForks.ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle} {
+): {
+ executionPayload: allForks.ExecutionPayload;
+ executionPayloadValue: Wei;
+ blobsBundle?: BlobsBundle;
+ shouldOverrideBuilder?: boolean;
+} {
let data: ExecutionPayloadRpc;
let executionPayloadValue: Wei;
let blobsBundle: BlobsBundle | undefined;
+ let shouldOverrideBuilder: boolean;
+
if (hasPayloadValue(response)) {
executionPayloadValue = quantityToBigint(response.blockValue);
data = response.executionPayload;
blobsBundle = response.blobsBundle ? parseBlobsBundle(response.blobsBundle) : undefined;
+ shouldOverrideBuilder = response.shouldOverrideBuilder ?? false;
} else {
data = response;
// Just set it to zero as default
executionPayloadValue = BigInt(0);
blobsBundle = undefined;
+ shouldOverrideBuilder = false;
}
const executionPayload = {
@@ -269,7 +279,7 @@ export function parseExecutionPayload(
(executionPayload as deneb.ExecutionPayload).excessBlobGas = quantityToBigint(excessBlobGas);
}
- return {executionPayload, executionPayloadValue, blobsBundle};
+ return {executionPayload, executionPayloadValue, blobsBundle, shouldOverrideBuilder};
}
export function serializePayloadAttributes(data: PayloadAttributes): PayloadAttributesRpc {
diff --git a/packages/beacon-node/src/metrics/index.ts b/packages/beacon-node/src/metrics/index.ts
index fb2781333d66..a56591a04090 100644
--- a/packages/beacon-node/src/metrics/index.ts
+++ b/packages/beacon-node/src/metrics/index.ts
@@ -1,5 +1,4 @@
export * from "./metrics.js";
export * from "./server/index.js";
-export * from "./interface.js";
export * from "./nodeJsMetrics.js";
export {RegistryMetricCreator} from "./utils/registryMetricCreator.js";
diff --git a/packages/beacon-node/src/metrics/interface.ts b/packages/beacon-node/src/metrics/interface.ts
deleted file mode 100644
index 2e2a267ca13c..000000000000
--- a/packages/beacon-node/src/metrics/interface.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import {Gauge, Histogram} from "prom-client";
-
-type CollectFn<Labels extends string> = (metric: IGauge<Labels>) => void;
-
-export type IGauge<Labels extends string = string> = Pick<Gauge<Labels>, "inc" | "dec" | "set"> & {
- addCollect: (collectFn: CollectFn<Labels>) => void;
-};
-
-export type IHistogram<Labels extends string = string> = Pick<Histogram<Labels>, "observe" | "startTimer">;
-
-export type IAvgMinMax = {
- addGetValuesFn(getValuesFn: () => number[]): void;
- set(values: number[]): void;
-};
diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts b/packages/beacon-node/src/metrics/metrics/beacon.ts
index 8d9094f19a25..9366174ef6c6 100644
--- a/packages/beacon-node/src/metrics/metrics/beacon.ts
+++ b/packages/beacon-node/src/metrics/metrics/beacon.ts
@@ -1,4 +1,6 @@
+import {ProducedBlockSource} from "@lodestar/types";
import {RegistryMetricCreator} from "../utils/registryMetricCreator.js";
+import {BlockProductionStep, PayloadPreparationType} from "../../chain/produceBlock/index.js";
export type BeaconMetrics = ReturnType<typeof createBeaconMetrics>;
@@ -46,7 +48,7 @@ export function createBeaconMetrics(register: RegistryMetricCreator) {
// Additional Metrics
// TODO: Implement
- currentValidators: register.gauge<"status">({
+ currentValidators: register.gauge<{status: string}>({
name: "beacon_current_validators",
labelNames: ["status"],
help: "number of validators in current epoch",
@@ -115,55 +117,35 @@ export function createBeaconMetrics(register: RegistryMetricCreator) {
buckets: [1, 2, 3, 5, 7, 10, 20, 30, 50, 100],
}),
- blockProductionTime: register.histogram<"source">({
+ blockProductionTime: register.histogram<{source: ProducedBlockSource}>({
name: "beacon_block_production_seconds",
help: "Full runtime of block production",
buckets: [0.1, 1, 2, 4, 10],
labelNames: ["source"],
}),
- executionBlockProductionTimeSteps: register.histogram<"step">({
+ executionBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({
name: "beacon_block_production_execution_steps_seconds",
help: "Detailed steps runtime of execution block production",
buckets: [0.01, 0.1, 0.2, 0.5, 1],
- /**
- * - proposerSlashing
- * - attesterSlashings
- * - voluntaryExits
- * - blsToExecutionChanges
- * - attestations
- * - eth1DataAndDeposits
- * - syncAggregate
- * - executionPayload
- */
labelNames: ["step"],
}),
- builderBlockProductionTimeSteps: register.histogram<"step">({
+ builderBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({
name: "beacon_block_production_builder_steps_seconds",
help: "Detailed steps runtime of builder block production",
buckets: [0.01, 0.1, 0.2, 0.5, 1],
- /**
- * - proposerSlashing
- * - attesterSlashings
- * - voluntaryExits
- * - blsToExecutionChanges
- * - attestations
- * - eth1DataAndDeposits
- * - syncAggregate
- * - executionPayload
- */
labelNames: ["step"],
}),
- blockProductionRequests: register.gauge<"source">({
+ blockProductionRequests: register.gauge<{source: ProducedBlockSource}>({
name: "beacon_block_production_requests_total",
help: "Count of all block production requests",
labelNames: ["source"],
}),
- blockProductionSuccess: register.gauge<"source">({
+ blockProductionSuccess: register.gauge<{source: ProducedBlockSource}>({
name: "beacon_block_production_successes_total",
help: "Count of blocks successfully produced",
labelNames: ["source"],
}),
- blockProductionNumAggregated: register.histogram<"source">({
+ blockProductionNumAggregated: register.histogram<{source: ProducedBlockSource}>({
name: "beacon_block_production_num_aggregated_total",
help: "Count of all aggregated attestations in our produced block",
buckets: [32, 64, 96, 128],
@@ -173,34 +155,30 @@ export function createBeaconMetrics(register: RegistryMetricCreator) {
blockProductionCaches: {
producedBlockRoot: register.gauge({
name: "beacon_blockroot_produced_cache_total",
- help: "Count of cached produded block roots",
+ help: "Count of cached produced block roots",
}),
producedBlindedBlockRoot: register.gauge({
name: "beacon_blinded_blockroot_produced_cache_total",
- help: "Count of cached produded blinded block roots",
+ help: "Count of cached produced blinded block roots",
}),
- producedBlobSidecarsCache: register.gauge({
- name: "beacon_blobsidecars_produced_cache_total",
- help: "Count of cached produced blob sidecars",
- }),
- producedBlindedBlobSidecarsCache: register.gauge({
- name: "beacon_blinded_blobsidecars_produced_cache_total",
- help: "Count of cached produced blinded blob sidecars",
+ producedContentsCache: register.gauge({
+ name: "beacon_contents_produced_cache_total",
+ help: "Count of cached produced blob contents",
}),
},
blockPayload: {
payloadAdvancePrepTime: register.histogram({
name: "beacon_block_payload_prepare_time",
- help: "Time for perparing payload in advance",
+ help: "Time for preparing payload in advance",
buckets: [0.1, 1, 3, 5, 10],
}),
- payloadFetchedTime: register.histogram<"prepType">({
+ payloadFetchedTime: register.histogram<{prepType: PayloadPreparationType}>({
name: "beacon_block_payload_fetched_time",
help: "Time to fetch the payload from EL",
labelNames: ["prepType"],
}),
- emptyPayloads: register.gauge<"prepType">({
+ emptyPayloads: register.gauge<{prepType: PayloadPreparationType}>({
name: "beacon_block_payload_empty_total",
help: "Count of payload with empty transactions",
labelNames: ["prepType"],
diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts
index 8a22fe8f0a9b..f6b143913346 100644
--- a/packages/beacon-node/src/metrics/metrics/lodestar.ts
+++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts
@@ -1,6 +1,22 @@
+import {EpochTransitionStep, StateCloneSource, StateHashTreeRootSource} from "@lodestar/state-transition";
import {allForks} from "@lodestar/types";
-import {RegistryMetricCreator} from "../utils/registryMetricCreator.js";
+import {BlockSource} from "../../chain/blocks/types.js";
+import {JobQueueItemType} from "../../chain/bls/index.js";
+import {BlockErrorCode} from "../../chain/errors/index.js";
+import {InsertOutcome} from "../../chain/opPools/types.js";
+import {RegenCaller, RegenFnName} from "../../chain/regen/interface.js";
+import {ReprocessStatus} from "../../chain/reprocess.js";
+import {RejectReason} from "../../chain/seenCache/seenAttestationData.js";
+import {ExecutionPayloadStatus} from "../../execution/index.js";
+import {GossipType} from "../../network/index.js";
+import {CannotAcceptWorkReason, ReprocessRejectReason} from "../../network/processor/index.js";
+import {BackfillSyncMethod} from "../../sync/backfill/backfill.js";
+import {PendingBlockType} from "../../sync/interface.js";
+import {PeerSyncType, RangeSyncType} from "../../sync/utils/remoteSyncType.js";
import {LodestarMetadata} from "../options.js";
+import {RegistryMetricCreator} from "../utils/registryMetricCreator.js";
+import {OpSource} from "../validatorMonitor.js";
+import {CacheItemType} from "../../chain/stateCache/types.js";
export type LodestarMetrics = ReturnType<typeof createLodestarMetrics>;
@@ -14,7 +30,7 @@ export function createLodestarMetrics(
anchorState?: Pick<allForks.BeaconState, "genesisTime">
) {
if (metadata) {
- register.static({
+ register.static<LodestarMetadata>({
name: "lodestar_version",
help: "Lodestar version",
value: metadata,
@@ -33,34 +49,34 @@ export function createLodestarMetrics(
return {
gossipValidationQueue: {
- length: register.gauge<"topic">({
+ length: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_length",
help: "Count of total gossip validation queue length",
labelNames: ["topic"],
}),
- keySize: register.gauge<"topic">({
+ keySize: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_key_size",
help: "Count of total gossip validation queue key size",
labelNames: ["topic"],
}),
- droppedJobs: register.gauge<"topic">({
+ droppedJobs: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_dropped_jobs_total",
help: "Count of total gossip validation queue dropped jobs",
labelNames: ["topic"],
}),
- jobTime: register.histogram<"topic">({
+ jobTime: register.histogram<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_job_time_seconds",
help: "Time to process gossip validation queue job in seconds",
labelNames: ["topic"],
buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10],
}),
- jobWaitTime: register.histogram<"topic">({
+ jobWaitTime: register.histogram<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_job_wait_time_seconds",
help: "Time from job added to the queue to starting the job in seconds",
labelNames: ["topic"],
buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10],
}),
- concurrency: register.gauge<"topic">({
+ concurrency: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_queue_concurrency",
help: "Current count of jobs being run on network processor for topic",
labelNames: ["topic"],
@@ -79,22 +95,22 @@ export function createLodestarMetrics(
},
networkProcessor: {
- gossipValidationAccept: register.gauge<"topic">({
+ gossipValidationAccept: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_accept_total",
help: "Count of total gossip validation accept",
labelNames: ["topic"],
}),
- gossipValidationIgnore: register.gauge<"topic">({
+ gossipValidationIgnore: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_ignore_total",
help: "Count of total gossip validation ignore",
labelNames: ["topic"],
}),
- gossipValidationReject: register.gauge<"topic">({
+ gossipValidationReject: register.gauge<{topic: GossipType}>({
name: "lodestar_gossip_validation_reject_total",
help: "Count of total gossip validation reject",
labelNames: ["topic"],
}),
- gossipValidationError: register.gauge<"topic" | "error">({
+ gossipValidationError: register.gauge<{topic: GossipType; error: string}>({
name: "lodestar_gossip_validation_error_total",
help: "Count of total gossip validation errors detailed",
labelNames: ["topic", "error"],
@@ -108,7 +124,7 @@ export function createLodestarMetrics(
help: "Total calls to network processor execute work fn",
buckets: [0, 1, 5, 128],
}),
- canNotAcceptWork: register.gauge<"reason">({
+ canNotAcceptWork: register.gauge<{reason: CannotAcceptWorkReason}>({
name: "lodestar_network_processor_can_not_accept_work_total",
help: "Total times network processor can not accept work on executeWork",
labelNames: ["reason"],
@@ -121,7 +137,7 @@ export function createLodestarMetrics(
help: "Current count of pending items in reqRespBridgeReqCaller data structure",
}),
},
- networkWorkerWireEventsOnMainThreadLatency: register.histogram<"eventName">({
+ networkWorkerWireEventsOnMainThreadLatency: register.histogram<{eventName: string}>({
name: "lodestar_network_worker_wire_events_on_main_thread_latency_seconds",
help: "Latency in seconds to transmit network events to main thread across worker port",
labelNames: ["eventName"],
@@ -206,19 +222,19 @@ export function createLodestarMetrics(
},
apiRest: {
- responseTime: register.histogram<"operationId">({
+ responseTime: register.histogram<{operationId: string}>({
name: "lodestar_api_rest_response_time_seconds",
help: "REST API time to fulfill a request by operationId",
labelNames: ["operationId"],
// Request times range between 1ms to 100ms in normal conditions. Can get to 1-5 seconds if overloaded
buckets: [0.01, 0.1, 1],
}),
- requests: register.gauge<"operationId">({
+ requests: register.gauge<{operationId: string}>({
name: "lodestar_api_rest_requests_total",
help: "REST API total count requests by operationId",
labelNames: ["operationId"],
}),
- errors: register.gauge<"operationId">({
+ errors: register.gauge<{operationId: string}>({
name: "lodestar_api_rest_errors_total",
help: "REST API total count of errors by operationId",
labelNames: ["operationId"],
@@ -286,7 +302,7 @@ export function createLodestarMetrics(
help: "Time to call commit after process a single epoch transition in seconds",
buckets: [0.01, 0.05, 0.1, 0.2, 0.5, 0.75, 1],
}),
- epochTransitionStepTime: register.histogram<"step">({
+ epochTransitionStepTime: register.histogram<{step: EpochTransitionStep}>({
name: "lodestar_stfn_epoch_transition_step_seconds",
help: "Time to call each step of epoch transition in seconds",
labelNames: ["step"],
@@ -304,28 +320,28 @@ export function createLodestarMetrics(
help: "Time to call commit after process a single block in seconds",
buckets: [0.005, 0.01, 0.02, 0.05, 0.1, 1],
}),
- stateHashTreeRootTime: register.histogram<"source">({
+ stateHashTreeRootTime: register.histogram<{source: StateHashTreeRootSource}>({
name: "lodestar_stfn_hash_tree_root_seconds",
help: "Time to compute the hash tree root of a post state in seconds",
buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5],
labelNames: ["source"],
}),
- preStateBalancesNodesPopulatedMiss: register.gauge<"source">({
+ preStateBalancesNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({
name: "lodestar_stfn_balances_nodes_populated_miss_total",
help: "Total count state.balances nodesPopulated is false on stfn",
labelNames: ["source"],
}),
- preStateBalancesNodesPopulatedHit: register.gauge<"source">({
+ preStateBalancesNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({
name: "lodestar_stfn_balances_nodes_populated_hit_total",
help: "Total count state.balances nodesPopulated is true on stfn",
labelNames: ["source"],
}),
- preStateValidatorsNodesPopulatedMiss: register.gauge<"source">({
+ preStateValidatorsNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({
name: "lodestar_stfn_validators_nodes_populated_miss_total",
help: "Total count state.validators nodesPopulated is false on stfn",
labelNames: ["source"],
}),
- preStateValidatorsNodesPopulatedHit: register.gauge<"source">({
+ preStateValidatorsNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({
name: "lodestar_stfn_validators_nodes_populated_hit_total",
help: "Total count state.validators nodesPopulated is true on stfn",
labelNames: ["source"],
@@ -362,7 +378,7 @@ export function createLodestarMetrics(
},
blsThreadPool: {
- jobsWorkerTime: register.gauge<"workerId">({
+ jobsWorkerTime: register.gauge<{workerId: number}>({
name: "lodestar_bls_thread_pool_time_seconds_sum",
help: "Total time spent verifying signature sets measured on the worker",
labelNames: ["workerId"],
@@ -371,7 +387,7 @@ export function createLodestarMetrics(
name: "lodestar_bls_thread_pool_success_jobs_signature_sets_count",
help: "Count of total verified signature sets",
}),
- errorAggregateSignatureSetsCount: register.gauge<"type">({
+ errorAggregateSignatureSetsCount: register.gauge<{type: JobQueueItemType}>({
name: "lodestar_bls_thread_pool_error_aggregate_signature_sets_count",
help: "Count of error when aggregating pubkeys or signatures",
labelNames: ["type"],
@@ -397,12 +413,12 @@ export function createLodestarMetrics(
name: "lodestar_bls_thread_pool_job_groups_started_total",
help: "Count of total jobs groups started in bls thread pool, job groups include +1 jobs",
}),
- totalJobsStarted: register.gauge<"type">({
+ totalJobsStarted: register.gauge<{type: JobQueueItemType}>({
name: "lodestar_bls_thread_pool_jobs_started_total",
help: "Count of total jobs started in bls thread pool, jobs include +1 signature sets",
labelNames: ["type"],
}),
- totalSigSetsStarted: register.gauge<"type">({
+ totalSigSetsStarted: register.gauge<{type: JobQueueItemType}>({
name: "lodestar_bls_thread_pool_sig_sets_started_total",
help: "Count of total signature sets started in bls thread pool, sig sets include 1 pk, msg, sig",
labelNames: ["type"],
@@ -460,9 +476,15 @@ export function createLodestarMetrics(
name: "lodestar_bls_thread_pool_batchable_sig_sets_total",
help: "Count of total batchable signature sets",
}),
- signatureDeserializationMainThreadDuration: register.gauge({
+ signatureDeserializationMainThreadDuration: register.histogram({
name: "lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds",
help: "Total time spent deserializing signatures on main thread",
+ buckets: [0.001, 0.005, 0.01, 0.1],
+ }),
+ pubkeysAggregationMainThreadDuration: register.histogram({
+ name: "lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds",
+ help: "Total time spent aggregating pubkeys on main thread",
+ buckets: [0.001, 0.005, 0.01, 0.1],
}),
},
@@ -487,29 +509,29 @@ export function createLodestarMetrics(
name: "lodestar_sync_status",
help: "Range sync status: [Stalled, SyncingFinalized, SyncingHead, Synced]",
}),
- syncPeersBySyncType: register.gauge<"syncType">({
+ syncPeersBySyncType: register.gauge<{syncType: PeerSyncType}>({
name: "lodestar_sync_range_sync_peers",
help: "Count of peers by sync type [FullySynced, Advanced, Behind]",
labelNames: ["syncType"],
}),
- syncSwitchGossipSubscriptions: register.gauge<"action">({
+ syncSwitchGossipSubscriptions: register.gauge<{action: string}>({
name: "lodestar_sync_switch_gossip_subscriptions",
help: "Sync switched gossip subscriptions on/off",
labelNames: ["action"],
}),
syncRange: {
- syncChainsEvents: register.gauge<"syncType" | "event">({
+ syncChainsEvents: register.gauge<{syncType: RangeSyncType; event: string}>({
name: "lodestar_sync_chains_events_total",
help: "Total number of sync chains events events, labeled by syncType",
labelNames: ["syncType", "event"],
}),
- syncChains: register.gauge<"syncType">({
+ syncChains: register.gauge<{syncType: RangeSyncType}>({
name: "lodestar_sync_chains_count",
help: "Count of sync chains by syncType",
labelNames: ["syncType"],
}),
- syncChainsPeers: register.histogram<"syncType">({
+ syncChainsPeers: register.histogram<{syncType: RangeSyncType}>({
name: "lodestar_sync_chains_peer_count_by_type",
help: "Count of sync chain peers by syncType",
labelNames: ["syncType"],
@@ -522,12 +544,12 @@ export function createLodestarMetrics(
},
syncUnknownBlock: {
- switchNetworkSubscriptions: register.gauge<"action">({
+ switchNetworkSubscriptions: register.gauge<{action: string}>({
name: "lodestar_sync_unknown_block_network_subscriptions_count",
help: "Switch network subscriptions on/off",
labelNames: ["action"],
}),
- requests: register.gauge<"type">({
+ requests: register.gauge<{type: PendingBlockType}>({
name: "lodestar_sync_unknown_block_requests_total",
help: "Total number of unknown block events or requests",
labelNames: ["type"],
@@ -581,43 +603,43 @@ export function createLodestarMetrics(
// Gossip attestation
gossipAttestation: {
- useHeadBlockState: register.gauge<"caller">({
+ useHeadBlockState: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_use_head_block_state_count",
help: "Count of gossip attestation verification using head block state",
labelNames: ["caller"],
}),
- useHeadBlockStateDialedToTargetEpoch: register.gauge<"caller">({
+ useHeadBlockStateDialedToTargetEpoch: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_use_head_block_state_dialed_to_target_epoch_count",
help: "Count of gossip attestation verification using head block state and dialed to target epoch",
labelNames: ["caller"],
}),
- headSlotToAttestationSlot: register.histogram<"caller">({
+ headSlotToAttestationSlot: register.histogram<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_head_slot_to_attestation_slot",
help: "Slot distance between attestation slot and head slot",
labelNames: ["caller"],
buckets: [0, 1, 2, 4, 8, 16, 32, 64],
}),
- shufflingCacheHit: register.gauge<"caller">({
+ shufflingCacheHit: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_shuffling_cache_hit_count",
help: "Count of gossip attestation verification shuffling cache hit",
labelNames: ["caller"],
}),
- shufflingCacheMiss: register.gauge<"caller">({
+ shufflingCacheMiss: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_shuffling_cache_miss_count",
help: "Count of gossip attestation verification shuffling cache miss",
labelNames: ["caller"],
}),
- shufflingCacheRegenHit: register.gauge<"caller">({
+ shufflingCacheRegenHit: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_shuffling_cache_regen_hit_count",
help: "Count of gossip attestation verification shuffling cache regen hit",
labelNames: ["caller"],
}),
- shufflingCacheRegenMiss: register.gauge<"caller">({
+ shufflingCacheRegenMiss: register.gauge<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_shuffling_cache_regen_miss_count",
help: "Count of gossip attestation verification shuffling cache regen miss",
labelNames: ["caller"],
}),
- attestationSlotToClockSlot: register.histogram<"caller">({
+ attestationSlotToClockSlot: register.histogram<{caller: RegenCaller}>({
name: "lodestar_gossip_attestation_attestation_slot_to_clock_slot",
help: "Slot distance between clock slot and attestation slot",
labelNames: ["caller"],
@@ -649,29 +671,46 @@ export function createLodestarMetrics(
receivedToGossipValidate: register.histogram({
name: "lodestar_gossip_block_received_to_gossip_validate",
help: "Time elapsed between block received and block validated",
- buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4],
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
}),
receivedToStateTransition: register.histogram({
name: "lodestar_gossip_block_received_to_state_transition",
help: "Time elapsed between block received and block state transition",
- buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4],
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
}),
receivedToSignaturesVerification: register.histogram({
name: "lodestar_gossip_block_received_to_signatures_verification",
help: "Time elapsed between block received and block signatures verification",
- buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4],
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
}),
receivedToExecutionPayloadVerification: register.histogram({
name: "lodestar_gossip_block_received_to_execution_payload_verification",
help: "Time elapsed between block received and execution payload verification",
- buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4],
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
+ }),
+ receivedToBlobsAvailabilityTime: register.histogram<{numBlobs: number}>({
+ name: "lodestar_gossip_block_received_to_blobs_availability_time",
+ help: "Time elapsed between block received and blobs became available",
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
+ labelNames: ["numBlobs"],
+ }),
+ receivedToFullyVerifiedTime: register.histogram({
+ name: "lodestar_gossip_block_received_to_fully_verified_time",
+ help: "Time elapsed between block received and fully verified state, signatures and payload",
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
+ }),
+ verifiedToBlobsAvailabiltyTime: register.histogram<{numBlobs: number}>({
+ name: "lodestar_gossip_block_verified_to_blobs_availability_time",
+ help: "Time elapsed between block verified and blobs became available",
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
+ labelNames: ["numBlobs"],
}),
receivedToBlockImport: register.histogram({
name: "lodestar_gossip_block_received_to_block_import",
help: "Time elapsed between block received and block import",
- buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4],
+ buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4],
}),
- processBlockErrors: register.gauge<"error">({
+ processBlockErrors: register.gauge<{error: BlockErrorCode | "NOT_BLOCK_ERROR"}>({
name: "lodestar_gossip_block_process_block_errors",
help: "Count of errors, by error type, while processing blocks",
labelNames: ["error"],
@@ -702,13 +741,13 @@ export function createLodestarMetrics(
name: "lodestar_import_block_set_head_after_first_interval_total",
help: "Total times an imported block is set as head after the first slot interval",
}),
- bySource: register.gauge<"source">({
+ bySource: register.gauge<{source: BlockSource}>({
name: "lodestar_import_block_by_source_total",
help: "Total number of imported blocks by source",
labelNames: ["source"],
}),
},
- engineNotifyNewPayloadResult: register.gauge<"result">({
+ engineNotifyNewPayloadResult: register.gauge<{result: ExecutionPayloadStatus}>({
name: "lodestar_execution_engine_notify_new_payload_result_total",
help: "The total result of calling notifyNewPayload execution engine api",
labelNames: ["result"],
@@ -722,7 +761,7 @@ export function createLodestarMetrics(
name: "lodestar_backfill_prev_fin_or_ws_slot",
help: "Slot of previous finalized or wsCheckpoint block to be validated",
}),
- totalBlocks: register.gauge<"method">({
+ totalBlocks: register.gauge<{method: BackfillSyncMethod}>({
name: "lodestar_backfill_sync_blocks_total",
help: "Total amount of backfilled blocks",
labelNames: ["method"],
@@ -753,7 +792,7 @@ export function createLodestarMetrics(
name: "lodestar_oppool_attestation_pool_size",
help: "Current size of the AttestationPool = total attestations unique by data and slot",
}),
- attestationPoolInsertOutcome: register.counter<"insertOutcome">({
+ attestationPoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({
name: "lodestar_attestation_pool_insert_outcome_total",
help: "Total number of InsertOutcome as a result of adding an attestation in a pool",
labelNames: ["insertOutcome"],
@@ -778,7 +817,7 @@ export function createLodestarMetrics(
name: "lodestar_oppool_sync_committee_message_pool_size",
help: "Current size of the SyncCommitteeMessagePool unique by slot subnet and block root",
}),
- syncCommitteeMessagePoolInsertOutcome: register.counter<"insertOutcome">({
+ syncCommitteeMessagePoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({
name: "lodestar_oppool_sync_committee_message_insert_outcome_total",
help: "Total number of InsertOutcome as a result of adding a SyncCommitteeMessage to pool",
labelNames: ["insertOutcome"],
@@ -804,7 +843,7 @@ export function createLodestarMetrics(
// Validator Monitor Metrics (per-epoch summaries)
// Only track prevEpochOnChainBalance per index
- prevEpochOnChainBalance: register.gauge<"index">({
+ prevEpochOnChainBalance: register.gauge<{index: number}>({
name: "validator_monitor_prev_epoch_on_chain_balance",
help: "Balance of validator after an epoch",
labelNames: ["index"],
@@ -913,12 +952,12 @@ export function createLodestarMetrics(
help: "The count of times a sync signature was seen inside an aggregate",
buckets: [0, 1, 2, 3, 5, 10],
}),
- prevEpochAttestationSummary: register.gauge<"summary">({
+ prevEpochAttestationSummary: register.gauge<{summary: string}>({
name: "validator_monitor_prev_epoch_attestation_summary",
help: "Best guess of the node of the result of previous epoch validators attestation actions and causality",
labelNames: ["summary"],
}),
- prevEpochBlockProposalSummary: register.gauge<"summary">({
+ prevEpochBlockProposalSummary: register.gauge<{summary: string}>({
name: "validator_monitor_prev_epoch_block_proposal_summary",
help: "Best guess of the node of the result of previous epoch validators block proposal actions and causality",
labelNames: ["summary"],
@@ -926,12 +965,12 @@ export function createLodestarMetrics(
// Validator Monitor Metrics (real-time)
- unaggregatedAttestationTotal: register.gauge<"src">({
+ unaggregatedAttestationTotal: register.gauge<{src: OpSource}>({
name: "validator_monitor_unaggregated_attestation_total",
help: "Number of unaggregated attestations seen",
labelNames: ["src"],
}),
- unaggregatedAttestationDelaySeconds: register.histogram<"src">({
+ unaggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({
name: "validator_monitor_unaggregated_attestation_delay_seconds",
help: "The delay between when the validator should send the attestation and when it was received",
labelNames: ["src"],
@@ -945,23 +984,23 @@ export function createLodestarMetrics(
// refine if we want more reasonable values
buckets: [0, 10, 20, 30],
}),
- aggregatedAttestationTotal: register.gauge<"src">({
+ aggregatedAttestationTotal: register.gauge<{src: OpSource}>({
name: "validator_monitor_aggregated_attestation_total",
help: "Number of aggregated attestations seen",
labelNames: ["src"],
}),
- aggregatedAttestationDelaySeconds: register.histogram<"src">({
+ aggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({
name: "validator_monitor_aggregated_attestation_delay_seconds",
help: "The delay between then the validator should send the aggregate and when it was received",
labelNames: ["src"],
buckets: [0.1, 0.25, 0.5, 1, 2, 5, 10],
}),
- attestationInAggregateTotal: register.gauge<"src">({
+ attestationInAggregateTotal: register.gauge<{src: OpSource}>({
name: "validator_monitor_attestation_in_aggregate_total",
help: "Number of times an attestation has been seen in an aggregate",
labelNames: ["src"],
}),
- attestationInAggregateDelaySeconds: register.histogram<"src">({
+ attestationInAggregateDelaySeconds: register.histogram<{src: OpSource}>({
name: "validator_monitor_attestation_in_aggregate_delay_seconds",
help: "The delay between when the validator should send the aggregate and when it was received",
labelNames: ["src"],
@@ -985,12 +1024,12 @@ export function createLodestarMetrics(
name: "validator_monitor_sync_signature_in_aggregate_total",
help: "Number of times a sync signature has been seen in an aggregate",
}),
- beaconBlockTotal: register.gauge<"src">({
+ beaconBlockTotal: register.gauge<{src: OpSource}>({
name: "validator_monitor_beacon_block_total",
help: "Total number of beacon blocks seen",
labelNames: ["src"],
}),
- beaconBlockDelaySeconds: register.histogram<"src">({
+ beaconBlockDelaySeconds: register.histogram<{src: OpSource}>({
name: "validator_monitor_beacon_block_delay_seconds",
help: "The delay between when the validator should send the block and when it was received",
labelNames: ["src"],
@@ -1060,13 +1099,15 @@ export function createLodestarMetrics(
name: "lodestar_cp_state_cache_adds_total",
help: "Total number of items added in checkpoint state cache",
}),
- size: register.gauge({
+ size: register.gauge<{type: CacheItemType}>({
name: "lodestar_cp_state_cache_size",
help: "Checkpoint state cache size",
+ labelNames: ["type"],
}),
- epochSize: register.gauge({
+ epochSize: register.gauge<{type: CacheItemType}>({
name: "lodestar_cp_state_epoch_size",
help: "Checkpoint state cache size",
+ labelNames: ["type"],
}),
reads: register.avgMinMax({
name: "lodestar_cp_state_epoch_reads",
@@ -1081,6 +1122,44 @@ export function createLodestarMetrics(
help: "Histogram of cloned count per state every time state.clone() is called",
buckets: [1, 2, 5, 10, 50, 250],
}),
+ statePersistDuration: register.histogram({
+ name: "lodestar_cp_state_cache_state_persist_seconds",
+ help: "Histogram of time to persist state to db",
+ buckets: [0.1, 0.5, 1, 2, 3, 4],
+ }),
+ statePruneFromMemoryCount: register.gauge({
+ name: "lodestar_cp_state_cache_state_prune_from_memory_count",
+ help: "Total number of states pruned from memory",
+ }),
+ statePersistSecFromSlot: register.histogram({
+ name: "lodestar_cp_state_cache_state_persist_seconds_from_slot",
+ help: "Histogram of time to persist state to db since the clock slot",
+ buckets: [0, 2, 4, 6, 8, 10, 12],
+ }),
+ stateReloadDuration: register.histogram({
+ name: "lodestar_cp_state_cache_state_reload_seconds",
+ help: "Histogram of time to load state from db",
+ buckets: [0, 2, 4, 6, 8, 10, 12],
+ }),
+ stateReloadEpochDiff: register.histogram({
+ name: "lodestar_cp_state_cache_state_reload_epoch_diff",
+ help: "Histogram of epoch difference between seed state epoch and loaded state epoch",
+ buckets: [0, 1, 2, 4, 8, 16, 32],
+ }),
+ stateReloadSecFromSlot: register.histogram({
+ name: "lodestar_cp_state_cache_state_reload_seconds_from_slot",
+ help: "Histogram of time to load state from db since the clock slot",
+ buckets: [0, 2, 4, 6, 8, 10, 12],
+ }),
+ stateReloadDbReadTime: register.histogram({
+ name: "lodestar_cp_state_cache_state_reload_db_read_seconds",
+ help: "Histogram of time to load state bytes from db",
+ buckets: [0.01, 0.05, 0.1, 0.2, 0.5],
+ }),
+ persistedStateRemoveCount: register.gauge({
+ name: "lodestar_cp_state_cache_persisted_state_remove_count",
+ help: "Total number of persisted states removed",
+ }),
},
balancesCache: {
@@ -1092,7 +1171,7 @@ export function createLodestarMetrics(
name: "lodestar_balances_cache_misses_total",
help: "Total number of balances cache misses",
}),
- closestStateResult: register.counter<"stateId">({
+ closestStateResult: register.counter<{stateId: string}>({
name: "lodestar_balances_cache_closest_state_result_total",
help: "Total number of stateIds returned as closest justified balances state by id",
labelNames: ["stateId"],
@@ -1170,7 +1249,7 @@ export function createLodestarMetrics(
name: "lodestar_seen_cache_attestation_data_miss_total",
help: "Total number of attestation data miss in SeenAttestationData",
}),
- reject: register.gauge<"reason">({
+ reject: register.gauge<{reason: RejectReason}>({
name: "lodestar_seen_cache_attestation_data_reject_total",
help: "Total number of attestation data rejected in SeenAttestationData",
labelNames: ["reason"],
@@ -1178,23 +1257,23 @@ export function createLodestarMetrics(
},
},
- regenFnCallTotal: register.gauge<"entrypoint" | "caller">({
+ regenFnCallTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({
name: "lodestar_regen_fn_call_total",
help: "Total number of calls for regen functions",
labelNames: ["entrypoint", "caller"],
}),
- regenFnQueuedTotal: register.gauge<"entrypoint" | "caller">({
+ regenFnQueuedTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({
name: "lodestar_regen_fn_queued_total",
help: "Total number of calls queued for regen functions",
labelNames: ["entrypoint", "caller"],
}),
- regenFnCallDuration: register.histogram<"entrypoint" | "caller">({
+ regenFnCallDuration: register.histogram<{entrypoint: RegenFnName; caller: RegenCaller}>({
name: "lodestar_regen_fn_call_duration",
help: "regen function duration",
labelNames: ["entrypoint", "caller"],
buckets: [0.1, 1, 10, 100],
}),
- regenFnTotalErrors: register.gauge<"entrypoint" | "caller">({
+ regenFnTotalErrors: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({
name: "lodestar_regen_fn_errors_total",
help: "regen function total errors",
labelNames: ["entrypoint", "caller"],
@@ -1206,7 +1285,7 @@ export function createLodestarMetrics(
// Precompute next epoch transition
precomputeNextEpochTransition: {
- count: register.counter<"result">({
+ count: register.counter<{result: string}>({
name: "lodestar_precompute_next_epoch_transition_result_total",
labelNames: ["result"],
help: "Total number of precomputeNextEpochTransition runs by result",
@@ -1219,6 +1298,11 @@ export function createLodestarMetrics(
name: "lodestar_precompute_next_epoch_transition_waste_total",
help: "Total number of precomputing next epoch transition wasted",
}),
+ duration: register.histogram({
+ name: "lodestar_precompute_next_epoch_transition_duration_seconds",
+ help: "Duration of precomputeNextEpochTransition, including epoch transition and hashTreeRoot",
+ buckets: [1, 2, 3, 4, 8],
+ }),
},
// reprocess attestations
@@ -1235,14 +1319,15 @@ export function createLodestarMetrics(
name: "lodestar_reprocess_attestations_wait_time_resolve_seconds",
help: "Time to wait for unknown block in seconds",
}),
- reject: register.gauge<"reason">({
+ reject: register.gauge<{reason: ReprocessStatus}>({
name: "lodestar_reprocess_attestations_reject_total",
help: "Total number of attestations are rejected to reprocess",
labelNames: ["reason"],
}),
- waitSecBeforeReject: register.gauge<"reason">({
+ waitSecBeforeReject: register.gauge<{reason: ReprocessStatus}>({
name: "lodestar_reprocess_attestations_wait_time_reject_seconds",
help: "Time to wait for unknown block before being rejected",
+ labelNames: ["reason"],
}),
},
@@ -1264,24 +1349,25 @@ export function createLodestarMetrics(
name: "lodestar_reprocess_gossip_attestations_wait_time_resolve_seconds",
help: "Time to wait for unknown block in seconds",
}),
- reject: register.gauge<"reason">({
+ reject: register.gauge<{reason: ReprocessRejectReason}>({
name: "lodestar_reprocess_gossip_attestations_reject_total",
help: "Total number of attestations are rejected to reprocess",
labelNames: ["reason"],
}),
- waitSecBeforeReject: register.gauge<"reason">({
+ waitSecBeforeReject: register.gauge<{reason: ReprocessRejectReason}>({
name: "lodestar_reprocess_gossip_attestations_wait_time_reject_seconds",
help: "Time to wait for unknown block before being rejected",
+ labelNames: ["reason"],
}),
},
lightclientServer: {
- onSyncAggregate: register.gauge<"event">({
+ onSyncAggregate: register.gauge<{event: string}>({
name: "lodestar_lightclient_server_on_sync_aggregate_event_total",
help: "Total number of relevant events onSyncAggregate fn",
labelNames: ["event"],
}),
- highestSlot: register.gauge<"item">({
+ highestSlot: register.gauge<{item: string}>({
name: "lodestar_lightclient_server_highest_slot",
help: "Current highest slot of items stored by LightclientServer",
labelNames: ["item"],
@@ -1392,7 +1478,11 @@ export function createLodestarMetrics(
}),
// Merge details
- eth1MergeBlockDetails: register.gauge<"terminalBlockHash" | "terminalBlockNumber" | "terminalBlockTD">({
+ eth1MergeBlockDetails: register.gauge<{
+ terminalBlockHash: string;
+ terminalBlockNumber: string;
+ terminalBlockTD: string;
+ }>({
name: "lodestar_eth1_merge_block_details",
help: "If found then 1 with terminal block details",
labelNames: ["terminalBlockHash", "terminalBlockNumber", "terminalBlockTD"],
@@ -1400,36 +1490,36 @@ export function createLodestarMetrics(
},
eth1HttpClient: {
- requestTime: register.histogram<"routeId">({
+ requestTime: register.histogram<{routeId: string}>({
name: "lodestar_eth1_http_client_request_time_seconds",
help: "eth1 JsonHttpClient - histogram or roundtrip request times",
labelNames: ["routeId"],
// Provide max resolution on problematic values around 1 second
buckets: [0.1, 0.5, 1, 2, 5, 15],
}),
- streamTime: register.histogram<"routeId">({
+ streamTime: register.histogram<{routeId: string}>({
name: "lodestar_eth1_http_client_stream_time_seconds",
help: "eth1 JsonHttpClient - streaming time by routeId",
labelNames: ["routeId"],
// Provide max resolution on problematic values around 1 second
buckets: [0.1, 0.5, 1, 2, 5, 15],
}),
- requestErrors: register.gauge<"routeId">({
+ requestErrors: register.gauge<{routeId: string}>({
name: "lodestar_eth1_http_client_request_errors_total",
help: "eth1 JsonHttpClient - total count of request errors",
labelNames: ["routeId"],
}),
- retryCount: register.gauge<"routeId">({
+ retryCount: register.gauge<{routeId: string}>({
name: "lodestar_eth1_http_client_request_retries_total",
help: "eth1 JsonHttpClient - total count of request retries",
labelNames: ["routeId"],
}),
- requestUsedFallbackUrl: register.gauge({
+ requestUsedFallbackUrl: register.gauge<{routeId: string}>({
name: "lodestar_eth1_http_client_request_used_fallback_url_total",
help: "eth1 JsonHttpClient - total count of requests on fallback url(s)",
labelNames: ["routeId"],
}),
- activeRequests: register.gauge({
+ activeRequests: register.gauge<{routeId: string}>({
name: "lodestar_eth1_http_client_active_requests",
help: "eth1 JsonHttpClient - current count of active requests",
labelNames: ["routeId"],
@@ -1441,36 +1531,36 @@ export function createLodestarMetrics(
},
executionEnginerHttpClient: {
- requestTime: register.histogram<"routeId">({
+ requestTime: register.histogram<{routeId: string}>({
name: "lodestar_execution_engine_http_client_request_time_seconds",
help: "ExecutionEngineHttp client - histogram or roundtrip request times",
labelNames: ["routeId"],
// Provide max resolution on problematic values around 1 second
buckets: [0.1, 0.5, 1, 2, 5, 15],
}),
- streamTime: register.histogram<"routeId">({
+ streamTime: register.histogram<{routeId: string}>({
name: "lodestar_execution_engine_http_client_stream_time_seconds",
help: "ExecutionEngineHttp client - streaming time by routeId",
labelNames: ["routeId"],
// Provide max resolution on problematic values around 1 second
buckets: [0.1, 0.5, 1, 2, 5, 15],
}),
- requestErrors: register.gauge<"routeId">({
+ requestErrors: register.gauge<{routeId: string}>({
name: "lodestar_execution_engine_http_client_request_errors_total",
help: "ExecutionEngineHttp client - total count of request errors",
labelNames: ["routeId"],
}),
- retryCount: register.gauge<"routeId">({
+ retryCount: register.gauge<{routeId: string}>({
name: "lodestar_execution_engine_http_client_request_retries_total",
help: "ExecutionEngineHttp client - total count of request retries",
labelNames: ["routeId"],
}),
- requestUsedFallbackUrl: register.gauge({
+ requestUsedFallbackUrl: register.gauge<{routeId: string}>({
name: "lodestar_execution_engine_http_client_request_used_fallback_url_total",
help: "ExecutionEngineHttp client - total count of requests on fallback url(s)",
labelNames: ["routeId"],
}),
- activeRequests: register.gauge({
+ activeRequests: register.gauge<{routeId: string}>({
name: "lodestar_execution_engine_http_client_active_requests",
help: "ExecutionEngineHttp client - current count of active requests",
labelNames: ["routeId"],
@@ -1482,32 +1572,32 @@ export function createLodestarMetrics(
},
builderHttpClient: {
- requestTime: register.histogram<"routeId">({
+ requestTime: register.histogram<{routeId: string}>({
name: "lodestar_builder_http_client_request_time_seconds",
help: "Histogram of builder http client request time by routeId",
labelNames: ["routeId"],
// Expected times are ~ 50-500ms, but in an overload NodeJS they can be greater
buckets: [0.01, 0.1, 1, 5],
}),
- streamTime: register.histogram<"routeId">({
+ streamTime: register.histogram<{routeId: string}>({
name: "lodestar_builder_http_client_stream_time_seconds",
help: "Builder api - streaming time by routeId",
labelNames: ["routeId"],
// Provide max resolution on problematic values around 1 second
buckets: [0.1, 0.5, 1, 2, 5, 15],
}),
- requestErrors: register.gauge<"routeId">({
+ requestErrors: register.gauge<{routeId: string}>({
name: "lodestar_builder_http_client_request_errors_total",
help: "Total count of errors on builder http client requests by routeId",
labelNames: ["routeId"],
}),
- requestToFallbacks: register.gauge<"routeId">({
+ requestToFallbacks: register.gauge<{routeId: string}>({
name: "lodestar_builder_http_client_request_to_fallbacks_total",
help: "Total count of requests to fallback URLs on builder http API by routeId",
labelNames: ["routeId"],
}),
- urlsScore: register.gauge<"urlIndex">({
+ urlsScore: register.gauge<{urlIndex: number}>({
name: "lodestar_builder_http_client_urls_score",
help: "Current score of builder http URLs by url index",
labelNames: ["urlIndex"],
@@ -1515,22 +1605,22 @@ export function createLodestarMetrics(
},
db: {
- dbReadReq: register.gauge<"bucket">({
+ dbReadReq: register.gauge<{bucket: string}>({
name: "lodestar_db_read_req_total",
help: "Total count of db read requests, may read 0 or more items",
labelNames: ["bucket"],
}),
- dbReadItems: register.gauge<"bucket">({
+ dbReadItems: register.gauge<{bucket: string}>({
name: "lodestar_db_read_items_total",
help: "Total count of db read items, item = key | value | entry",
labelNames: ["bucket"],
}),
- dbWriteReq: register.gauge<"bucket">({
+ dbWriteReq: register.gauge<{bucket: string}>({
name: "lodestar_db_write_req_total",
help: "Total count of db write requests, may write 0 or more items",
labelNames: ["bucket"],
}),
- dbWriteItems: register.gauge<"bucket">({
+ dbWriteItems: register.gauge<{bucket: string}>({
name: "lodestar_db_write_items_total",
help: "Total count of db write items",
labelNames: ["bucket"],
diff --git a/packages/beacon-node/src/metrics/server/http.ts b/packages/beacon-node/src/metrics/server/http.ts
index b699471e07d5..d8fbb289e951 100644
--- a/packages/beacon-node/src/metrics/server/http.ts
+++ b/packages/beacon-node/src/metrics/server/http.ts
@@ -15,6 +15,11 @@ export type HttpMetricsServer = {
close(): Promise;
};
+enum RequestStatus {
+ success = "success",
+ error = "error",
+}
+
export async function getHttpMetricsServer(
opts: HttpMetricsServerOpts,
{
@@ -26,7 +31,7 @@ export async function getHttpMetricsServer(
// New registry to metric the metrics. Using the same registry would deadlock the .metrics promise
const httpServerRegister = new RegistryMetricCreator();
- const scrapeTimeMetric = httpServerRegister.histogram<"status">({
+ const scrapeTimeMetric = httpServerRegister.histogram<{status: RequestStatus}>({
name: "lodestar_metrics_scrape_seconds",
help: "Lodestar metrics server async time to scrape metrics",
labelNames: ["status"],
@@ -40,7 +45,7 @@ export async function getHttpMetricsServer(
if (req.method === "GET" && req.url && req.url.includes("/metrics")) {
const timer = scrapeTimeMetric.startTimer();
const metricsRes = await Promise.all([wrapError(register.metrics()), getOtherMetrics()]);
- timer({status: metricsRes[0].err ? "error" : "success"});
+ timer({status: metricsRes[0].err ? RequestStatus.error : RequestStatus.success});
// Ensure we only writeHead once
if (metricsRes[0].err) {
diff --git a/packages/beacon-node/src/metrics/utils/avgMinMax.ts b/packages/beacon-node/src/metrics/utils/avgMinMax.ts
index 43f51c821790..709c83ee38d6 100644
--- a/packages/beacon-node/src/metrics/utils/avgMinMax.ts
+++ b/packages/beacon-node/src/metrics/utils/avgMinMax.ts
@@ -1,21 +1,21 @@
import {GaugeConfiguration} from "prom-client";
+import {AvgMinMax as IAvgMinMax, LabelKeys, LabelsGeneric} from "@lodestar/utils";
import {GaugeExtra} from "./gauge.js";
type GetValuesFn = () => number[];
-type Labels = Partial>;
/**
* Special non-standard "Histogram" that captures the avg, min and max of values
*/
-export class AvgMinMax {
- private readonly sum: GaugeExtra;
- private readonly avg: GaugeExtra;
- private readonly min: GaugeExtra;
- private readonly max: GaugeExtra;
+export class AvgMinMax implements IAvgMinMax {
+ private readonly sum: GaugeExtra;
+ private readonly avg: GaugeExtra;
+ private readonly min: GaugeExtra;
+ private readonly max: GaugeExtra;
private getValuesFn: GetValuesFn | null = null;
- constructor(configuration: GaugeConfiguration) {
+ constructor(configuration: GaugeConfiguration>) {
this.sum = new GaugeExtra({...configuration, name: `${configuration.name}_sum`});
this.avg = new GaugeExtra({...configuration, name: `${configuration.name}_avg`});
this.min = new GaugeExtra({...configuration, name: `${configuration.name}_min`});
@@ -33,8 +33,8 @@ export class AvgMinMax {
}
set(values: number[]): void;
- set(labels: Labels, values: number[]): void;
- set(arg1?: Labels | number[], arg2?: number[]): void {
+ set(labels: Labels, values: number[]): void;
+ set(arg1?: Labels | number[], arg2?: number[]): void {
if (arg2 === undefined) {
const values = arg1 as number[];
const {sum, avg, min, max} = getStats(values);
@@ -44,7 +44,7 @@ export class AvgMinMax {
this.max.set(max);
} else {
const values = (arg2 !== undefined ? arg2 : arg1) as number[];
- const labels = arg1 as Labels;
+ const labels = arg1 as Labels;
const {sum, avg, min, max} = getStats(values);
this.sum.set(labels, sum);
this.avg.set(labels, avg);
diff --git a/packages/beacon-node/src/metrics/utils/gauge.ts b/packages/beacon-node/src/metrics/utils/gauge.ts
index fb95fe25d24d..1f527adfcb64 100644
--- a/packages/beacon-node/src/metrics/utils/gauge.ts
+++ b/packages/beacon-node/src/metrics/utils/gauge.ts
@@ -1,29 +1,16 @@
-import {Gauge, GaugeConfiguration} from "prom-client";
-import {IGauge} from "../interface.js";
-
-type CollectFn = (metric: IGauge) => void;
-type Labels = Partial>;
+import {Gauge} from "prom-client";
+import {CollectFn, Gauge as IGauge, LabelKeys, LabelsGeneric} from "@lodestar/utils";
/**
- * Extends the prom-client Gauge with extra features:
- * - Add multiple collect functions after instantiation
- * - Create child gauges with fixed labels
+ * Extends the prom-client Gauge to be able to add multiple collect functions after instantiation
*/
-export class GaugeExtra extends Gauge implements IGauge {
- private collectFns: CollectFn[] = [];
-
- constructor(configuration: GaugeConfiguration) {
- super(configuration);
- }
+export class GaugeExtra extends Gauge> implements IGauge {
+ private collectFns: CollectFn[] = [];
- addCollect(collectFn: CollectFn): void {
+ addCollect(collectFn: CollectFn): void {
this.collectFns.push(collectFn);
}
- child(labels: Labels): GaugeChild {
- return new GaugeChild(labels, this);
- }
-
/**
* @override Metric.collect
*/
@@ -33,48 +20,3 @@ export class GaugeExtra extends Gauge implements IGauge {
}
}
}
-
-export class GaugeChild implements IGauge {
- gauge: GaugeExtra;
- labelsParent: Labels;
- constructor(labelsParent: Labels, gauge: GaugeExtra) {
- this.gauge = gauge;
- this.labelsParent = labelsParent;
- }
-
- // Sorry for this mess, `prom-client` API choices are not great
- // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler
- inc(value?: number): void;
- inc(labels: Labels, value?: number): void;
- inc(arg1?: Labels | number, arg2?: number): void {
- if (typeof arg1 === "object") {
- this.gauge.inc({...this.labelsParent, ...arg1}, arg2 ?? 1);
- } else {
- this.gauge.inc(this.labelsParent, arg1 ?? 1);
- }
- }
-
- dec(value?: number): void;
- dec(labels: Labels, value?: number): void;
- dec(arg1?: Labels | number, arg2?: number): void {
- if (typeof arg1 === "object") {
- this.gauge.dec({...this.labelsParent, ...arg1}, arg2 ?? 1);
- } else {
- this.gauge.dec(this.labelsParent, arg1 ?? 1);
- }
- }
-
- set(value: number): void;
- set(labels: Labels, value: number): void;
- set(arg1?: Labels | number, arg2?: number): void {
- if (typeof arg1 === "object") {
- this.gauge.set({...this.labelsParent, ...arg1}, arg2 ?? 0);
- } else {
- this.gauge.set(this.labelsParent, arg1 ?? 0);
- }
- }
-
- addCollect(collectFn: CollectFn): void {
- this.gauge.addCollect(() => collectFn(this));
- }
-}
diff --git a/packages/beacon-node/src/metrics/utils/histogram.ts b/packages/beacon-node/src/metrics/utils/histogram.ts
deleted file mode 100644
index 4490929629f2..000000000000
--- a/packages/beacon-node/src/metrics/utils/histogram.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import {Histogram, HistogramConfiguration} from "prom-client";
-import {IHistogram} from "../interface.js";
-
-type Labels = Partial>;
-
-/**
- * Extends the prom-client Histogram with extra features:
- * - Add multiple collect functions after instantiation
- * - Create child histograms with fixed labels
- */
-export class HistogramExtra extends Histogram implements IHistogram {
- constructor(configuration: HistogramConfiguration) {
- super(configuration);
- }
-
- child(labels: Labels): HistogramChild {
- return new HistogramChild(labels, this);
- }
-}
-
-export class HistogramChild implements IHistogram {
- histogram: HistogramExtra;
- labelsParent: Labels;
- constructor(labelsParent: Labels, histogram: HistogramExtra) {
- this.histogram = histogram;
- this.labelsParent = labelsParent;
- }
-
- // Sorry for this mess, `prom-client` API choices are not great
- // If the function signature was `observe(value: number, labels?: Labels)`, this would be simpler
- observe(value?: number): void;
- observe(labels: Labels, value?: number): void;
- observe(arg1?: Labels | number, arg2?: number): void {
- if (typeof arg1 === "object") {
- this.histogram.observe({...this.labelsParent, ...arg1}, arg2 ?? 0);
- } else {
- this.histogram.observe(this.labelsParent, arg1 ?? 0);
- }
- }
-
- startTimer(arg1?: Labels): (labels?: Labels) => number {
- if (typeof arg1 === "object") {
- return this.histogram.startTimer({...this.labelsParent, ...arg1});
- } else {
- return this.histogram.startTimer(this.labelsParent);
- }
- }
-}
diff --git a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts
index 8864eb2c74c4..adec6f984702 100644
--- a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts
+++ b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts
@@ -1,33 +1,41 @@
-import {Gauge, GaugeConfiguration, Registry, HistogramConfiguration, CounterConfiguration, Counter} from "prom-client";
+import {Gauge, Registry, Counter, Histogram} from "prom-client";
+import {
+ AvgMinMaxConfig,
+ CounterConfig,
+ GaugeConfig,
+ HistogramConfig,
+ AvgMinMax as IAvgMinMax,
+ Counter as ICounter,
+ GaugeExtra as IGaugeExtra,
+ Histogram as IHistogram,
+ LabelKeys,
+ LabelsGeneric,
+ MetricsRegisterCustom,
+ NoLabels,
+ StaticConfig,
+} from "@lodestar/utils";
import {AvgMinMax} from "./avgMinMax.js";
import {GaugeExtra} from "./gauge.js";
-import {HistogramExtra} from "./histogram.js";
-type StaticConfiguration = {
- name: GaugeConfiguration["name"];
- help: GaugeConfiguration["help"];
- value: Record