Partial implementation of submit blinded block & blobs. Refactor existing `BlobSidecar` related types to support blinded blobs.
jimmygchen committed Jun 22, 2023
1 parent fdbba30 commit 59fb1af
Showing 16 changed files with 254 additions and 229 deletions.
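
The type definitions behind this refactor are not part of the diff; the imports that appear in the hunks below (`AbstractSidecar`, `SidecarList`, `BlobSidecarList`, `BlindedBlobSidecarList`) only hint at their shape. As a rough orientation, a minimal sketch of that shape might look like the following. This is an illustration under stated assumptions, not the Lighthouse definitions: the `EthSpec` type parameter is dropped, plain `Vec` stands in for the SSZ `VariableList`, and the field sets and trait methods are invented for clarity.

// Orientation sketch only, not the actual Lighthouse definitions. Assumptions:
// the `EthSpec` type parameter is dropped, plain `Vec` stands in for the SSZ
// `VariableList`, and field sets / trait methods are invented for illustration.

/// Behaviour shared by full and blinded sidecars, so callers such as the blob
/// cache can stay generic over either representation.
pub trait AbstractSidecar: Clone {
    fn slot(&self) -> u64;
}

/// Full sidecar: carries the blob bytes themselves.
#[derive(Clone)]
pub struct BlobSidecar {
    pub slot: u64,
    pub blob: Vec<u8>,
}

/// Blinded sidecar: carries only the blob root, for the builder flow.
#[derive(Clone)]
pub struct BlindedBlobSidecar {
    pub slot: u64,
    pub blob_root: [u8; 32],
}

impl AbstractSidecar for BlobSidecar {
    fn slot(&self) -> u64 {
        self.slot
    }
}

impl AbstractSidecar for BlindedBlobSidecar {
    fn slot(&self) -> u64 {
        self.slot
    }
}

/// One list alias parameterised over the sidecar type, plus the two
/// specialisations the diffs below import.
pub type SidecarList<S> = Vec<S>;
pub type BlobSidecarList = SidecarList<BlobSidecar>;
pub type BlindedBlobSidecarList = SidecarList<BlindedBlobSidecar>;

With that split, call sites in this commit mostly change from naming `BlobSidecar` concretely to taking the sidecar type as an extra generic parameter (for example `BlockContentsTuple<E, Payload, Sidecar>` in the hunks below).
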
31 changes: 5 additions & 26 deletions beacon_node/beacon_chain/src/beacon_chain.rs
@@ -117,9 +117,10 @@ use tokio_stream::Stream;
use tree_hash::TreeHash;
use types::beacon_block_body::KzgCommitments;
use types::beacon_state::CloneConfig;
use types::blob_sidecar::{BlobRoots, BlobSidecarList, Blobs};
use types::blob_sidecar::{
BlindedBlobSidecar, BlindedBlobSidecarList, BlobRoots, BlobSidecarList, Blobs, BlobsOrBlobRoots,
};
use types::consts::deneb::MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS;
use types::deneb_types::{BlindedBlobSidecar, BlobsOrBlobRoots, SidecarList};
use types::*;

pub type ForkChoiceError = fork_choice::Error<crate::ForkChoiceStoreError>;
@@ -4820,29 +4821,6 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
Ok((block, state))
}

fn compute_blob_kzg_proofs(
kzg: &Arc<Kzg>,
blobs: &Blobs<T::EthSpec>,
expected_kzg_commitments: &KzgCommitments<T::EthSpec>,
slot: Slot,
) -> Result<Vec<KzgProof>, BlockProductionError> {
blobs
.iter()
.enumerate()
.map(|(blob_index, blob)| {
let kzg_commitment = expected_kzg_commitments.get(blob_index).ok_or(
BlockProductionError::MissingKzgCommitment(format!(
"Missing KZG commitment for slot {} blob index {}",
slot, blob_index
)),
)?;

kzg_utils::compute_blob_kzg_proof::<T::EthSpec>(kzg, blob, *kzg_commitment)
.map_err(BlockProductionError::KzgError)
})
.collect::<Result<Vec<KzgProof>, BlockProductionError>>()
}

/// This method must be called whenever an execution engine indicates that a payload is
/// invalid.
///
@@ -6165,7 +6143,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
let beacon_block_root = block.canonical_root();
let slot = block.slot();

let blob_sidecars = SidecarList::<T::EthSpec, BlindedBlobSidecar>::from(
let blob_sidecars = BlindedBlobSidecarList::<T::EthSpec>::from(
blob_roots
.into_iter()
.enumerate()
@@ -6202,6 +6180,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
Ok(())
}

#[allow(clippy::type_complexity)] //TODO(jimmy): fix type complexity
fn merge_block_contents_into_beacon_block<E: EthSpec, Payload: AbstractExecPayload<E>>(
partial_beacon_block: PartialBeaconBlock<E, Payload>,
block_contents: Option<BlockProposalContents<E, Payload>>,
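
The hunk above is cut off after `.enumerate()`, so the closure that builds each blinded sidecar is not visible. A plausible, simplified sketch of the pattern it starts (pairing each blob root with its index) is shown below; the field names, the index type and the plain `Vec` alias are assumptions, not the real constructor.

// Standalone sketch of the "blob roots -> blinded sidecar list" step begun in
// the truncated hunk above. Simplified types; field names are assumptions.

#[derive(Clone, Debug)]
pub struct BlindedBlobSidecar {
    pub index: u64,
    pub blob_root: [u8; 32],
}

pub type BlindedBlobSidecarList = Vec<BlindedBlobSidecar>;

/// Pair each blob root with its position to form the blinded sidecar list.
pub fn blinded_sidecars_from_roots(blob_roots: Vec<[u8; 32]>) -> BlindedBlobSidecarList {
    blob_roots
        .into_iter()
        .enumerate()
        .map(|(index, blob_root)| BlindedBlobSidecar {
            index: index as u64,
            blob_root,
        })
        .collect()
}
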
3 changes: 1 addition & 2 deletions beacon_node/beacon_chain/src/blob_cache.rs
@@ -1,7 +1,6 @@
use lru::LruCache;
use parking_lot::Mutex;
use types::deneb_types::{AbstractSidecar, SidecarList};
use types::{EthSpec, Hash256};
use types::{AbstractSidecar, EthSpec, Hash256, SidecarList};

pub const DEFAULT_BLOB_CACHE_SIZE: usize = 10;

4 changes: 2 additions & 2 deletions beacon_node/beacon_chain/src/blob_verification.rs
@@ -681,8 +681,8 @@ impl<E: EthSpec> From<SignedBeaconBlock<E>> for BlockWrapper<E> {
}
}

impl<E: EthSpec> From<BlockContentsTuple<E, FullPayload<E>>> for BlockWrapper<E> {
fn from(value: BlockContentsTuple<E, FullPayload<E>>) -> Self {
impl<E: EthSpec> From<BlockContentsTuple<E, FullPayload<E>, BlobSidecar<E>>> for BlockWrapper<E> {
fn from(value: BlockContentsTuple<E, FullPayload<E>, BlobSidecar<E>>) -> Self {
match value.1 {
Some(variable_list) => {
let mut blobs = Vec::with_capacity(E::max_blobs_per_block());
64 changes: 34 additions & 30 deletions beacon_node/beacon_chain/src/test_utils.rs
@@ -774,7 +774,10 @@ where
&self,
mut state: BeaconState<E>,
slot: Slot,
) -> (BlockContentsTuple<E, FullPayload<E>>, BeaconState<E>) {
) -> (
BlockContentsTuple<E, FullPayload<E>, BlobSidecar<E>>,
BeaconState<E>,
) {
assert_ne!(slot, 0, "can't produce a block at slot 0");
assert!(slot >= state.slot());

@@ -814,35 +817,36 @@ where
&self.spec,
);

let block_contents: BlockContentsTuple<E, FullPayload<E>> = match &signed_block {
SignedBeaconBlock::Base(_)
| SignedBeaconBlock::Altair(_)
| SignedBeaconBlock::Merge(_)
| SignedBeaconBlock::Capella(_) => (signed_block, None),
SignedBeaconBlock::Deneb(_) => {
if let Some(blobs) = self
.chain
.proposal_blob_cache
.pop(&signed_block.canonical_root())
{
let signed_blobs = Vec::from(blobs)
.into_iter()
.map(|blob| {
blob.sign(
&self.validator_keypairs[proposer_index].sk,
&state.fork(),
state.genesis_validators_root(),
&self.spec,
)
})
.collect::<Vec<_>>()
.into();
(signed_block, Some(signed_blobs))
} else {
(signed_block, None)
let block_contents: BlockContentsTuple<E, FullPayload<E>, BlobSidecar<E>> =
match &signed_block {
SignedBeaconBlock::Base(_)
| SignedBeaconBlock::Altair(_)
| SignedBeaconBlock::Merge(_)
| SignedBeaconBlock::Capella(_) => (signed_block, None),
SignedBeaconBlock::Deneb(_) => {
if let Some(blobs) = self
.chain
.proposal_blob_cache
.pop(&signed_block.canonical_root())
{
let signed_blobs = Vec::from(blobs)
.into_iter()
.map(|blob| {
blob.sign(
&self.validator_keypairs[proposer_index].sk,
&state.fork(),
state.genesis_validators_root(),
&self.spec,
)
})
.collect::<Vec<_>>()
.into();
(signed_block, Some(signed_blobs))
} else {
(signed_block, None)
}
}
}
};
};

(block_contents, state)
}
@@ -1849,7 +1853,7 @@ where
) -> Result<
(
SignedBeaconBlockHash,
BlockContentsTuple<E, FullPayload<E>>,
BlockContentsTuple<E, FullPayload<E>, BlobSidecar<E>>,
BeaconState<E>,
),
BlockError<E>,
6 changes: 3 additions & 3 deletions beacon_node/execution_layer/src/lib.rs
@@ -42,14 +42,13 @@ use tokio::{
use tokio_stream::wrappers::WatchStream;
use tree_hash::TreeHash;
use types::beacon_block_body::KzgCommitments;
use types::blob_sidecar::Blobs;
use types::blob_sidecar::BlobsOrBlobRoots;
use types::builder_bid::BuilderBid;
use types::consts::deneb::BLOB_TX_TYPE;
use types::deneb_types::BlobsOrBlobRoots;
use types::transaction::{AccessTuple, BlobTransaction, EcdsaSignature, SignedBlobTransaction};
use types::{AbstractExecPayload, BeaconStateError, ExecPayload, VersionedHash};
use types::{
BlindedPayload, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ExecutionPayload,
BlindedPayload, Blobs, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ExecutionPayload,
ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName,
};
use types::{KzgProofs, Withdrawals};
@@ -1187,6 +1186,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
}
};

// TODO(jimmy): cache blobs bundle
let payload_response = async {
debug!(
self.log(),
9 changes: 5 additions & 4 deletions beacon_node/http_api/src/build_block_contents.rs
@@ -1,16 +1,17 @@
use beacon_chain::{BeaconChain, BeaconChainTypes, BlockProductionError};
use eth2::types::{BeaconBlockAndBlobSidecars, BlindedBeaconBlockAndBlobSidecars, BlockContents};
use std::sync::Arc;
use types::deneb_types::BlindedBlobSidecar;
use types::{BeaconBlock, BlindedPayload, BlobSidecar, ForkName, FullPayload};
use types::{BeaconBlock, BlindedBlobSidecar, BlindedPayload, BlobSidecar, ForkName, FullPayload};

type Error = warp::reject::Rejection;
type FullBlockContents<E> = BlockContents<E, FullPayload<E>, BlobSidecar<E>>;
type BlindedBlockContents<E> = BlockContents<E, BlindedPayload<E>, BlindedBlobSidecar>;

pub fn build_block_contents<T: BeaconChainTypes>(
fork_name: ForkName,
chain: Arc<BeaconChain<T>>,
block: BeaconBlock<T::EthSpec, FullPayload<T::EthSpec>>,
) -> Result<BlockContents<T::EthSpec, FullPayload<T::EthSpec>, BlobSidecar<T::EthSpec>>, Error> {
) -> Result<FullBlockContents<T::EthSpec>, Error> {
match fork_name {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {
Ok(BlockContents::Block(block))
@@ -37,7 +38,7 @@ pub fn build_blinded_block_contents<T: BeaconChainTypes>(
fork_name: ForkName,
chain: Arc<BeaconChain<T>>,
block: BeaconBlock<T::EthSpec, BlindedPayload<T::EthSpec>>,
) -> Result<BlockContents<T::EthSpec, BlindedPayload<T::EthSpec>, BlindedBlobSidecar>, Error> {
) -> Result<BlindedBlockContents<T::EthSpec>, Error> {
match fork_name {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {
Ok(BlockContents::Block(block))
17 changes: 10 additions & 7 deletions beacon_node/http_api/src/lib.rs
@@ -59,11 +59,10 @@ use tokio::sync::mpsc::{Sender, UnboundedSender};
use tokio_stream::{wrappers::BroadcastStream, StreamExt};
use types::{
Attestation, AttestationData, AttestationShufflingId, AttesterSlashing, BeaconStateError,
BlindedPayload, CommitteeCache, ConfigAndPreset, Epoch, EthSpec, ForkName, FullPayload,
ProposerPreparationData, ProposerSlashing, RelativeEpoch, SignedAggregateAndProof,
SignedBeaconBlock, SignedBlsToExecutionChange, SignedContributionAndProof,
SignedValidatorRegistrationData, SignedVoluntaryExit, Slot, SyncCommitteeMessage,
SyncContributionData,
BlindedBlobSidecar, BlindedPayload, CommitteeCache, ConfigAndPreset, Epoch, EthSpec, ForkName,
FullPayload, ProposerPreparationData, ProposerSlashing, RelativeEpoch, SignedAggregateAndProof,
SignedBlsToExecutionChange, SignedContributionAndProof, SignedValidatorRegistrationData,
SignedVoluntaryExit, Slot, SyncCommitteeMessage, SyncContributionData,
};
use version::{
add_consensus_version_header, execution_optimistic_finalized_fork_versioned_response,
@@ -1243,11 +1242,15 @@ pub fn serve<T: BeaconChainTypes>(
.and(network_tx_filter.clone())
.and(log_filter.clone())
.and_then(
|block: SignedBeaconBlock<T::EthSpec, BlindedPayload<_>>,
|block_contents: SignedBlockContents<
T::EthSpec,
BlindedPayload<_>,
BlindedBlobSidecar,
>,
chain: Arc<BeaconChain<T>>,
network_tx: UnboundedSender<NetworkMessage<T::EthSpec>>,
log: Logger| async move {
publish_blocks::publish_blinded_block(block, chain, &network_tx, log)
publish_blocks::publish_blinded_block(block_contents, chain, &network_tx, log)
.await
.map(|()| warp::reply().into_response())
},
21 changes: 14 additions & 7 deletions beacon_node/http_api/src/publish_blocks.rs
@@ -16,8 +16,8 @@ use store::FixedVector;
use tokio::sync::mpsc::UnboundedSender;
use tree_hash::TreeHash;
use types::{
AbstractExecPayload, BeaconBlockRef, BlindedPayload, EthSpec, ExecPayload, ExecutionBlockHash,
FullPayload, Hash256, SignedBeaconBlock,
AbstractExecPayload, BeaconBlockRef, BlindedBlobSidecar, BlindedPayload, EthSpec, ExecPayload,
ExecutionBlockHash, FullPayload, Hash256, SignedBeaconBlock,
};
use warp::Rejection;

@@ -185,13 +185,14 @@ pub async fn publish_block<T: BeaconChainTypes>(
/// Handles a request from the HTTP API for blinded blocks. This converts blinded blocks into full
/// blocks before publishing.
pub async fn publish_blinded_block<T: BeaconChainTypes>(
block: SignedBeaconBlock<T::EthSpec, BlindedPayload<T::EthSpec>>,
block_contents: SignedBlockContents<T::EthSpec, BlindedPayload<T::EthSpec>, BlindedBlobSidecar>,
chain: Arc<BeaconChain<T>>,
network_tx: &UnboundedSender<NetworkMessage<T::EthSpec>>,
log: Logger,
) -> Result<(), Rejection> {
let block_root = block.canonical_root();
let full_block = reconstruct_block(chain.clone(), block_root, block, log.clone()).await?;
let block_root = block_contents.signed_block().canonical_root();
let full_block =
reconstruct_block(chain.clone(), block_root, block_contents, log.clone()).await?;
publish_block::<T>(Some(block_root), full_block, chain, network_tx, log).await
}

@@ -201,9 +202,10 @@ pub async fn publish_blinded_block<T: BeaconChainTypes>(
async fn reconstruct_block<T: BeaconChainTypes>(
chain: Arc<BeaconChain<T>>,
block_root: Hash256,
block: SignedBeaconBlock<T::EthSpec, BlindedPayload<T::EthSpec>>,
block_contents: SignedBlockContents<T::EthSpec, BlindedPayload<T::EthSpec>, BlindedBlobSidecar>,
log: Logger,
) -> Result<ProvenancedBlock<T::EthSpec>, Rejection> {
let block = block_contents.signed_block();
let full_payload_opt = if let Ok(payload_header) = block.message().body().execution_payload() {
let el = chain.execution_layer.as_ref().ok_or_else(|| {
warp_utils::reject::custom_server_error("Missing execution layer".to_string())
@@ -224,6 +226,7 @@
.into();
ProvenancedPayload::Local(payload)
// If we already have an execution payload with this transactions root cached, use it.
// TODO(jimmy) get cached blobs
} else if let Some(cached_payload) =
el.get_payload_by_root(&payload_header.tree_hash_root())
{
@@ -245,8 +248,9 @@
&log,
);

// TODO(jimmy): handle blinded blobs bundle response
let full_payload = el
.propose_blinded_beacon_block(block_root, &block)
.propose_blinded_beacon_block(block_root, block)
.await
.map_err(|e| {
warp_utils::reject::custom_server_error(format!(
@@ -263,6 +267,9 @@ async fn reconstruct_block<T: BeaconChainTypes>(
None
};

// TODO(jimmy) convert full `SignedBlockSidecars`
let (block, _maybe_blobs) = block_contents.deconstruct();

match full_payload_opt {
// A block without a payload is pre-merge and we consider it locally
// built.
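
`publish_blinded_block` and `reconstruct_block` above now receive a `SignedBlockContents` value and call `signed_block()` and `deconstruct()` on it, but the wrapper's definition is not included in this commit view. As a rough mental model only, it behaves like the sketch below; the generic `EthSpec`/payload parameters are omitted and the exact method signatures are assumptions.

// Mental-model sketch of the `SignedBlockContents` wrapper used above, not the
// actual eth2 type. Contained types are stand-ins and generics are omitted.

#[derive(Clone)]
pub struct SignedBeaconBlock;

#[derive(Clone)]
pub struct SignedBlindedBlobSidecar;

pub struct SignedBlockContents {
    block: SignedBeaconBlock,
    blob_sidecars: Option<Vec<SignedBlindedBlobSidecar>>,
}

impl SignedBlockContents {
    /// Borrow the inner block, e.g. to compute its canonical root before
    /// un-blinding the payload.
    pub fn signed_block(&self) -> &SignedBeaconBlock {
        &self.block
    }

    /// Split into the block and the optional blinded blob sidecars, as done
    /// near the end of `reconstruct_block`.
    pub fn deconstruct(self) -> (SignedBeaconBlock, Option<Vec<SignedBlindedBlobSidecar>>) {
        (self.block, self.blob_sidecars)
    }
}
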
1 change: 0 additions & 1 deletion common/eth2/src/lib.rs
@@ -16,7 +16,6 @@ pub mod types;

use self::mixin::{RequestAccept, ResponseOptional};
use self::types::{Error as ResponseError, *};
use ::types::deneb_types::AbstractSidecar;
use futures::Stream;
use futures_util::StreamExt;
use lighthouse_network::PeerId;
(Diffs for the remaining changed files are not shown here.)
