From 60154e0299aea8744c4077fbcf999102d49d1869 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 16 Feb 2026 14:43:32 +0100 Subject: [PATCH 01/25] Compute and add bytecode hashes to the report. --- Cargo.lock | 1 + crates/compiler/src/revive_resolc.rs | 7 ++- crates/compiler/src/solc.rs | 4 +- crates/report/Cargo.toml | 1 + crates/report/src/aggregator.rs | 85 ++++++++++++++++++++++------ 5 files changed, 77 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9bfa7cea..cd1efab4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5872,6 +5872,7 @@ dependencies = [ "serde", "serde_json", "serde_with", + "sha2 0.10.9", "tokio", "tracing", ] diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index fe3ae2f4..d7806dec 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -293,7 +293,10 @@ impl SolidityCompiler for Resolc { .canonicalize() .with_context(|| format!("Failed to canonicalize path {src_for_msg}"))?; - let map = compiler_output.contracts.entry(source_path).or_default(); + let contracts_at_path = compiler_output + .contracts + .entry(source_path.clone()) + .or_default(); for (contract_name, contract_information) in contracts.into_iter() { let Some(bytecode) = contract_information .evm @@ -345,7 +348,7 @@ impl SolidityCompiler for Resolc { serde_json::from_value::(abi_value.clone()) .context("ABI found in solc_metadata output is not valid ABI")? 
}; - map.insert(contract_name, (bytecode.object, abi)); + contracts_at_path.insert(contract_name, (bytecode.object, abi)); } } diff --git a/crates/compiler/src/solc.rs b/crates/compiler/src/solc.rs index 9a825add..17ff2b82 100644 --- a/crates/compiler/src/solc.rs +++ b/crates/compiler/src/solc.rs @@ -250,7 +250,7 @@ impl SolidityCompiler for Solc { let mut compiler_output = CompilerOutput::default(); for (contract_path, contracts) in parsed.contracts { - let map = compiler_output + let contracts_at_path = compiler_output .contracts .entry(contract_path.canonicalize().with_context(|| { format!( @@ -271,7 +271,7 @@ impl SolidityCompiler for Solc { let abi = contract_info .abi .context("Unexpected - contract compiled with solc as no ABI")?; - map.insert(contract_name, (source_code, abi)); + contracts_at_path.insert(contract_name, (source_code, abi)); } } diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml index 89890e77..cf3d3fee 100644 --- a/crates/report/Cargo.toml +++ b/crates/report/Cargo.toml @@ -22,6 +22,7 @@ semver = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } serde_with = { workspace = true } +sha2 = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 3f8b0de8..2bd74ca6 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -9,7 +9,11 @@ use std::{ time::{SystemTime, UNIX_EPOCH}, }; -use alloy::primitives::{Address, BlockNumber, BlockTimestamp, TxHash}; +use alloy::{ + hex, + json_abi::JsonAbi, + primitives::{Address, BlockNumber, BlockTimestamp, TxHash}, +}; use anyhow::{Context as _, Result}; use indexmap::IndexMap; use itertools::Itertools; @@ -20,6 +24,7 @@ use revive_dt_format::{case::CaseIdx, metadata::ContractInstance, steps::StepPat use semver::Version; use serde::{Deserialize, Serialize}; use serde_with::{DisplayFromStr, serde_as}; +use sha2::{Digest, 
Sha256}; use tokio::sync::{ broadcast::{Sender, channel}, mpsc::{UnboundedReceiver, UnboundedSender, unbounded_channel}, @@ -307,18 +312,16 @@ impl ReportAggregator { } else { None }; - let compiler_output = if include_output { - Some(event.compiler_output) - } else { - None - }; execution_information.pre_link_compilation_status = Some(CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, - compiler_output, + compiled_contracts_info: Self::generate_compiled_contracts_info( + event.compiler_output, + include_output, + ), }); } @@ -344,18 +347,16 @@ impl ReportAggregator { } else { None }; - let compiler_output = if include_output { - Some(event.compiler_output) - } else { - None - }; execution_information.post_link_compilation_status = Some(CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, - compiler_output, + compiled_contracts_info: Self::generate_compiled_contracts_info( + event.compiler_output, + include_output, + ), }); } @@ -560,6 +561,45 @@ impl ReportAggregator { .or_default() .get_or_insert_default() } + + /// Generates the compiled contract information for each contract at each path. 
+ fn generate_compiled_contracts_info( + compiler_output: CompilerOutput, + include_compiler_output: bool, + ) -> HashMap> { + let mut compiled_contracts_info = HashMap::new(); + + for (source_path, contracts) in compiler_output.contracts { + let mut contracts_info_at_path = HashMap::new(); + + for (contract_name, (bytecode, abi)) in contracts { + let bytecode_hash = Self::sha256_hash(&bytecode); + let info = if include_compiler_output { + CompiledContractInformation { + abi: Some(abi), + bytecode: Some(bytecode), + bytecode_hash, + } + } else { + CompiledContractInformation { + abi: None, + bytecode: None, + bytecode_hash, + } + }; + contracts_info_at_path.insert(contract_name, info); + } + + compiled_contracts_info.insert(source_path, contracts_info_at_path); + } + + compiled_contracts_info + } + + /// Computes the SHA-256 hash of the `input`. + fn sha256_hash(input: &str) -> String { + hex::encode(Sha256::digest(input.as_bytes())) + } } #[serde_as] @@ -705,10 +745,8 @@ pub enum CompilationStatus { /// the compiler was invoked. #[serde(default, skip_serializing_if = "Option::is_none")] compiler_input: Option, - /// The output of the compiler. This is only included if the appropriate flag is set in the - /// CLI contexts. - #[serde(default, skip_serializing_if = "Option::is_none")] - compiler_output: Option, + /// The information about each compiled contract at each path. + compiled_contracts_info: HashMap>, }, /// The compilation failed. Failure { @@ -728,6 +766,19 @@ pub enum CompilationStatus { }, } +/// Information about the compiled contract. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct CompiledContractInformation { + /// The JSON contract ABI. This is only included if the appropriate flag is set in the CLI context. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub abi: Option, + /// The contract bytecode. This is only included if the appropriate flag is set in the CLI context. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub bytecode: Option, + /// The hash of the bytecode. + pub bytecode_hash: String, +} + /// Information on each step in the execution. #[derive(Clone, Debug, Default, Serialize, Deserialize)] pub struct StepReport { From d97c8b4719388a9d997d3f4a2c3ad662cf48d2c2 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 16 Feb 2026 15:02:08 +0100 Subject: [PATCH 02/25] Update hash type to fixed-size byte array. --- crates/report/src/aggregator.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 2bd74ca6..e42a6d8f 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -10,9 +10,8 @@ use std::{ }; use alloy::{ - hex, json_abi::JsonAbi, - primitives::{Address, BlockNumber, BlockTimestamp, TxHash}, + primitives::{Address, B256, BlockNumber, BlockTimestamp, TxHash}, }; use anyhow::{Context as _, Result}; use indexmap::IndexMap; @@ -573,7 +572,7 @@ impl ReportAggregator { let mut contracts_info_at_path = HashMap::new(); for (contract_name, (bytecode, abi)) in contracts { - let bytecode_hash = Self::sha256_hash(&bytecode); + let bytecode_hash = Self::hash(&bytecode); let info = if include_compiler_output { CompiledContractInformation { abi: Some(abi), @@ -596,9 +595,9 @@ impl ReportAggregator { compiled_contracts_info } - /// Computes the SHA-256 hash of the `input`. - fn sha256_hash(input: &str) -> String { - hex::encode(Sha256::digest(input.as_bytes())) + /// Computes the hash of the `input`. + fn hash(input: &str) -> B256 { + B256::from_slice(&Sha256::digest(input.as_bytes())) } } @@ -776,7 +775,7 @@ pub struct CompiledContractInformation { #[serde(default, skip_serializing_if = "Option::is_none")] pub bytecode: Option, /// The hash of the bytecode. - pub bytecode_hash: String, + pub bytecode_hash: B256, } /// Information on each step in the execution. 
From 1b8113ba1938ba332c486cf397ed9bda18309348 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 16 Feb 2026 15:12:56 +0100 Subject: [PATCH 03/25] Remove leftover clone. --- crates/compiler/src/revive_resolc.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index d7806dec..3d51bc53 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -293,10 +293,7 @@ impl SolidityCompiler for Resolc { .canonicalize() .with_context(|| format!("Failed to canonicalize path {src_for_msg}"))?; - let contracts_at_path = compiler_output - .contracts - .entry(source_path.clone()) - .or_default(); + let contracts_at_path = compiler_output.contracts.entry(source_path).or_default(); for (contract_name, contract_information) in contracts.into_iter() { let Some(bytecode) = contract_information .evm From 2312786e830c169db821af143e2043f89d68fe0d Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 16 Feb 2026 18:11:55 +0100 Subject: [PATCH 04/25] Hex decode the bytecode string prior to hashing. 
--- crates/report/src/aggregator.rs | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index e42a6d8f..c56d5db6 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -10,6 +10,7 @@ use std::{ }; use alloy::{ + hex, json_abi::JsonAbi, primitives::{Address, B256, BlockNumber, BlockTimestamp, TxHash}, }; @@ -572,18 +573,21 @@ impl ReportAggregator { let mut contracts_info_at_path = HashMap::new(); for (contract_name, (bytecode, abi)) in contracts { - let bytecode_hash = Self::hash(&bytecode); + let (is_valid_hex, bytecode_hash) = Self::hex_decode_and_hash(&bytecode); + let requires_linking = !is_valid_hex; let info = if include_compiler_output { CompiledContractInformation { abi: Some(abi), bytecode: Some(bytecode), bytecode_hash, + requires_linking, } } else { CompiledContractInformation { abi: None, bytecode: None, bytecode_hash, + requires_linking, } }; contracts_info_at_path.insert(contract_name, info); @@ -595,9 +599,18 @@ impl ReportAggregator { compiled_contracts_info } - /// Computes the hash of the `input`. - fn hash(input: &str) -> B256 { - B256::from_slice(&Sha256::digest(input.as_bytes())) + /// Attempts to hex decode the input before hashing the result. If the input + /// is prefixed with `0x`, the prefix is stripped before decoding and hashing. + /// + /// Returns `(true, hash)` if decoding succeeded, with a hash of the raw bytes. + /// Returns `(false, hash)` if decoding failed due to invalid hex, with a hash of the string. 
+ fn hex_decode_and_hash(input: &str) -> (bool, B256) { + let input = input.strip_prefix("0x").unwrap_or(input); + + match hex::decode(input) { + Ok(bytes) => (true, B256::from_slice(&Sha256::digest(&bytes))), + Err(_) => (false, B256::from_slice(&Sha256::digest(input.as_bytes()))), + } } } @@ -776,6 +789,8 @@ pub struct CompiledContractInformation { pub bytecode: Option, /// The hash of the bytecode. pub bytecode_hash: B256, + /// Whether the bytecode contains unresolved library placeholders and requires linking. + pub requires_linking: bool, } /// Information on each step in the execution. From e442719c84e47bc9780d0d8b44c88649fba8c63c Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 16 Feb 2026 18:25:11 +0100 Subject: [PATCH 05/25] Add additional comment to info field. --- crates/report/src/aggregator.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index c56d5db6..fd22ae33 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -788,6 +788,8 @@ pub struct CompiledContractInformation { #[serde(default, skip_serializing_if = "Option::is_none")] pub bytecode: Option, /// The hash of the bytecode. + /// Note that it is the hash of the raw bytecode bytes (the decoded `bytecode` string) + /// if `requires_linking` is false, otherwise it is the hash of the `bytecode` string. pub bytecode_hash: B256, /// Whether the bytecode contains unresolved library placeholders and requires linking. pub requires_linking: bool, From 88b26b539de5f17aa895e0a4c4abd1b9abb5aa97 Mon Sep 17 00:00:00 2001 From: elle-j Date: Tue, 17 Feb 2026 10:12:10 +0100 Subject: [PATCH 06/25] Implement the Compile context/subcommand. 
--- crates/common/src/types/mod.rs | 2 + .../src/types/parsed_compile_specifier.rs | 94 +++++++++ .../common/src/types/parsed_test_specifier.rs | 2 +- crates/config/src/lib.rs | 184 +++++++++++++++--- crates/core/src/main.rs | 6 + crates/report/src/aggregator.rs | 1 + 6 files changed, 262 insertions(+), 27 deletions(-) create mode 100644 crates/common/src/types/parsed_compile_specifier.rs diff --git a/crates/common/src/types/mod.rs b/crates/common/src/types/mod.rs index 4d52d087..2c755b8a 100644 --- a/crates/common/src/types/mod.rs +++ b/crates/common/src/types/mod.rs @@ -1,5 +1,6 @@ mod identifiers; mod mode; +mod parsed_compile_specifier; mod parsed_test_specifier; mod private_key_allocator; mod round_robin_pool; @@ -7,6 +8,7 @@ mod version_or_requirement; pub use identifiers::*; pub use mode::*; +pub use parsed_compile_specifier::*; pub use parsed_test_specifier::*; pub use private_key_allocator::*; pub use round_robin_pool::*; diff --git a/crates/common/src/types/parsed_compile_specifier.rs b/crates/common/src/types/parsed_compile_specifier.rs new file mode 100644 index 00000000..0f7878e1 --- /dev/null +++ b/crates/common/src/types/parsed_compile_specifier.rs @@ -0,0 +1,94 @@ +use std::{ + fmt::Display, + path::{Path, PathBuf}, + str::FromStr, +}; + +use anyhow::Context as _; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ParsedCompileSpecifier { + /// All of the contracts in the file should be compiled. + FileOrDirectory { + /// The path of the metadata file containing the contracts or the references to the contracts. 
+ metadata_or_directory_file_path: PathBuf, + }, +} + +impl ParsedCompileSpecifier { + pub fn metadata_path(&self) -> &Path { + match self { + ParsedCompileSpecifier::FileOrDirectory { + metadata_or_directory_file_path: metadata_file_path, + } => metadata_file_path, + } + } +} + +impl Display for ParsedCompileSpecifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ParsedCompileSpecifier::FileOrDirectory { + metadata_or_directory_file_path, + } => { + write!(f, "{}", metadata_or_directory_file_path.display()) + } + } + } +} + +impl FromStr for ParsedCompileSpecifier { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let path = PathBuf::from(s) + .canonicalize() + .context("Failed to canonicalize the path of the contracts")?; + + Ok(Self::FileOrDirectory { + metadata_or_directory_file_path: path, + }) + } +} + +impl From for String { + fn from(value: ParsedCompileSpecifier) -> Self { + value.to_string() + } +} + +impl TryFrom for ParsedCompileSpecifier { + type Error = anyhow::Error; + + fn try_from(value: String) -> Result { + value.parse() + } +} + +impl TryFrom<&str> for ParsedCompileSpecifier { + type Error = anyhow::Error; + + fn try_from(value: &str) -> Result { + value.parse() + } +} + +impl Serialize for ParsedCompileSpecifier { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.to_string().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for ParsedCompileSpecifier { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let string = String::deserialize(deserializer)?; + string.parse().map_err(serde::de::Error::custom) + } +} diff --git a/crates/common/src/types/parsed_test_specifier.rs b/crates/common/src/types/parsed_test_specifier.rs index 2710bf0f..1944aa47 100644 --- a/crates/common/src/types/parsed_test_specifier.rs +++ b/crates/common/src/types/parsed_test_specifier.rs @@ -11,7 +11,7 @@ use crate::types::Mode; 
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ParsedTestSpecifier { - /// All of the test cases in the file should be ran across all of the specified modes + /// All of the test cases in the file should be ran across all of the specified modes. FileOrDirectory { /// The path of the metadata file containing the test cases. metadata_or_directory_file_path: PathBuf, diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index f13327d1..61bc6b7e 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -18,7 +18,7 @@ use alloy::{ }; use anyhow::Context as _; use clap::{Parser, ValueEnum, ValueHint}; -use revive_dt_common::types::{ParsedTestSpecifier, PlatformIdentifier}; +use revive_dt_common::types::{ParsedCompileSpecifier, ParsedTestSpecifier, PlatformIdentifier}; use semver::Version; use serde::{Deserialize, Serialize, Serializer}; use strum::{AsRefStr, Display, EnumString, IntoStaticStr}; @@ -38,6 +38,9 @@ pub enum Context { /// Exports the genesis file of the desired platform. ExportGenesis(Box), + + /// Compiles contracts using the provided compiler build, without executing any tests. + Compile(Box), } impl Context { @@ -51,10 +54,9 @@ impl Context { pub fn update_for_profile(&mut self) { match self { - Context::Test(ctx) => ctx.update_for_profile(), - Context::Benchmark(ctx) => ctx.update_for_profile(), - Context::ExportJsonSchema => {} - Context::ExportGenesis(..) => {} + Self::Test(ctx) => ctx.update_for_profile(), + Self::Benchmark(ctx) => ctx.update_for_profile(), + Self::ExportJsonSchema | Self::ExportGenesis(..) | Self::Compile(..) => {} } } } @@ -65,16 +67,29 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) 
=> unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } -impl AsRef for Context { - fn as_ref(&self) -> &CorpusConfiguration { +impl AsRef for Context { + fn as_ref(&self) -> &CorpusExecutionConfiguration { match self { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::ExportJsonSchema | Self::ExportGenesis(..) | Self::Compile(..) => unreachable!(), + } + } +} + +impl AsRef for Context { + fn as_ref(&self) -> &CorpusCompilationConfiguration { + match self { + Self::Test(..) + | Self::Benchmark(..) + | Self::ExportJsonSchema + | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -85,6 +100,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -95,6 +111,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -105,7 +122,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) => unreachable!(), } } } @@ -116,7 +133,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) 
=> unreachable!(), } } } @@ -127,7 +144,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) => unreachable!(), } } } @@ -138,7 +155,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) => unreachable!(), } } } @@ -149,7 +166,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) => unreachable!(), } } } @@ -159,7 +176,7 @@ impl AsRef for Context { match self { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::ExportJsonSchema | Self::ExportGenesis(..) | Self::Compile(..) => unreachable!(), } } } @@ -172,7 +189,7 @@ impl AsRef for Context { static GENESIS: LazyLock = LazyLock::new(Default::default); &GENESIS } - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) => unreachable!(), } } } @@ -183,7 +200,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportGenesis(context) => context.as_ref().as_ref(), - Self::ExportJsonSchema => unreachable!(), + Self::ExportJsonSchema | Self::Compile(..) 
=> unreachable!(), } } } @@ -194,6 +211,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -204,6 +222,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -214,6 +233,7 @@ impl AsRef for Context { Self::Test(context) => context.as_ref().as_ref(), Self::Benchmark(context) => context.as_ref().as_ref(), Self::ExportJsonSchema | Self::ExportGenesis(..) => unreachable!(), + Self::Compile(context) => context.as_ref().as_ref(), } } } @@ -224,8 +244,12 @@ impl AsRef for Context { match self { Self::Test(context) => context.as_ref().as_ref(), - Self::Benchmark(..) => &DEFAULT, - Self::ExportJsonSchema | Self::ExportGenesis(..) => &DEFAULT, + // TODO: Shouldn't these return `unreachable!()` instead if `&DEFAULT`? + // Only the `TestExecutionContext` has an `ignore_configuration` field. + Self::Benchmark(..) + | Self::ExportJsonSchema + | Self::ExportGenesis(..) + | Self::Compile(..) => &DEFAULT, } } } @@ -265,7 +289,7 @@ pub struct TestExecutionContext { /// Configuration parameters for the corpus files to use. #[clap(flatten, next_help_heading = "Corpus Configuration")] - pub corpus_configuration: CorpusConfiguration, + pub corpus_configuration: CorpusExecutionConfiguration, /// Configuration parameters for the solc compiler. #[clap(flatten, next_help_heading = "Solc Configuration")] @@ -412,7 +436,7 @@ pub struct BenchmarkingContext { /// Configuration parameters for the corpus files to use. 
#[clap(flatten, next_help_heading = "Corpus Configuration")] - pub corpus_configuration: CorpusConfiguration, + pub corpus_configuration: CorpusExecutionConfiguration, /// Configuration parameters for the solc compiler. #[clap(flatten, next_help_heading = "Solc Configuration")] @@ -529,6 +553,50 @@ pub struct ExportGenesisContext { pub wallet_configuration: WalletConfiguration, } +#[derive(Clone, Debug, Parser, Serialize, Deserialize)] +pub struct CompilationContext { + /// The label for the resolc build used (e.g., linux, macos, windows, wasm). + #[arg(long)] + pub build_label: String, + + /// The working directory that the program will use for all of the temporary artifacts needed at + /// runtime. + /// + /// If not specified, then a temporary directory will be created and used by the program for all + /// temporary artifacts. + #[clap( + short, + long, + default_value = "", + value_hint = ValueHint::DirPath, + )] + pub working_directory: WorkingDirectoryConfiguration, + + /// Configuration parameters for the corpus files to use. + #[clap(flatten, next_help_heading = "Corpus Configuration")] + pub corpus_configuration: CorpusCompilationConfiguration, + + /// Configuration parameters for the solc compiler. + #[clap(flatten, next_help_heading = "Solc Configuration")] + pub solc_configuration: SolcConfiguration, + + /// Configuration parameters for the resolc compiler. + #[clap(flatten, next_help_heading = "Resolc Configuration")] + pub resolc_configuration: ResolcConfiguration, + + /// Configuration parameters for concurrency. + #[clap(flatten, next_help_heading = "Concurrency Configuration")] + pub concurrency_configuration: ConcurrencyConfiguration, + + /// Configuration parameters for the compilers and compilation. + #[clap(flatten, next_help_heading = "Compilation Configuration")] + pub compilation_configuration: CompilationConfiguration, + + /// Configuration parameters for the report. 
+ #[clap(flatten, next_help_heading = "Report Configuration")] + pub report_configuration: ReportConfiguration, +} + impl Default for TestExecutionContext { fn default() -> Self { Self::parse_from(["execution-context", "--test", "."]) @@ -541,8 +609,8 @@ impl AsRef for TestExecutionContext { } } -impl AsRef for TestExecutionContext { - fn as_ref(&self) -> &CorpusConfiguration { +impl AsRef for TestExecutionContext { + fn as_ref(&self) -> &CorpusExecutionConfiguration { &self.corpus_configuration } } @@ -643,8 +711,8 @@ impl AsRef for BenchmarkingContext { } } -impl AsRef for BenchmarkingContext { - fn as_ref(&self) -> &CorpusConfiguration { +impl AsRef for BenchmarkingContext { + fn as_ref(&self) -> &CorpusExecutionConfiguration { &self.corpus_configuration } } @@ -763,17 +831,65 @@ impl AsRef for ExportGenesisContext { } } +impl Default for CompilationContext { + fn default() -> Self { + Self::parse_from(["compilation-context", "--compile", "."]) + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &WorkingDirectoryConfiguration { + &self.working_directory + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &CorpusCompilationConfiguration { + &self.corpus_configuration + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &SolcConfiguration { + &self.solc_configuration + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &ResolcConfiguration { + &self.resolc_configuration + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &ConcurrencyConfiguration { + &self.concurrency_configuration + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &CompilationConfiguration { + &self.compilation_configuration + } +} + +impl AsRef for CompilationContext { + fn as_ref(&self) -> &ReportConfiguration { + &self.report_configuration + } +} + /// A set of configuration parameters for the corpus files to use for the execution. 
#[serde_with::serde_as] #[derive(Clone, Debug, Parser, Serialize, Deserialize)] -pub struct CorpusConfiguration { +pub struct CorpusExecutionConfiguration { /// A list of test specifiers for the tests that the tool should run. /// /// Test specifiers follow the following format: /// /// - `{directory_path|metadata_file_path}`: A path to a metadata file where all of the cases /// live and should be run. Alternatively, it points to a directory instructing the framework - /// to discover of the metadata files that live there an execute them. + /// to discover the metadata files that live there an execute them. /// - `{metadata_file_path}::{case_idx}`: The path to a metadata file and then a case idx /// separated by two colons. This specifies that only this specific test case within the /// metadata file should be executed. @@ -784,6 +900,22 @@ pub struct CorpusConfiguration { pub test_specifiers: Vec, } +/// A set of configuration parameters for the corpus files to use for the compilation. +#[serde_with::serde_as] +#[derive(Clone, Debug, Parser, Serialize, Deserialize)] +pub struct CorpusCompilationConfiguration { + /// A list of compile specifiers for the compilations that the tool should run. + /// + /// Compile specifiers follow the following format: + /// + /// - `{directory_path|metadata_file_path}`: A path to a metadata file where all of the contracts, + /// or references to the contracts, live and should be compiled. Alternatively, it points to a + /// directory instructing the framework to discover the metadata files that live there and compile them. + #[serde_as(as = "Vec")] + #[arg(short = 'c', long = "compile", required = true)] + pub compile_specifiers: Vec, +} + /// A set of configuration parameters for Solc. 
#[derive(Clone, Debug, Parser, Serialize, Deserialize)] pub struct SolcConfiguration { diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index ff11586b..babfcb1a 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -123,5 +123,11 @@ fn main() -> anyhow::Result<()> { Ok(()) } + // TODO: + Context::Compile(context) => { + println!("In Context::Compile"); + println!("{:?}", context); + todo!() + } } } diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index fd22ae33..149aa6d3 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -54,6 +54,7 @@ impl ReportAggregator { Context::Test(ref context) => context.report_configuration.file_name.clone(), Context::Benchmark(ref context) => context.report_configuration.file_name.clone(), Context::ExportJsonSchema | Context::ExportGenesis(..) => None, + Context::Compile(ref context) => context.report_configuration.file_name.clone(), }, report: Report::new(context), remaining_cases: Default::default(), From e30302b6f48b2e2dab5123ba9177003e301de344 Mon Sep 17 00:00:00 2001 From: elle-j Date: Thu, 19 Feb 2026 13:53:43 +0100 Subject: [PATCH 07/25] Implement driver and reporter for the compile subcommand. 
--- crates/common/src/types/mod.rs | 4 +- ...ier.rs => parsed_compilation_specifier.rs} | 24 +-- crates/config/src/lib.rs | 30 +-- crates/core/src/compilations/driver.rs | 37 ++++ crates/core/src/compilations/entry_point.rs | 148 ++++++++++++++ crates/core/src/compilations/mod.rs | 9 + .../src/differential_benchmarks/driver.rs | 11 +- crates/core/src/differential_tests/driver.rs | 11 +- crates/core/src/helpers/cached_compiler.rs | 182 ++++++++++++------ crates/core/src/helpers/compile.rs | 148 ++++++++++++++ crates/core/src/helpers/mod.rs | 2 + crates/core/src/main.rs | 34 +++- crates/format/src/corpus.rs | 40 +++- crates/report/src/aggregator.rs | 97 ++++++++++ crates/report/src/common.rs | 7 + crates/report/src/runner_event.rs | 180 ++++++++++++++++- 16 files changed, 860 insertions(+), 104 deletions(-) rename crates/common/src/types/{parsed_compile_specifier.rs => parsed_compilation_specifier.rs} (76%) create mode 100644 crates/core/src/compilations/driver.rs create mode 100644 crates/core/src/compilations/entry_point.rs create mode 100644 crates/core/src/compilations/mod.rs create mode 100644 crates/core/src/helpers/compile.rs diff --git a/crates/common/src/types/mod.rs b/crates/common/src/types/mod.rs index 2c755b8a..c426a1af 100644 --- a/crates/common/src/types/mod.rs +++ b/crates/common/src/types/mod.rs @@ -1,6 +1,6 @@ mod identifiers; mod mode; -mod parsed_compile_specifier; +mod parsed_compilation_specifier; mod parsed_test_specifier; mod private_key_allocator; mod round_robin_pool; @@ -8,7 +8,7 @@ mod version_or_requirement; pub use identifiers::*; pub use mode::*; -pub use parsed_compile_specifier::*; +pub use parsed_compilation_specifier::*; pub use parsed_test_specifier::*; pub use private_key_allocator::*; pub use round_robin_pool::*; diff --git a/crates/common/src/types/parsed_compile_specifier.rs b/crates/common/src/types/parsed_compilation_specifier.rs similarity index 76% rename from crates/common/src/types/parsed_compile_specifier.rs rename to 
crates/common/src/types/parsed_compilation_specifier.rs index 0f7878e1..50e8818f 100644 --- a/crates/common/src/types/parsed_compile_specifier.rs +++ b/crates/common/src/types/parsed_compilation_specifier.rs @@ -8,7 +8,7 @@ use anyhow::Context as _; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum ParsedCompileSpecifier { +pub enum ParsedCompilationSpecifier { /// All of the contracts in the file should be compiled. FileOrDirectory { /// The path of the metadata file containing the contracts or the references to the contracts. @@ -16,20 +16,20 @@ pub enum ParsedCompileSpecifier { }, } -impl ParsedCompileSpecifier { +impl ParsedCompilationSpecifier { pub fn metadata_path(&self) -> &Path { match self { - ParsedCompileSpecifier::FileOrDirectory { + ParsedCompilationSpecifier::FileOrDirectory { metadata_or_directory_file_path: metadata_file_path, } => metadata_file_path, } } } -impl Display for ParsedCompileSpecifier { +impl Display for ParsedCompilationSpecifier { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - ParsedCompileSpecifier::FileOrDirectory { + ParsedCompilationSpecifier::FileOrDirectory { metadata_or_directory_file_path, } => { write!(f, "{}", metadata_or_directory_file_path.display()) @@ -38,7 +38,7 @@ impl Display for ParsedCompileSpecifier { } } -impl FromStr for ParsedCompileSpecifier { +impl FromStr for ParsedCompilationSpecifier { type Err = anyhow::Error; fn from_str(s: &str) -> Result { @@ -52,13 +52,13 @@ impl FromStr for ParsedCompileSpecifier { } } -impl From for String { - fn from(value: ParsedCompileSpecifier) -> Self { +impl From for String { + fn from(value: ParsedCompilationSpecifier) -> Self { value.to_string() } } -impl TryFrom for ParsedCompileSpecifier { +impl TryFrom for ParsedCompilationSpecifier { type Error = anyhow::Error; fn try_from(value: String) -> Result { @@ -66,7 +66,7 @@ impl TryFrom for ParsedCompileSpecifier { } } -impl 
TryFrom<&str> for ParsedCompileSpecifier { +impl TryFrom<&str> for ParsedCompilationSpecifier { type Error = anyhow::Error; fn try_from(value: &str) -> Result { @@ -74,7 +74,7 @@ impl TryFrom<&str> for ParsedCompileSpecifier { } } -impl Serialize for ParsedCompileSpecifier { +impl Serialize for ParsedCompilationSpecifier { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -83,7 +83,7 @@ impl Serialize for ParsedCompileSpecifier { } } -impl<'de> Deserialize<'de> for ParsedCompileSpecifier { +impl<'de> Deserialize<'de> for ParsedCompilationSpecifier { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 61bc6b7e..2760f72e 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -18,7 +18,9 @@ use alloy::{ }; use anyhow::Context as _; use clap::{Parser, ValueEnum, ValueHint}; -use revive_dt_common::types::{ParsedCompileSpecifier, ParsedTestSpecifier, PlatformIdentifier}; +use revive_dt_common::types::{ + ParsedCompilationSpecifier, ParsedTestSpecifier, PlatformIdentifier, +}; use semver::Version; use serde::{Deserialize, Serialize, Serializer}; use strum::{AsRefStr, Display, EnumString, IntoStaticStr}; @@ -40,7 +42,7 @@ pub enum Context { ExportGenesis(Box), /// Compiles contracts using the provided compiler build, without executing any tests. - Compile(Box), + Compile(Box), } impl Context { @@ -554,7 +556,7 @@ pub struct ExportGenesisContext { } #[derive(Clone, Debug, Parser, Serialize, Deserialize)] -pub struct CompilationContext { +pub struct StandaloneCompilationContext { /// The label for the resolc build used (e.g., linux, macos, windows, wasm). #[arg(long)] pub build_label: String, @@ -595,6 +597,10 @@ pub struct CompilationContext { /// Configuration parameters for the report. 
#[clap(flatten, next_help_heading = "Report Configuration")] pub report_configuration: ReportConfiguration, + + /// The output format to use for the tool's output. + #[arg(short, long, default_value_t = OutputFormat::CargoTestLike)] + pub output_format: OutputFormat, } impl Default for TestExecutionContext { @@ -831,49 +837,49 @@ impl AsRef for ExportGenesisContext { } } -impl Default for CompilationContext { +impl Default for StandaloneCompilationContext { fn default() -> Self { Self::parse_from(["compilation-context", "--compile", "."]) } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &WorkingDirectoryConfiguration { &self.working_directory } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &CorpusCompilationConfiguration { &self.corpus_configuration } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &SolcConfiguration { &self.solc_configuration } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &ResolcConfiguration { &self.resolc_configuration } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &ConcurrencyConfiguration { &self.concurrency_configuration } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &CompilationConfiguration { &self.compilation_configuration } } -impl AsRef for CompilationContext { +impl AsRef for StandaloneCompilationContext { fn as_ref(&self) -> &ReportConfiguration { &self.report_configuration } @@ -913,7 +919,7 @@ pub struct CorpusCompilationConfiguration { /// directory instructing the framework to discover the metadata files that live there and compile them. 
#[serde_as(as = "Vec")] #[arg(short = 'c', long = "compile", required = true)] - pub compile_specifiers: Vec, + pub compilation_specifiers: Vec, } /// A set of configuration parameters for Solc. diff --git a/crates/core/src/compilations/driver.rs b/crates/core/src/compilations/driver.rs new file mode 100644 index 00000000..fdf35c1d --- /dev/null +++ b/crates/core/src/compilations/driver.rs @@ -0,0 +1,37 @@ +use anyhow::{Context as _, Result}; +use revive_dt_report::CompilationReporter; +use tracing::error; + +use crate::helpers::{CachedCompiler, CompilationDefinition}; + +pub struct Driver<'a> { + /// The definition of the compilation that the driver is instructed to execute. + compilation_definition: &'a CompilationDefinition<'a>, +} + +impl<'a> Driver<'a> { + pub fn new(compilation_definition: &'a CompilationDefinition<'a>) -> Self { + Self { + compilation_definition, + } + } + + pub async fn compile_all(&self, cached_compiler: &CachedCompiler<'a>) -> Result<()> { + cached_compiler + .compile_contracts( + self.compilation_definition.metadata, + self.compilation_definition.metadata_file_path, + self.compilation_definition.mode.clone(), + None, + self.compilation_definition.compiler.as_ref(), + self.compilation_definition.compiler_identifier, + None, + &CompilationReporter::Standalone(&self.compilation_definition.reporter), + ) + .await + .inspect_err(|err| error!(?err, "Compilation failed")) + .context("Failed to produce the compiled contracts")?; + + Ok(()) + } +} diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs new file mode 100644 index 00000000..7c83a962 --- /dev/null +++ b/crates/core/src/compilations/entry_point.rs @@ -0,0 +1,148 @@ +//! The main entry point into compiling in standalone mode without any test execution. 
+ +use std::{collections::BTreeSet, sync::Arc, time::Duration}; + +use anyhow::Context as _; +use futures::{FutureExt, StreamExt}; +use revive_dt_compiler::{Mode, ModeOptimizerSetting, ModePipeline}; +use revive_dt_format::corpus::Corpus; +use tokio::sync::{RwLock, Semaphore}; +use tracing::{Instrument, error, info, info_span, instrument}; + +use revive_dt_config::{Context, OutputFormat, StandaloneCompilationContext}; +use revive_dt_report::Reporter; + +use crate::{ + compilations::Driver, + helpers::{CachedCompiler, create_compilation_definitions_stream}, +}; + +/// Handles the compilations according to the information defined in the context. +#[instrument(level = "info", err(Debug), skip_all)] +pub async fn handle_compilations( + context: StandaloneCompilationContext, + reporter: Reporter, +) -> anyhow::Result<()> { + let reporter_clone = reporter.clone(); + + // Discover all of the metadata files that are defined in the context. + let corpus = context + .corpus_configuration + .compilation_specifiers + .clone() + .into_iter() + .try_fold(Corpus::default(), Corpus::with_compilation_specifier) + .context("Failed to parse the compile corpus")?; + info!( + len = corpus.metadata_file_count(), + "Discovered metadata files" + ); + + let full_context = Context::Compile(Box::new(context.clone())); + let compilation_definitions = create_compilation_definitions_stream( + &full_context, + &corpus, + // TODO (temporarily always using `z`): Accept mode(s) via CLI. 
+ Mode { + pipeline: ModePipeline::ViaYulIR, + optimize_setting: ModeOptimizerSetting::Mz, + version: None, + }, + reporter.clone(), + ) + .await + .collect::>() + .await; + info!( + len = compilation_definitions.len(), + "Created compilation definitions" + ); + + let cached_compiler = CachedCompiler::new( + context + .working_directory + .as_path() + .join("compilation_cache"), + context + .compilation_configuration + .invalidate_compilation_cache, + ) + .await + .map(Arc::new) + .context("Failed to initialize cached compiler")?; + + // Creating the driver and compiling all of the contracts. + let semaphore = context + .concurrency_configuration + .concurrency_limit() + .map(Semaphore::new) + .map(Arc::new); + let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); + let driver_task = futures::future::join_all(compilation_definitions.iter().enumerate().map( + |(compilation_id, compilation_definition)| { + let running_task_list = running_task_list.clone(); + let semaphore = semaphore.clone(); + + let cached_compiler = cached_compiler.clone(); + let mode = compilation_definition.mode.clone(); + let span = info_span!( + "Compiling Related Files", + compilation_id, + metadata_file_path = %compilation_definition.metadata_file_path.display(), + mode = %mode, + ); + async move { + let permit = match semaphore.as_ref() { + Some(semaphore) => Some(semaphore.acquire().await.expect("Can't fail")), + None => None, + }; + + running_task_list.write().await.insert(compilation_id); + + let driver = Driver::new(compilation_definition); + match driver.compile_all(&cached_compiler).await { + Ok(()) => { /* Reporting already happens by the cached compiler. */ } + Err(_) => { + /* Reporting already happens by the cached compiler. 
*/ + error!("Compilation Failed"); + } + }; + info!("Finished the compilation of the contracts"); + drop(permit); + running_task_list.write().await.remove(&compilation_id); + } + .instrument(span) + }, + )) + .inspect(|_| { + info!("Finished compiling all contracts"); + reporter_clone + .report_completion_event() + .expect("Can't fail") + }); + + let cli_reporting_task = start_cli_reporting_task(context.output_format, reporter); + + tokio::task::spawn(async move { + loop { + let remaining_tasks = running_task_list.read().await; + info!( + count = remaining_tasks.len(), + ?remaining_tasks, + "Remaining Tasks" + ); + drop(remaining_tasks); + tokio::time::sleep(Duration::from_secs(10)).await + } + }); + + futures::future::join(driver_task, cli_reporting_task).await; + + Ok(()) +} + +// TODO: UPDATE! +#[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] +async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporter) { + todo!() +} diff --git a/crates/core/src/compilations/mod.rs b/crates/core/src/compilations/mod.rs new file mode 100644 index 00000000..739b760c --- /dev/null +++ b/crates/core/src/compilations/mod.rs @@ -0,0 +1,9 @@ +//! This module contains all of the code responsible for performing compilations, +//! including the driver implementation and the core logic that allows for contracts +//! to be compiled in standalone mode without any test execution. 
+ +mod driver; +mod entry_point; + +pub use driver::*; +pub use entry_point::*; diff --git a/crates/core/src/differential_benchmarks/driver.rs b/crates/core/src/differential_benchmarks/driver.rs index cc7ffad0..dd84efbf 100644 --- a/crates/core/src/differential_benchmarks/driver.rs +++ b/crates/core/src/differential_benchmarks/driver.rs @@ -33,6 +33,7 @@ use revive_dt_format::{ }, traits::{ResolutionContext, ResolverApi}, }; +use revive_dt_report::CompilationReporter; use tokio::sync::{Mutex, OnceCell, mpsc::UnboundedSender}; use tracing::{Span, debug, error, field::display, info, instrument}; @@ -123,8 +124,9 @@ where self.test_definition.mode.clone(), None, self.platform_information.compiler.as_ref(), - self.platform_information.platform, - &self.platform_information.reporter, + self.platform_information.platform.compiler_identifier(), + Some(self.platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&self.platform_information.reporter), ) .await .inspect_err(|err| error!(?err, "Pre-linking compilation failed")) @@ -199,8 +201,9 @@ where self.test_definition.mode.clone(), deployed_libraries.as_ref(), self.platform_information.compiler.as_ref(), - self.platform_information.platform, - &self.platform_information.reporter, + self.platform_information.platform.compiler_identifier(), + Some(self.platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&self.platform_information.reporter), ) .await .inspect_err(|err| error!(?err, "Post-linking compilation failed")) diff --git a/crates/core/src/differential_tests/driver.rs b/crates/core/src/differential_tests/driver.rs index 852b64b0..536b6db7 100644 --- a/crates/core/src/differential_tests/driver.rs +++ b/crates/core/src/differential_tests/driver.rs @@ -30,6 +30,7 @@ use revive_dt_format::{ }, traits::ResolutionContext, }; +use revive_dt_report::CompilationReporter; use subxt::{ext::codec::Decode, metadata::Metadata, tx::Payload}; use tokio::sync::Mutex; 
use tracing::{error, info, instrument}; @@ -186,8 +187,9 @@ where test_definition.mode.clone(), None, platform_information.compiler.as_ref(), - platform_information.platform, - &platform_information.reporter, + platform_information.platform.compiler_identifier(), + Some(platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&platform_information.reporter), ) .await .inspect_err(|err| { @@ -269,8 +271,9 @@ where test_definition.mode.clone(), deployed_libraries.as_ref(), platform_information.compiler.as_ref(), - platform_information.platform, - &platform_information.reporter, + platform_information.platform.compiler_identifier(), + Some(platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&platform_information.reporter), ) .await .inspect_err(|err| { diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index 000c3bd9..1dbf2aec 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -9,14 +9,16 @@ use std::{ }; use futures::FutureExt; -use revive_dt_common::{iterators::FilesWithExtensionIterator, types::CompilerIdentifier}; +use revive_dt_common::{ + iterators::FilesWithExtensionIterator, + types::{CompilerIdentifier, PlatformIdentifier}, +}; use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler}; -use revive_dt_core::Platform; use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata}; use alloy::{hex::ToHexExt, json_abi::JsonAbi, primitives::Address}; use anyhow::{Context as _, Error, Result}; -use revive_dt_report::ExecutionSpecificReporter; +use revive_dt_report::CompilationReporter; use semver::Version; use serde::{Deserialize, Serialize}; use tokio::sync::{Mutex, RwLock, Semaphore}; @@ -55,7 +57,8 @@ impl<'a> CachedCompiler<'a> { fields( metadata_file_path = %metadata_file_path.display(), %mode, - platform = %platform.platform_identifier() + compiler = 
%compiler_identifier, + platform = ?platform_identifier, ), err )] @@ -66,11 +69,12 @@ impl<'a> CachedCompiler<'a> { mode: Cow<'a, Mode>, deployed_libraries: Option<&HashMap>, compiler: &dyn SolidityCompiler, - platform: &dyn Platform, - reporter: &ExecutionSpecificReporter, + compiler_identifier: CompilerIdentifier, + platform_identifier: Option, + reporter: &CompilationReporter<'_>, ) -> Result { let cache_key = CacheKey { - compiler_identifier: platform.compiler_identifier(), + compiler_identifier, compiler_version: compiler.version().clone(), metadata_file_path, solc_mode: mode.clone(), @@ -142,25 +146,45 @@ impl<'a> CachedCompiler<'a> { match self.artifacts_cache.get(&cache_key).await { Some(cache_value) => { if deployed_libraries.is_some() { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ), + CompilationReporter::Standalone(reporter) => reporter + .report_standalone_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ), + } + .expect("Can't happen"); } else { - reporter - .report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ), + CompilationReporter::Standalone(reporter) => reporter + 
.report_standalone_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ), + } + .expect("Can't happen"); } cache_value.compiler_output } @@ -196,7 +220,7 @@ async fn compile_contracts( mode: &Mode, deployed_libraries: Option<&HashMap>, compiler: &dyn SolidityCompiler, - reporter: &ExecutionSpecificReporter, + reporter: &CompilationReporter<'_>, ) -> Result { // Puts a limit on how many compilations we can perform at any given instance which helps us // with some of the errors we've been seeing with high concurrency on MacOS (we have not tried @@ -239,46 +263,84 @@ async fn compile_contracts( match (output.as_ref(), deployed_libraries.is_some()) { (Ok(output), true) => { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ), + CompilationReporter::Standalone(reporter) => reporter + .report_standalone_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ), + } + .expect("Can't happen"); } (Ok(output), false) => { - reporter - .report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ), + CompilationReporter::Standalone(reporter) => reporter + .report_standalone_contracts_compilation_succeeded_event( + compiler.version().clone(), + 
compiler.path(), + false, + input, + output.clone(), + ), + } + .expect("Can't happen"); } (Err(err), true) => { - reporter - .report_post_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_post_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ), + CompilationReporter::Standalone(reporter) => reporter + .report_standalone_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ), + } + .expect("Can't happen"); } (Err(err), false) => { - reporter - .report_pre_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::Execution(reporter) => reporter + .report_pre_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ), + CompilationReporter::Standalone(reporter) => reporter + .report_standalone_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ), + } + .expect("Can't happen"); } } diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs new file mode 100644 index 00000000..0dbb0aa1 --- /dev/null +++ b/crates/core/src/helpers/compile.rs @@ -0,0 +1,148 @@ +use std::sync::Arc; +use std::{borrow::Cow, path::Path}; + +use futures::{Stream, StreamExt, stream}; +use indexmap::{IndexMap, indexmap}; +use revive_dt_common::types::CompilerIdentifier; +use revive_dt_compiler::revive_resolc::Resolc; +use revive_dt_config::Context; +use revive_dt_format::corpus::Corpus; +use 
serde_json::{Value, json}; + +use revive_dt_compiler::Mode; +use revive_dt_compiler::SolidityCompiler; +use revive_dt_format::metadata::MetadataFile; +use revive_dt_report::{CompilationSpecifier, Reporter, StandaloneCompilationSpecificReporter}; +use tracing::{debug, error, info}; + +pub async fn create_compilation_definitions_stream<'a>( + context: &Context, + corpus: &'a Corpus, + mode: Mode, + reporter: Reporter, +) -> impl Stream> { + let cloned_reporter = reporter.clone(); + stream::iter( + corpus + .compilation_metadata_files_iterator() + .inspect(move |metadata_file| { + cloned_reporter + .report_metadata_file_discovery_event( + metadata_file.metadata_file_path.clone(), + metadata_file.content.clone(), + ) + .unwrap(); + }) + .map(move |metadata_file| { + let reporter = reporter.clone(); + + ( + metadata_file, + Cow::<'_, Mode>::Owned(mode.clone()), + reporter.compilation_specific_reporter(Arc::new(CompilationSpecifier { + solc_mode: mode.clone(), + metadata_file_path: metadata_file.metadata_file_path.clone(), + })), + ) + }), + ) + // Creating the `CompilationDefinition` objects from all of the various objects we have. + .filter_map(move |(metadata_file, mode, reporter)| async move { + // NOTE: Currently always specifying the resolc compiler. + let compiler = Resolc::new(context.clone(), mode.version.clone().map(Into::into)) + .await + .map(|compiler| Box::new(compiler) as Box) + .inspect_err(|err| error!(?err, "Failed to instantiate the compiler")) + .ok()?; + + Some(CompilationDefinition { + metadata: metadata_file, + metadata_file_path: metadata_file.metadata_file_path.as_path(), + mode: mode.clone(), + // NOTE: Currently always specifying the resolc compiler. + compiler_identifier: CompilerIdentifier::Resolc, + compiler, + reporter, + }) + }) + // Filter out the compilations which are incompatible. 
+ .filter_map(move |compilation| async move { + match compilation.check_compatibility() { + Ok(()) => Some(compilation), + Err((reason, additional_information)) => { + debug!( + metadata_file_path = %compilation.metadata.metadata_file_path.display(), + mode = %compilation.mode, + reason, + additional_information = + serde_json::to_string(&additional_information).unwrap(), + "Ignoring Compilation" + ); + compilation + .reporter + .report_standalone_contracts_compilation_ignored_event( + reason.to_string(), + additional_information + .into_iter() + .map(|(k, v)| (k.into(), v)) + .collect::>(), + ) + .expect("Can't fail"); + None + } + } + }) + .inspect(|compilation| { + info!( + metadata_file_path = %compilation.metadata_file_path.display(), + mode = %compilation.mode, + "Created a compilation definition" + ); + }) +} + +/// This is a full description of a compilation to run alongside the full metadata file +/// and the specific mode to compile with. +pub struct CompilationDefinition<'a> { + pub metadata: &'a MetadataFile, + pub metadata_file_path: &'a Path, + pub mode: Cow<'a, Mode>, + pub compiler_identifier: CompilerIdentifier, + pub compiler: Box, + pub reporter: StandaloneCompilationSpecificReporter, +} + +impl<'a> CompilationDefinition<'a> { + /// Checks if this compilation can be run with the current configuration. + pub fn check_compatibility(&self) -> CompilationCheckFunctionResult { + self.check_metadata_file_ignored()?; + self.check_compiler_compatibility()?; + Ok(()) + } + + /// Checks if the metadata file is ignored or not. + fn check_metadata_file_ignored(&self) -> CompilationCheckFunctionResult { + if self.metadata.ignore.is_some_and(|ignore| ignore) { + Err(("Metadata file is ignored.", indexmap! {})) + } else { + Ok(()) + } + } + + /// Checks if the compiler supports the provided mode. + fn check_compiler_compatibility(&self) -> CompilationCheckFunctionResult { + let mut error_map = indexmap! 
{}; + let is_compatible = self + .compiler + .supports_mode(self.mode.optimize_setting, self.mode.pipeline); + error_map.insert(self.compiler_identifier.into(), json!(is_compatible)); + + if is_compatible { + Ok(()) + } else { + Err(("The compiler does not support this mode.", error_map)) + } + } +} + +type CompilationCheckFunctionResult = Result<(), (&'static str, IndexMap<&'static str, Value>)>; diff --git a/crates/core/src/helpers/mod.rs b/crates/core/src/helpers/mod.rs index d2948af3..236962fe 100644 --- a/crates/core/src/helpers/mod.rs +++ b/crates/core/src/helpers/mod.rs @@ -1,7 +1,9 @@ mod cached_compiler; +mod compile; mod pool; mod test; pub use cached_compiler::*; +pub use compile::*; pub use pool::*; pub use test::*; diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index babfcb1a..6fc7377e 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -1,10 +1,11 @@ +mod compilations; mod differential_benchmarks; mod differential_tests; mod helpers; use anyhow::{Context as _, bail}; use clap::Parser; -use revive_dt_report::{ReportAggregator, TestCaseStatus}; +use revive_dt_report::{CompilationStatus, ReportAggregator, TestCaseStatus}; use schemars::schema_for; use tracing::{info, level_filters::LevelFilter}; use tracing_subscriber::{EnvFilter, FmtSubscriber}; @@ -14,7 +15,7 @@ use revive_dt_core::Platform; use revive_dt_format::metadata::Metadata; use crate::{ - differential_benchmarks::handle_differential_benchmarks, + compilations::handle_compilations, differential_benchmarks::handle_differential_benchmarks, differential_tests::handle_differential_tests, }; @@ -123,11 +124,28 @@ fn main() -> anyhow::Result<()> { Ok(()) } - // TODO: - Context::Compile(context) => { - println!("In Context::Compile"); - println!("{:?}", context); - todo!() - } + Context::Compile(context) => tokio::runtime::Builder::new_multi_thread() + .worker_threads(context.concurrency_configuration.number_of_threads) + .enable_all() + .build() + .expect("Failed 
building the Runtime") + .block_on(async move { + let compilations_handling_task = handle_compilations(*context, reporter); + + let (_, report) = + futures::future::try_join(compilations_handling_task, report_aggregator_task) + .await?; + + let contains_failure = report + .compilation_information + .values() + .any(|report| matches!(report.status, Some(CompilationStatus::Failure { .. }))); + + if contains_failure { + bail!("Some compilations failed") + } + + Ok(()) + }), } } diff --git a/crates/format/src/corpus.rs b/crates/format/src/corpus.rs index 83cc8479..178a9fde 100644 --- a/crates/format/src/corpus.rs +++ b/crates/format/src/corpus.rs @@ -7,7 +7,7 @@ use std::{ use itertools::Itertools; use revive_dt_common::{ iterators::{EitherIter, FilesWithExtensionIterator}, - types::{Mode, ParsedMode, ParsedTestSpecifier}, + types::{Mode, ParsedCompilationSpecifier, ParsedMode, ParsedTestSpecifier}, }; use tracing::{debug, warn}; @@ -19,6 +19,7 @@ use crate::{ #[derive(Default)] pub struct Corpus { test_specifiers: HashMap>, + compilation_specifiers: HashMap>, metadata_files: HashMap, } @@ -59,6 +60,32 @@ impl Corpus { Ok(self) } + pub fn with_compilation_specifier( + mut self, + compilation_specifier: ParsedCompilationSpecifier, + ) -> anyhow::Result { + match &compilation_specifier { + ParsedCompilationSpecifier::FileOrDirectory { + metadata_or_directory_file_path: metadata_file_path, + } => { + let metadata_files = enumerate_metadata_files(metadata_file_path); + self.compilation_specifiers.insert( + compilation_specifier, + metadata_files + .iter() + .map(|metadata_file| metadata_file.metadata_file_path.clone()) + .collect(), + ); + for metadata_file in metadata_files.into_iter() { + self.metadata_files + .insert(metadata_file.metadata_file_path.clone(), metadata_file); + } + } + }; + + Ok(self) + } + pub fn cases_iterator( &self, ) -> impl Iterator)> + '_ { @@ -153,6 +180,17 @@ impl Corpus { iterator.unique_by(|item| (&item.0.metadata_file_path, item.1, 
item.3.clone())) } + /// Iterator over the metadata files for the compilation specifiers. + pub fn compilation_metadata_files_iterator( + &self, + ) -> impl Iterator + '_ { + self.compilation_specifiers + .values() + .flatten() + .map(|path| self.metadata_files.get(path).expect("Must succeed")) + .unique_by(|metadata_file| &metadata_file.metadata_file_path) + } + pub fn metadata_file_count(&self) -> usize { self.metadata_files.len() } diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 149aa6d3..28136b5d 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -112,6 +112,15 @@ impl ReportAggregator { RunnerEvent::PostLinkContractsCompilationFailed(event) => { self.handle_post_link_contracts_compilation_failed_event(*event) } + RunnerEvent::StandaloneContractsCompilationSucceeded(event) => { + self.handle_standalone_contracts_compilation_succeeded_event(*event) + } + RunnerEvent::StandaloneContractsCompilationFailed(event) => { + self.handle_standalone_contracts_compilation_failed_event(*event) + } + RunnerEvent::StandaloneContractsCompilationIgnored(event) => { + self.handle_standalone_contracts_compilation_ignored_event(*event); + } RunnerEvent::LibrariesDeployed(event) => { self.handle_libraries_deployed_event(*event); } @@ -389,6 +398,66 @@ impl ReportAggregator { }); } + fn handle_standalone_contracts_compilation_succeeded_event( + &mut self, + event: StandaloneContractsCompilationSucceededEvent, + ) { + let include_input = self + .report + .context + .report_configuration() + .include_compiler_input; + let include_output = self + .report + .context + .report_configuration() + .include_compiler_output; + + let compilation_report = self.compilation_report(&event.compilation_specifier); + + let compiler_input = if include_input { + event.compiler_input + } else { + None + }; + + compilation_report.status = Some(CompilationStatus::Success { + is_cached: event.is_cached, + compiler_version: 
event.compiler_version, + compiler_path: event.compiler_path, + compiler_input, + compiled_contracts_info: Self::generate_compiled_contracts_info( + event.compiler_output, + include_output, + ), + }); + } + + fn handle_standalone_contracts_compilation_failed_event( + &mut self, + event: StandaloneContractsCompilationFailedEvent, + ) { + let compilation_report = self.compilation_report(&event.compilation_specifier); + + compilation_report.status = Some(CompilationStatus::Failure { + reason: event.reason, + compiler_version: event.compiler_version, + compiler_path: event.compiler_path, + compiler_input: event.compiler_input, + }); + } + + fn handle_standalone_contracts_compilation_ignored_event( + &mut self, + event: StandaloneContractsCompilationIgnoredEvent, + ) { + let report = self.compilation_report(&event.compilation_specifier); + report.status = Some(CompilationStatus::Ignored { + reason: event.reason, + additional_fields: event.additional_fields, + }); + } + fn handle_libraries_deployed_event(&mut self, event: LibrariesDeployedEvent) { self.execution_information(&event.execution_specifier) .deployed_libraries = Some(event.libraries); @@ -563,6 +632,13 @@ impl ReportAggregator { .get_or_insert_default() } + fn compilation_report(&mut self, specifier: &CompilationSpecifier) -> &mut CompilationReport { + self.report + .compilation_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + } + /// Generates the compiled contract information for each contract at each path. fn generate_compiled_contracts_info( compiler_output: CompilerOutput, @@ -626,7 +702,11 @@ pub struct Report { #[serde(default, skip_serializing_if = "Option::is_none")] pub metrics: Option, /// Information relating to each test case. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub execution_information: BTreeMap, + /// Information relating to each compilation if in standalone compilation mode. 
+ #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub compilation_information: BTreeMap, } impl Report { @@ -636,6 +716,7 @@ impl Report { metrics: Default::default(), metadata_files: Default::default(), execution_information: Default::default(), + compilation_information: Default::default(), } } } @@ -741,6 +822,14 @@ pub struct ExecutionInformation { pub deployed_contracts: Option>, } +/// The compilation report. +#[derive(Clone, Debug, Serialize, Deserialize, Default)] +pub struct CompilationReport { + /// The compilation status. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub status: Option, +} + /// Information related to compilation #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(tag = "status")] @@ -777,6 +866,14 @@ pub enum CompilationStatus { #[serde(default, skip_serializing_if = "Option::is_none")] compiler_input: Option, }, + /// The compilation was ignored. + Ignored { + /// The reason behind the compilation being ignored. + reason: String, + /// Additional fields that describe more information on why the compilation is ignored. + #[serde(flatten)] + additional_fields: IndexMap, + }, } /// Information about the compiled contract. diff --git a/crates/report/src/common.rs b/crates/report/src/common.rs index 2c28bf14..47139ef2 100644 --- a/crates/report/src/common.rs +++ b/crates/report/src/common.rs @@ -35,3 +35,10 @@ pub struct StepExecutionSpecifier { pub execution_specifier: Arc, pub step_idx: StepPath, } + +/// An absolute specifier for a compilation. 
+#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct CompilationSpecifier { + pub solc_mode: Mode, + pub metadata_file_path: PathBuf, +} diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index fe4155a7..78bc3e04 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -16,7 +16,10 @@ use tokio::sync::{broadcast, oneshot}; use crate::MinedBlockInformation; use crate::TransactionInformation; -use crate::{ExecutionSpecifier, ReporterEvent, TestSpecifier, common::MetadataFilePath}; +use crate::{ + CompilationSpecifier, ExecutionSpecifier, ReporterEvent, TestSpecifier, + common::MetadataFilePath, +}; macro_rules! __report_gen_emit_test_specific { ( @@ -303,6 +306,106 @@ macro_rules! __report_gen_for_variant_step { }; } +macro_rules! __report_gen_emit_compilation_specific { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* + ; + $( $aname:ident : $aty:ty, )* + ) => { + paste::paste! { + pub fn [< report_ $variant_ident:snake _event >]( + &self + $(, $bname: impl Into<$bty> )* + $(, $aname: impl Into<$aty> )* + ) -> anyhow::Result<()> { + self.report([< $variant_ident Event >] { + $skip_field: self.compilation_specifier.clone() + $(, $bname: $bname.into() )* + $(, $aname: $aname.into() )* + }) + } + } + }; +} + +macro_rules! __report_gen_emit_compilation_specific_by_parse { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* + ) => { + __report_gen_emit_compilation_specific!( + $ident, $variant_ident, $skip_field; + $( $bname : $bty, )* ; $( $aname : $aty, )* + ); + }; +} + +macro_rules! __report_gen_scan_before_compilation { + // Match case: found `compilation_specifier` field - generate the method. 
+ ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + compilation_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_compilation_specific_by_parse!( + $ident, $variant_ident, compilation_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + // Recursive case: field doesn't match, move it to "before" and continue scanning. + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_scan_before_compilation!( + $ident, $variant_ident; + $( $before : $bty, )* $name : $ty, + ; + $( $after : $aty, )* + ; + ); + }; + // Terminal case: no more fields to scan, no `compilation_specifier` found - do nothing. + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + ; + ) => {}; +} + +macro_rules! __report_gen_for_variant_compilation { + // Empty variant case. + ( + $ident:ident, + $variant_ident:ident; + ) => {}; + // Variant with fields - start scanning. + ( + $ident:ident, + $variant_ident:ident; + $( $field_ident:ident : $field_ty:ty ),+ $(,)? + ) => { + __report_gen_scan_before_compilation!( + $ident, $variant_ident; + ; + $( $field_ident : $field_ty, )* + ; + ); + }; +} + /// Defines the runner-event which is sent from the test runners to the report aggregator. /// /// This macro defines a number of things related to the reporting infrastructure and the interface @@ -401,6 +504,16 @@ macro_rules! define_event { } } + pub fn compilation_specific_reporter( + &self, + compilation_specifier: impl Into> + ) -> [< $ident StandaloneCompilationSpecificReporter >] { + [< $ident StandaloneCompilationSpecificReporter >] { + reporter: self.clone(), + compilation_specifier: compilation_specifier.into(), + } + } + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { self.0.send(event.into()).map_err(Into::into) } @@ -480,6 +593,23 @@ macro_rules! 
define_event { __report_gen_for_variant_step! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } )* } + + /// A reporter that's tied to a specific compilation. + #[derive(Clone, Debug)] + pub struct [< $ident StandaloneCompilationSpecificReporter >] { + $vis reporter: [< $ident Reporter >], + $vis compilation_specifier: std::sync::Arc, + } + + impl [< $ident StandaloneCompilationSpecificReporter >] { + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.reporter.report(event) + } + + $( + __report_gen_for_variant_compilation! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + )* + } } }; } @@ -605,6 +735,47 @@ define_event! { /// The failure reason. reason: String, }, + /// An event emitted by the runners when the compilation of the contracts has succeeded. + /// Unlike [`PreLinkContractsCompilationSucceeded`], this should be used in standalone compilation mode. + StandaloneContractsCompilationSucceeded { + /// A specifier for the compilation that's taking place. + compilation_specifier: Arc, + /// The version of the compiler used to compile the contracts. + compiler_version: Version, + /// The path of the compiler used to compile the contracts. + compiler_path: PathBuf, + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. + is_cached: bool, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The output of the compiler. + compiler_output: CompilerOutput + }, + /// An event emitted by the runners when the compilation of the contracts has failed. + /// Unlike [`PreLinkContractsCompilationFailed`], this should be used in standalone compilation mode. + StandaloneContractsCompilationFailed { + /// A specifier for the compilation that's taking place. + compilation_specifier: Arc, + /// The version of the compiler used to compile the contracts. 
+ compiler_version: Option, + /// The path of the compiler used to compile the contracts. + compiler_path: Option, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The failure reason. + reason: String, + }, + /// An event emitted by the runners when a compilation is ignored. + StandaloneContractsCompilationIgnored { + /// A specifier for the compilation that has been ignored. + compilation_specifier: Arc, + /// A reason for the compilation to be ignored. + reason: String, + /// Additional fields that describe more information on why the compilation was ignored. + additional_fields: IndexMap + }, /// An event emitted by the runners when a library has been deployed. LibrariesDeployed { /// A specifier for the execution that's taking place. @@ -667,3 +838,10 @@ impl RunnerEventReporter { pub type Reporter = RunnerEventReporter; pub type TestSpecificReporter = RunnerEventTestSpecificReporter; pub type ExecutionSpecificReporter = RunnerEventExecutionSpecificReporter; +pub type StandaloneCompilationSpecificReporter = RunnerEventStandaloneCompilationSpecificReporter; + +/// A wrapper that allows functions to accept either reporter type for compilation events. +pub enum CompilationReporter<'a> { + Execution(&'a ExecutionSpecificReporter), + Standalone(&'a StandaloneCompilationSpecificReporter), +} From 0d7fdca6b4521ce530be0e0f4a4bb96df149a379 Mon Sep 17 00:00:00 2001 From: elle-j Date: Thu, 19 Feb 2026 18:51:20 +0100 Subject: [PATCH 08/25] Refactor runner_event macros to improve readability and maintainability. This removes the need for a lot of duplicate code. If adding support for a new specifier, instead of having to add 4 new macros that share almost identical logic with the other macros, only 1 match arm needs to be added to one existing macro. 
This refactor also decouples the event's specifier field name from the reporter's specifier field name, allowing them to be named differently if needed. I think this also increases the readability somewhat of the macros, showing more clearly what it matches on. --- crates/report/src/runner_event.rs | 376 ++++++++---------------------- 1 file changed, 93 insertions(+), 283 deletions(-) diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index 78bc3e04..305640ce 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -21,11 +21,13 @@ use crate::{ common::MetadataFilePath, }; -macro_rules! __report_gen_emit_test_specific { +/// Generates a report method that emits an event, auto-filling the specifier from self. +macro_rules! __report_gen_emit_with_specifier { ( $ident:ident, $variant_ident:ident, - $skip_field:ident; + $specifier_field_on_self:ident, + $specifier_field_on_event:ident; $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* @@ -37,7 +39,7 @@ macro_rules! __report_gen_emit_test_specific { $(, $aname: impl Into<$aty> )* ) -> anyhow::Result<()> { self.report([< $variant_ident Event >] { - $skip_field: self.test_specifier.clone() + $specifier_field_on_event: self.$specifier_field_on_self.clone() $(, $bname: $bname.into() )* $(, $aname: $aname.into() )* }) @@ -46,359 +48,147 @@ macro_rules! __report_gen_emit_test_specific { }; } -macro_rules! __report_gen_emit_test_specific_by_parse { +/// Scans event fields looking for a matching specifier field name. +/// +/// Each MATCH arm maps a specifier field on `self` (the reporter) to a specifier field +/// on the event enum variant. This allows for the event's field to have a different name +/// than the reporter's specifier field if needed (e.g., `specifier` instead of `test_specifier`). +/// +/// To support a new specifier field, just add a corresponding MATCH arm. +macro_rules! 
__report_gen_scan_for_specifier { + // MATCH: test_specifier (on self) -> test_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_test_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before { - ( - $ident:ident, $variant_ident:ident; + test_specifier; $( $before:ident : $bty:ty, )* ; test_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_test_specific_by_parse!( - $ident, $variant_ident, test_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + test_specifier, + test_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* - ; - ) => { - __report_gen_scan_before!( - $ident, $variant_ident; - $( $before : $bty, )* $name : $ty, - ; - $( $after : $aty, )* - ; - ); - }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - ; - ) => {}; -} -macro_rules! __report_gen_for_variant { - ( - $ident:ident, - $variant_ident:ident; - ) => {}; - ( - $ident:ident, - $variant_ident:ident; - $( $field_ident:ident : $field_ty:ty ),+ $(,)? - ) => { - __report_gen_scan_before!( - $ident, $variant_ident; - ; - $( $field_ident : $field_ty, )* - ; - ); - }; -} - -macro_rules! __report_gen_emit_execution_specific { + // MATCH: execution_specifier (on self) -> execution_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* - ; - $( $aname:ident : $aty:ty, )* - ) => { - paste::paste! 
{ - pub fn [< report_ $variant_ident:snake _event >]( - &self - $(, $bname: impl Into<$bty> )* - $(, $aname: impl Into<$aty> )* - ) -> anyhow::Result<()> { - self.report([< $variant_ident Event >] { - $skip_field: self.execution_specifier.clone() - $(, $bname: $bname.into() )* - $(, $aname: $aname.into() )* - }) - } - } - }; -} - -macro_rules! __report_gen_emit_execution_specific_by_parse { - ( - $ident:ident, - $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_execution_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before_exec { - ( - $ident:ident, $variant_ident:ident; + execution_specifier; $( $before:ident : $bty:ty, )* ; execution_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_execution_specific_by_parse!( - $ident, $variant_ident, execution_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + execution_specifier, + execution_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* - ; - ) => { - __report_gen_scan_before_exec!( - $ident, $variant_ident; - $( $before : $bty, )* $name : $ty, - ; - $( $after : $aty, )* - ; - ); - }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - ; - ) => {}; -} -macro_rules! __report_gen_for_variant_exec { - ( - $ident:ident, - $variant_ident:ident; - ) => {}; - ( - $ident:ident, - $variant_ident:ident; - $( $field_ident:ident : $field_ty:ty ),+ $(,)? - ) => { - __report_gen_scan_before_exec!( - $ident, $variant_ident; - ; - $( $field_ident : $field_ty, )* - ; - ); - }; -} - -macro_rules! __report_gen_emit_step_execution_specific { + // MATCH: step_specifier (on self) -> step_specifier (on event). 
( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* - ; - $( $aname:ident : $aty:ty, )* - ) => { - paste::paste! { - pub fn [< report_ $variant_ident:snake _event >]( - &self - $(, $bname: impl Into<$bty> )* - $(, $aname: impl Into<$aty> )* - ) -> anyhow::Result<()> { - self.report([< $variant_ident Event >] { - $skip_field: self.step_specifier.clone() - $(, $bname: $bname.into() )* - $(, $aname: $aname.into() )* - }) - } - } - }; -} - -macro_rules! __report_gen_emit_step_execution_specific_by_parse { - ( - $ident:ident, - $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_step_execution_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before_step { - ( - $ident:ident, $variant_ident:ident; + step_specifier; $( $before:ident : $bty:ty, )* ; step_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_step_execution_specific_by_parse!( - $ident, $variant_ident, step_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + step_specifier, + step_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* - ; - ) => { - __report_gen_scan_before_step!( - $ident, $variant_ident; - $( $before : $bty, )* $name : $ty, - ; - $( $after : $aty, )* - ; - ); - }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - ; - ) => {}; -} - -macro_rules! __report_gen_for_variant_step { - ( - $ident:ident, - $variant_ident:ident; - ) => {}; - ( - $ident:ident, - $variant_ident:ident; - $( $field_ident:ident : $field_ty:ty ),+ $(,)? 
- ) => { - __report_gen_scan_before_step!( - $ident, $variant_ident; - ; - $( $field_ident : $field_ty, )* - ; - ); - }; -} - -macro_rules! __report_gen_emit_compilation_specific { - ( - $ident:ident, - $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* - ; - $( $aname:ident : $aty:ty, )* - ) => { - paste::paste! { - pub fn [< report_ $variant_ident:snake _event >]( - &self - $(, $bname: impl Into<$bty> )* - $(, $aname: impl Into<$aty> )* - ) -> anyhow::Result<()> { - self.report([< $variant_ident Event >] { - $skip_field: self.compilation_specifier.clone() - $(, $bname: $bname.into() )* - $(, $aname: $aname.into() )* - }) - } - } - }; -} -macro_rules! __report_gen_emit_compilation_specific_by_parse { + // MATCH: compilation_specifier (on self) -> compilation_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_compilation_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before_compilation { - // Match case: found `compilation_specifier` field - generate the method. - ( - $ident:ident, $variant_ident:ident; + compilation_specifier; $( $before:ident : $bty:ty, )* ; compilation_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_compilation_specific_by_parse!( - $ident, $variant_ident, compilation_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + compilation_specifier, + compilation_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; - // Recursive case: field doesn't match, move it to "before" and continue scanning. + + // RECURSIVE: Field doesn't match, continue scanning. 
( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + $specifier_field_on_self:ident; $( $before:ident : $bty:ty, )* ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + $name:ident : $ty:ty, + $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_scan_before_compilation!( - $ident, $variant_ident; + __report_gen_scan_for_specifier!( + $ident, + $variant_ident, + $specifier_field_on_self; $( $before : $bty, )* $name : $ty, ; $( $after : $aty, )* ; ); }; - // Terminal case: no more fields to scan, no `compilation_specifier` found - do nothing. + + // TERMINAL: No matching specifier found. ( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + $specifier_field_on_self:ident; $( $before:ident : $bty:ty, )* ; ; ) => {}; } -macro_rules! __report_gen_for_variant_compilation { - // Empty variant case. +/// Entry point: Processes a variant and starts scanning for specifier fields. +macro_rules! __report_gen_for_variant { + // Empty variant - no fields. ( $ident:ident, - $variant_ident:ident; + $variant_ident:ident, + $specifier_field_on_self:ident; ) => {}; + // Variant with fields - start scanning. ( $ident:ident, - $variant_ident:ident; + $variant_ident:ident, + $specifier_field_on_self:ident; $( $field_ident:ident : $field_ty:ty ),+ $(,)? ) => { - __report_gen_scan_before_compilation!( - $ident, $variant_ident; + __report_gen_scan_for_specifier!( + $ident, + $variant_ident, + $specifier_field_on_self; ; $( $field_ident : $field_ty, )* ; @@ -555,7 +345,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + test_specifier; + $( $field_ident : $field_ty ),* + } )* } @@ -573,7 +368,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant_exec! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! 
{ + $ident, + $variant_ident, + execution_specifier; + $( $field_ident : $field_ty ),* + } )* } @@ -590,7 +390,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant_step! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + step_specifier; + $( $field_ident : $field_ty ),* + } )* } @@ -607,7 +412,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant_compilation! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + compilation_specifier; + $( $field_ident : $field_ty ),* + } )* } } From 27a7bbb6d784ccf07ed42dc2bec35805b68e5e9f Mon Sep 17 00:00:00 2001 From: elle-j Date: Fri, 20 Feb 2026 10:31:16 +0100 Subject: [PATCH 09/25] Implement CLI reporting for standalone compile mode. --- crates/config/src/lib.rs | 5 + crates/core/src/compilations/entry_point.rs | 212 +++++++++++++++++++- crates/report/src/aggregator.rs | 58 +++++- crates/report/src/reporter_event.rs | 10 +- 4 files changed, 267 insertions(+), 18 deletions(-) diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 2760f72e..07292aea 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -601,6 +601,11 @@ pub struct StandaloneCompilationContext { /// The output format to use for the tool's output. #[arg(short, long, default_value_t = OutputFormat::CargoTestLike)] pub output_format: OutputFormat, + + /// Show verbose compilation details for each contract, rather than for + /// all contracts referenced in/by a metadata file combined. + #[arg(short, long)] + pub verbose: bool, } impl Default for TestExecutionContext { diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 7c83a962..bbf11621 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -1,7 +1,13 @@ //! 
The main entry point into compiling in standalone mode without any test execution. -use std::{collections::BTreeSet, sync::Arc, time::Duration}; +use std::{ + collections::BTreeSet, + io::{BufWriter, Write, stderr}, + sync::Arc, + time::{Duration, Instant}, +}; +use ansi_term::{ANSIStrings, Color}; use anyhow::Context as _; use futures::{FutureExt, StreamExt}; use revive_dt_compiler::{Mode, ModeOptimizerSetting, ModePipeline}; @@ -10,7 +16,7 @@ use tokio::sync::{RwLock, Semaphore}; use tracing::{Instrument, error, info, info_span, instrument}; use revive_dt_config::{Context, OutputFormat, StandaloneCompilationContext}; -use revive_dt_report::Reporter; +use revive_dt_report::{CompilationStatus, Reporter, ReporterEvent}; use crate::{ compilations::Driver, @@ -121,7 +127,8 @@ pub async fn handle_compilations( .expect("Can't fail") }); - let cli_reporting_task = start_cli_reporting_task(context.output_format, reporter); + let cli_reporting_task = + start_cli_reporting_task(context.output_format, context.verbose, reporter); tokio::task::spawn(async move { loop { @@ -141,8 +148,201 @@ pub async fn handle_compilations( Ok(()) } -// TODO: UPDATE! 
#[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] -async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporter) { - todo!() +async fn start_cli_reporting_task(output_format: OutputFormat, verbose: bool, reporter: Reporter) { + let mut aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); + drop(reporter); + + let start = Instant::now(); + + let mut global_success_count = 0; + let mut global_failure_count = 0; + let mut global_ignore_count = 0; + + let mut buf = BufWriter::new(stderr()); + while let Ok(event) = aggregator_events_rx.recv().await { + let ReporterEvent::MetadataFileStandaloneCompilationCompleted { + metadata_file_path, + mode, + status, + } = event + else { + continue; + }; + + match output_format { + OutputFormat::Legacy => { + let _ = write!(buf, "{} - {}: ", mode, metadata_file_path.display()); + let _ = match &status { + CompilationStatus::Success { + is_cached, + compiled_contracts_info, + .. + } => { + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Green.bold().paint("Compilation Succeeded"), + Color::Green.paint(format!( + " - Contracts compiled: {}, Cached: {}", + contract_count, + if *is_cached { "yes" } else { "no" } + )), + ]) + ) + } + CompilationStatus::Failure { reason, .. } => { + global_failure_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Red.bold().paint("Compilation Failed"), + Color::Red.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + CompilationStatus::Ignored { reason, .. 
} => { + global_ignore_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Yellow.bold().paint("Compilation Ignored"), + Color::Yellow.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + }; + let _ = writeln!(buf); + } + OutputFormat::CargoTestLike => { + match &status { + CompilationStatus::Success { + compiled_contracts_info, + .. + } => { + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); + + if verbose { + // Verbose: show header + per-contract lines + summary. + writeln!( + buf, + "\t{} {} - {}\n", + Color::Green.paint("Compiling"), + metadata_file_path.display(), + mode + ) + .unwrap(); + writeln!(buf, "compiling {} contracts", contract_count).unwrap(); + + for (source_path, contracts) in compiled_contracts_info { + for (contract_name, _) in contracts { + writeln!( + buf, + "compile {}::{} ... {}", + source_path.display(), + contract_name, + Color::Green.paint("ok") + ) + .unwrap(); + } + } + writeln!(buf).unwrap(); + + writeln!( + buf, + "compile result: {}. {} contracts compiled", + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + writeln!(buf).unwrap(); + } else { + // Non-verbose: single line with contract count. + writeln!( + buf, + "compile {} ({}) ... {} ({} contracts)", + metadata_file_path.display(), + mode, + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + } + } + CompilationStatus::Failure { reason, .. } => { + global_failure_count += 1; + writeln!( + buf, + "compile {} ({}) ... {}", + metadata_file_path.display(), + mode, + Color::Red.paint(format!("FAILED, {}", reason.trim())) + ) + .unwrap(); + } + CompilationStatus::Ignored { reason, .. } => { + global_ignore_count += 1; + writeln!( + buf, + "compile {} ({}) ... 
{}", + metadata_file_path.display(), + mode, + Color::Yellow.paint(format!("ignored, {}", reason.trim())) + ) + .unwrap(); + } + } + + if aggregator_events_rx.is_empty() { + buf = tokio::task::spawn_blocking(move || { + buf.flush().unwrap(); + buf + }) + .await + .unwrap(); + } + } + } + } + info!("Aggregator Broadcast Channel Closed"); + + // Summary at the end. + let total = global_success_count + global_failure_count + global_ignore_count; + match output_format { + OutputFormat::Legacy => { + writeln!( + buf, + "{} compilations: {} succeeded, {} failed, {} ignored in {} seconds", + total, + Color::Green.paint(global_success_count.to_string()), + Color::Red.paint(global_failure_count.to_string()), + Color::Yellow.paint(global_ignore_count.to_string()), + start.elapsed().as_secs() + ) + .unwrap(); + } + OutputFormat::CargoTestLike => { + writeln!( + buf, + "\nrun finished. {} succeeded; {} failed; {} ignored; finished in {}s", + global_success_count, + global_failure_count, + global_ignore_count, + start.elapsed().as_secs() + ) + .unwrap(); + } + } } diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 28136b5d..7c2cea82 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -413,15 +413,13 @@ impl ReportAggregator { .report_configuration() .include_compiler_output; - let compilation_report = self.compilation_report(&event.compilation_specifier); - let compiler_input = if include_input { event.compiler_input } else { None }; - compilation_report.status = Some(CompilationStatus::Success { + let status = CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, @@ -430,32 +428,70 @@ impl ReportAggregator { event.compiler_output, include_output, ), - }); + }; + + let report = self.compilation_report(&event.compilation_specifier); + report.status = Some(status.clone()); + + 
self.handle_post_standalone_contracts_compilation_status_update( + &event.compilation_specifier, + status, + ); } fn handle_standalone_contracts_compilation_failed_event( &mut self, event: StandaloneContractsCompilationFailedEvent, ) { - let compilation_report = self.compilation_report(&event.compilation_specifier); - - compilation_report.status = Some(CompilationStatus::Failure { + let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input: event.compiler_input, - }); + }; + + let report = self.compilation_report(&event.compilation_specifier); + report.status = Some(status.clone()); + + self.handle_post_standalone_contracts_compilation_status_update( + &event.compilation_specifier, + status, + ); } fn handle_standalone_contracts_compilation_ignored_event( &mut self, event: StandaloneContractsCompilationIgnoredEvent, ) { - let report = self.compilation_report(&event.compilation_specifier); - report.status = Some(CompilationStatus::Ignored { + let status = CompilationStatus::Ignored { reason: event.reason, additional_fields: event.additional_fields, - }); + }; + + let report = self.compilation_report(&event.compilation_specifier); + report.status = Some(status.clone()); + + self.handle_post_standalone_contracts_compilation_status_update( + &event.compilation_specifier, + status, + ); + } + + fn handle_post_standalone_contracts_compilation_status_update( + &mut self, + specifier: &CompilationSpecifier, + status: CompilationStatus, + ) { + let event = ReporterEvent::MetadataFileStandaloneCompilationCompleted { + metadata_file_path: specifier.metadata_file_path.clone().into(), + mode: specifier.solc_mode.clone(), + status, + }; + + // According to the documentation on send, the sending fails if there are no more receiver + // handles. Therefore, this isn't an error that we want to bubble up or anything. If we fail + // to send then we ignore the error. 
+ let _ = self.listener_tx.send(event); } fn handle_libraries_deployed_event(&mut self, event: LibrariesDeployedEvent) { diff --git a/crates/report/src/reporter_event.rs b/crates/report/src/reporter_event.rs index 0211e643..dec4c0d5 100644 --- a/crates/report/src/reporter_event.rs +++ b/crates/report/src/reporter_event.rs @@ -5,7 +5,7 @@ use std::collections::BTreeMap; use revive_dt_compiler::Mode; use revive_dt_format::case::CaseIdx; -use crate::{MetadataFilePath, TestCaseStatus}; +use crate::{CompilationStatus, MetadataFilePath, TestCaseStatus}; #[derive(Clone, Debug)] pub enum ReporterEvent { @@ -19,4 +19,12 @@ pub enum ReporterEvent { /// The status of each one of the cases. case_status: BTreeMap, }, + + /// An event sent by the reporter once an entire metadata file and its referenced + /// contracts have finished compilation. + MetadataFileStandaloneCompilationCompleted { + metadata_file_path: MetadataFilePath, + mode: Mode, + status: CompilationStatus, + }, } From d9f5363c534d94ad1953b6f89c07c1f6d9e38dea Mon Sep 17 00:00:00 2001 From: elle-j Date: Fri, 20 Feb 2026 15:31:03 +0100 Subject: [PATCH 10/25] Fix clippy. --- crates/config/src/lib.rs | 2 +- crates/core/src/compilations/entry_point.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 07292aea..5a661686 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -915,7 +915,7 @@ pub struct CorpusExecutionConfiguration { #[serde_with::serde_as] #[derive(Clone, Debug, Parser, Serialize, Deserialize)] pub struct CorpusCompilationConfiguration { - /// A list of compile specifiers for the compilations that the tool should run. + /// A list of compilation specifiers for the compilations that the tool should run. 
/// /// Compile specifiers follow the following format: /// diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index bbf11621..5234a383 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -247,7 +247,7 @@ async fn start_cli_reporting_task(output_format: OutputFormat, verbose: bool, re writeln!(buf, "compiling {} contracts", contract_count).unwrap(); for (source_path, contracts) in compiled_contracts_info { - for (contract_name, _) in contracts { + for contract_name in contracts.keys() { writeln!( buf, "compile {}::{} ... {}", From b884fa951660a0c8bbbf4a7f989e00ecf7a2c176 Mon Sep 17 00:00:00 2001 From: elle-j Date: Fri, 20 Feb 2026 17:04:44 +0100 Subject: [PATCH 11/25] Key compilation information in report by mode. --- crates/core/src/main.rs | 1 + crates/report/src/aggregator.rs | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index 6fc7377e..bc572e5e 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -139,6 +139,7 @@ fn main() -> anyhow::Result<()> { let contains_failure = report .compilation_information .values() + .flat_map(|reports_per_mode| reports_per_mode.values()) .any(|report| matches!(report.status, Some(CompilationStatus::Failure { .. }))); if contains_failure { diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 7c2cea82..40656564 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -673,6 +673,8 @@ impl ReportAggregator { .compilation_information .entry(specifier.metadata_file_path.clone().into()) .or_default() + .entry(specifier.solc_mode.clone()) + .or_default() } /// Generates the compiled contract information for each contract at each path. 
@@ -742,7 +744,8 @@ pub struct Report { pub execution_information: BTreeMap, /// Information relating to each compilation if in standalone compilation mode. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub compilation_information: BTreeMap, + #[serde_as(as = "BTreeMap<_, BTreeMap>")] + pub compilation_information: BTreeMap>, } impl Report { From 5934bdb105a426b3f89c0ee44cf75c8cbb6e4b91 Mon Sep 17 00:00:00 2001 From: elle-j Date: Fri, 20 Feb 2026 20:00:49 +0100 Subject: [PATCH 12/25] Prepare multi-mode support. --- crates/core/src/compilations/entry_point.rs | 272 +++++++++++--------- crates/core/src/helpers/compile.rs | 5 + crates/report/src/aggregator.rs | 96 +++++-- crates/report/src/reporter_event.rs | 9 +- crates/report/src/runner_event.rs | 5 + 5 files changed, 236 insertions(+), 151 deletions(-) diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 5234a383..311bb801 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -12,7 +12,7 @@ use anyhow::Context as _; use futures::{FutureExt, StreamExt}; use revive_dt_compiler::{Mode, ModeOptimizerSetting, ModePipeline}; use revive_dt_format::corpus::Corpus; -use tokio::sync::{RwLock, Semaphore}; +use tokio::sync::{RwLock, Semaphore, broadcast}; use tracing::{Instrument, error, info, info_span, instrument}; use revive_dt_config::{Context, OutputFormat, StandaloneCompilationContext}; @@ -31,6 +31,10 @@ pub async fn handle_compilations( ) -> anyhow::Result<()> { let reporter_clone = reporter.clone(); + // Subscribe early, before stream collection, to capture all events including + // ignored compilations determined during compatibility checks. + let aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); + // Discover all of the metadata files that are defined in the context. 
let corpus = context .corpus_configuration @@ -59,6 +63,7 @@ pub async fn handle_compilations( .await .collect::>() .await; + drop(reporter); info!( len = compilation_definitions.len(), "Created compilation definitions" @@ -128,7 +133,7 @@ pub async fn handle_compilations( }); let cli_reporting_task = - start_cli_reporting_task(context.output_format, context.verbose, reporter); + start_cli_reporting_task(context.output_format, context.verbose, aggregator_events_rx); tokio::task::spawn(async move { loop { @@ -149,10 +154,11 @@ pub async fn handle_compilations( } #[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] -async fn start_cli_reporting_task(output_format: OutputFormat, verbose: bool, reporter: Reporter) { - let mut aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); - drop(reporter); - +async fn start_cli_reporting_task( + output_format: OutputFormat, + verbose: bool, + mut aggregator_events_rx: broadcast::Receiver, +) { let start = Instant::now(); let mut global_success_count = 0; @@ -161,10 +167,9 @@ async fn start_cli_reporting_task(output_format: OutputFormat, verbose: bool, re let mut buf = BufWriter::new(stderr()); while let Ok(event) = aggregator_events_rx.recv().await { - let ReporterEvent::MetadataFileStandaloneCompilationCompleted { + let ReporterEvent::MetadataFileModeCombinationCompilationCompleted { metadata_file_path, - mode, - status, + compilation_status, } = event else { continue; @@ -172,139 +177,164 @@ async fn start_cli_reporting_task(output_format: OutputFormat, verbose: bool, re match output_format { OutputFormat::Legacy => { - let _ = write!(buf, "{} - {}: ", mode, metadata_file_path.display()); - let _ = match &status { - CompilationStatus::Success { - is_cached, - compiled_contracts_info, - .. 
- } => { - global_success_count += 1; - let contract_count: usize = compiled_contracts_info - .values() - .map(|contracts| contracts.len()) - .sum(); - writeln!( - buf, - "{}", - ANSIStrings(&[ - Color::Green.bold().paint("Compilation Succeeded"), - Color::Green.paint(format!( - " - Contracts compiled: {}, Cached: {}", - contract_count, - if *is_cached { "yes" } else { "no" } - )), - ]) - ) - } - CompilationStatus::Failure { reason, .. } => { - global_failure_count += 1; - writeln!( - buf, - "{}", - ANSIStrings(&[ - Color::Red.bold().paint("Compilation Failed"), - Color::Red.paint(format!(" - Reason: {}", reason.trim())), - ]) - ) - } - CompilationStatus::Ignored { reason, .. } => { - global_ignore_count += 1; - writeln!( - buf, - "{}", - ANSIStrings(&[ - Color::Yellow.bold().paint("Compilation Ignored"), - Color::Yellow.paint(format!(" - Reason: {}", reason.trim())), - ]) - ) - } - }; + let _ = write!(buf, "{}", metadata_file_path.display()); + for (mode, status) in compilation_status { + let _ = write!(buf, "\tMode {}: ", mode); + let _ = match &status { + CompilationStatus::Success { + is_cached, + compiled_contracts_info, + .. + } => { + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Green.bold().paint("Compilation Succeeded"), + Color::Green.paint(format!( + " - Contracts compiled: {}, Cached: {}", + contract_count, + if *is_cached { "yes" } else { "no" } + )), + ]) + ) + } + CompilationStatus::Failure { reason, .. } => { + global_failure_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Red.bold().paint("Compilation Failed"), + Color::Red.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + CompilationStatus::Ignored { reason, .. 
} => { + global_ignore_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Yellow.bold().paint("Compilation Ignored"), + Color::Yellow.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + }; + } let _ = writeln!(buf); } OutputFormat::CargoTestLike => { - match &status { - CompilationStatus::Success { - compiled_contracts_info, - .. - } => { - global_success_count += 1; - let contract_count: usize = compiled_contracts_info - .values() - .map(|contracts| contracts.len()) - .sum(); + let mut success_count = 0; + let mut failure_count = 0; + let mut ignored_count = 0; - if verbose { - // Verbose: show header + per-contract lines + summary. - writeln!( - buf, - "\t{} {} - {}\n", - Color::Green.paint("Compiling"), - metadata_file_path.display(), - mode - ) - .unwrap(); - writeln!(buf, "compiling {} contracts", contract_count).unwrap(); + for (mode, status) in compilation_status { + match &status { + CompilationStatus::Success { + compiled_contracts_info, + .. + } => { + success_count += 1; + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); - for (source_path, contracts) in compiled_contracts_info { - for contract_name in contracts.keys() { - writeln!( - buf, - "compile {}::{} ... {}", - source_path.display(), - contract_name, - Color::Green.paint("ok") - ) - .unwrap(); + if verbose { + // Verbose: show header + per-contract lines + summary. + writeln!( + buf, + "\t{} {} - {}\n", + Color::Green.paint("Compiling"), + metadata_file_path.display(), + mode + ) + .unwrap(); + writeln!(buf, "compiling {} contracts", contract_count).unwrap(); + + for (source_path, contracts) in compiled_contracts_info { + for contract_name in contracts.keys() { + writeln!( + buf, + "compile {}::{} ... 
{}", + source_path.display(), + contract_name, + Color::Green.paint("ok") + ) + .unwrap(); + } } - } - writeln!(buf).unwrap(); + writeln!(buf).unwrap(); + writeln!( + buf, + "compile result: {}. {} contracts compiled", + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + writeln!(buf).unwrap(); + } else { + // Non-verbose: single line with contract count. + writeln!( + buf, + "compile {} ({}) ... {} ({} contracts)", + metadata_file_path.display(), + mode, + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + } + } + CompilationStatus::Failure { reason, .. } => { + failure_count += 1; + global_failure_count += 1; writeln!( buf, - "compile result: {}. {} contracts compiled", - Color::Green.paint("ok"), - contract_count + "compile {} ({}) ... {}", + metadata_file_path.display(), + mode, + Color::Red.paint(format!("FAILED, {}", reason.trim())) ) .unwrap(); - writeln!(buf).unwrap(); - } else { - // Non-verbose: single line with contract count. + } + CompilationStatus::Ignored { reason, .. } => { + ignored_count += 1; + global_ignore_count += 1; writeln!( buf, - "compile {} ({}) ... {} ({} contracts)", + "compile {} ({}) ... {}", metadata_file_path.display(), mode, - Color::Green.paint("ok"), - contract_count + Color::Yellow.paint(format!("ignored, {}", reason.trim())) ) .unwrap(); } } - CompilationStatus::Failure { reason, .. } => { - global_failure_count += 1; - writeln!( - buf, - "compile {} ({}) ... {}", - metadata_file_path.display(), - mode, - Color::Red.paint(format!("FAILED, {}", reason.trim())) - ) - .unwrap(); - } - CompilationStatus::Ignored { reason, .. } => { - global_ignore_count += 1; - writeln!( - buf, - "compile {} ({}) ... {}", - metadata_file_path.display(), - mode, - Color::Yellow.paint(format!("ignored, {}", reason.trim())) - ) - .unwrap(); - } } + let status = if failure_count > 0 { + Color::Red.paint("FAILED") + } else { + Color::Green.paint("ok") + }; + writeln!( + buf, + "compile result: {}. 
{} succeeded; {} failed; {} ignored", + status, success_count, failure_count, ignored_count, + ) + .unwrap(); + writeln!(buf).unwrap(); + if aggregator_events_rx.is_empty() { buf = tokio::task::spawn_blocking(move || { buf.flush().unwrap(); diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs index 0dbb0aa1..699847a1 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -44,6 +44,11 @@ pub async fn create_compilation_definitions_stream<'a>( metadata_file_path: metadata_file.metadata_file_path.clone(), })), ) + }) + .inspect(|(_, _, reporter)| { + reporter + .report_standalone_compilation_discovery_event() + .expect("Can't fail"); }), ) // Creating the `CompilationDefinition` objects from all of the various objects we have. diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 40656564..e470a24b 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -37,6 +37,7 @@ pub struct ReportAggregator { /* Internal Report State */ report: Report, remaining_cases: HashMap>>, + remaining_compilation_modes: HashMap>, /* Channels */ runner_tx: Option>, runner_rx: UnboundedReceiver, @@ -58,6 +59,7 @@ impl ReportAggregator { }, report: Report::new(context), remaining_cases: Default::default(), + remaining_compilation_modes: Default::default(), runner_tx: Some(runner_tx), runner_rx, listener_tx, @@ -88,6 +90,9 @@ impl ReportAggregator { RunnerEvent::TestCaseDiscovery(event) => { self.handle_test_case_discovery(*event); } + RunnerEvent::StandaloneCompilationDiscovery(event) => { + self.handle_standalone_compilation_discovery(*event); + } RunnerEvent::TestSucceeded(event) => { self.handle_test_succeeded_event(*event); } @@ -195,14 +200,25 @@ impl ReportAggregator { .insert(event.test_specifier.case_idx); } + fn handle_standalone_compilation_discovery( + &mut self, + event: StandaloneCompilationDiscoveryEvent, + ) { + self.remaining_compilation_modes + 
.entry( + event + .compilation_specifier + .metadata_file_path + .clone() + .into(), + ) + .or_default() + .insert(event.compilation_specifier.solc_mode.clone()); + } + fn handle_test_succeeded_event(&mut self, event: TestSucceededEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.solc_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. let test_case_report = self.test_case_report(&event.test_specifier); @@ -214,12 +230,7 @@ impl ReportAggregator { fn handle_test_failed_event(&mut self, event: TestFailedEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.solc_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. let test_case_report = self.test_case_report(&event.test_specifier); @@ -231,12 +242,7 @@ impl ReportAggregator { fn handle_test_ignored_event(&mut self, event: TestIgnoredEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.solc_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. 
let test_case_report = self.test_case_report(&event.test_specifier); @@ -402,6 +408,9 @@ impl ReportAggregator { &mut self, event: StandaloneContractsCompilationSucceededEvent, ) { + // Remove this from the set we're tracking since it has completed. + self.remove_remaining_compilation_mode(&event.compilation_specifier); + let include_input = self .report .context @@ -435,7 +444,6 @@ impl ReportAggregator { self.handle_post_standalone_contracts_compilation_status_update( &event.compilation_specifier, - status, ); } @@ -443,6 +451,9 @@ impl ReportAggregator { &mut self, event: StandaloneContractsCompilationFailedEvent, ) { + // Remove this from the set we're tracking since it has completed. + self.remove_remaining_compilation_mode(&event.compilation_specifier); + let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, @@ -455,7 +466,6 @@ impl ReportAggregator { self.handle_post_standalone_contracts_compilation_status_update( &event.compilation_specifier, - status, ); } @@ -463,6 +473,9 @@ impl ReportAggregator { &mut self, event: StandaloneContractsCompilationIgnoredEvent, ) { + // Remove this from the set we're tracking since it has completed. 
+ self.remove_remaining_compilation_mode(&event.compilation_specifier); + let status = CompilationStatus::Ignored { reason: event.reason, additional_fields: event.additional_fields, @@ -473,19 +486,36 @@ impl ReportAggregator { self.handle_post_standalone_contracts_compilation_status_update( &event.compilation_specifier, - status, ); } fn handle_post_standalone_contracts_compilation_status_update( &mut self, specifier: &CompilationSpecifier, - status: CompilationStatus, ) { - let event = ReporterEvent::MetadataFileStandaloneCompilationCompleted { + let remaining_modes = self + .remaining_compilation_modes + .entry(specifier.metadata_file_path.clone().into()) + .or_default(); + if !remaining_modes.is_empty() { + return; + } + + let final_status = self + .report + .compilation_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .iter() + .flat_map(|(mode, report)| { + let status = report.status.clone().expect("Can't be uninitialized"); + Some((mode.clone(), status)) + }) + .collect::>(); + + let event = ReporterEvent::MetadataFileModeCombinationCompilationCompleted { metadata_file_path: specifier.metadata_file_path.clone().into(), - mode: specifier.solc_mode.clone(), - status, + compilation_status: final_status, }; // According to the documentation on send, the sending fails if there are no more receiver @@ -727,6 +757,22 @@ impl ReportAggregator { Err(_) => (false, B256::from_slice(&Sha256::digest(input.as_bytes()))), } } + + fn remove_remaining_case(&mut self, specifier: &TestSpecifier) { + self.remaining_cases + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .entry(specifier.solc_mode.clone()) + .or_default() + .remove(&specifier.case_idx); + } + + fn remove_remaining_compilation_mode(&mut self, specifier: &CompilationSpecifier) { + self.remaining_compilation_modes + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .remove(&specifier.solc_mode); + } } #[serde_as] diff --git 
a/crates/report/src/reporter_event.rs b/crates/report/src/reporter_event.rs index dec4c0d5..c2df136a 100644 --- a/crates/report/src/reporter_event.rs +++ b/crates/report/src/reporter_event.rs @@ -20,11 +20,10 @@ pub enum ReporterEvent { case_status: BTreeMap, }, - /// An event sent by the reporter once an entire metadata file and its referenced - /// contracts have finished compilation. - MetadataFileStandaloneCompilationCompleted { + /// An event sent by the reporter once an entire metadata file and mode combination has + /// finished standalone compilation. + MetadataFileModeCombinationCompilationCompleted { metadata_file_path: MetadataFilePath, - mode: Mode, - status: CompilationStatus, + compilation_status: BTreeMap, }, } diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index 305640ce..049ebff8 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -445,6 +445,11 @@ define_event! { /// A specifier for the test that was discovered. test_specifier: Arc, }, + /// An event emitted by the runners when they discover a standalone compilation. + StandaloneCompilationDiscovery { + /// A specifier for the compilation that was discovered. + compilation_specifier: Arc, + }, /// An event emitted by the runners when a test case is ignored. TestIgnored { /// A specifier for the test that's been ignored. From 4481128604b6ad9b22e860242d3cb5ab8cb8ea11 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 23 Feb 2026 12:51:08 +0100 Subject: [PATCH 13/25] Add compatibility check for pragma solidity version. 
--- Cargo.lock | 1 + crates/core/Cargo.toml | 1 + crates/core/src/compilations/entry_point.rs | 2 +- crates/core/src/helpers/compile.rs | 179 ++++++++++++++++++-- crates/report/src/aggregator.rs | 4 +- 5 files changed, 174 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd1efab4..2c81b2a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5780,6 +5780,7 @@ dependencies = [ "clap", "futures", "indexmap 2.13.0", + "regex", "revive-dt-common", "revive-dt-compiler", "revive-dt-config", diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 2b0f76f6..48c532d7 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -29,6 +29,7 @@ cacache = { workspace = true } clap = { workspace = true } futures = { workspace = true } indexmap = { workspace = true } +regex = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-appender = { workspace = true } diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 311bb801..457fb200 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -42,7 +42,7 @@ pub async fn handle_compilations( .clone() .into_iter() .try_fold(Corpus::default(), Corpus::with_compilation_specifier) - .context("Failed to parse the compile corpus")?; + .context("Failed to parse the compilation corpus")?; info!( len = corpus.metadata_file_count(), "Discovered metadata files" diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs index 699847a1..1263f528 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -1,18 +1,16 @@ -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use std::{borrow::Cow, path::Path}; use futures::{Stream, StreamExt, stream}; use indexmap::{IndexMap, indexmap}; -use revive_dt_common::types::CompilerIdentifier; -use revive_dt_compiler::revive_resolc::Resolc; +use regex::Regex; +use 
revive_dt_common::{cached_fs::read_to_string, types::CompilerIdentifier}; +use revive_dt_compiler::{Mode, SolidityCompiler, revive_resolc::Resolc}; use revive_dt_config::Context; -use revive_dt_format::corpus::Corpus; -use serde_json::{Value, json}; - -use revive_dt_compiler::Mode; -use revive_dt_compiler::SolidityCompiler; -use revive_dt_format::metadata::MetadataFile; +use revive_dt_format::{corpus::Corpus, metadata::MetadataFile}; use revive_dt_report::{CompilationSpecifier, Reporter, StandaloneCompilationSpecificReporter}; +use semver::VersionReq; +use serde_json::{self, json}; use tracing::{debug, error, info}; pub async fn create_compilation_definitions_stream<'a>( @@ -122,6 +120,7 @@ impl<'a> CompilationDefinition<'a> { pub fn check_compatibility(&self) -> CompilationCheckFunctionResult { self.check_metadata_file_ignored()?; self.check_compiler_compatibility()?; + self.check_pragma_solidity_compatibility()?; Ok(()) } @@ -148,6 +147,166 @@ impl<'a> CompilationDefinition<'a> { Err(("The compiler does not support this mode.", error_map)) } } + + /// Checks if the file-specified Solidity version is compatible with the configured version. + fn check_pragma_solidity_compatibility(&self) -> CompilationCheckFunctionResult { + let files_to_compile = self.metadata.files_to_compile().map_err(|e| { + ( + "Failed to enumerate files to compile.", + indexmap! { + "metadata_file_path" => json!(self.metadata_file_path.display().to_string()), + "error" => json!(e.to_string()), + }, + ) + })?; + let mut incompatible_files: Vec = Vec::new(); + + for source_path in files_to_compile { + let source = read_to_string(&source_path).map_err(|e| { + ( + "Failed to read source file.", + indexmap! 
{ + "source_path" => json!(source_path.display().to_string()), + "error" => json!(e.to_string()), + }, + ) + })?; + + if let Some(version_requirement) = Self::parse_pragma_solidity_requirement(&source) { + if !version_requirement.matches(self.compiler.version()) { + incompatible_files.push(json!({ + "source_path": source_path.display().to_string(), + "pragma": version_requirement.to_string(), + })); + } + } + } + + if incompatible_files.is_empty() { + Ok(()) + } else { + Err(( + "Source pragma is incompatible with the Solidity compiler version.", + indexmap! { + "compiler_version" => json!(self.compiler.version().to_string()), + "incompatible_files" => json!(incompatible_files), + }, + )) + } + } + + /// Parses the Solidity version requirement from `source`. + /// Returns `None` if no pragma is found or if it cannot be parsed. + fn parse_pragma_solidity_requirement(source: &str) -> Option { + static PRAGMA_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"pragma\s+solidity\s+(?P[^;]+);").unwrap()); + + let caps = PRAGMA_REGEX.captures(source)?; + let solidity_version_format = caps.name("version")?.as_str().trim(); + let semver_format = Self::solidity_version_to_semver(solidity_version_format); + + VersionReq::parse(&semver_format).ok() + } + + /// Converts Solidity version constraints to semver-compatible format. 
+ /// Example: + /// ```txt + /// Solidity: ">=0.8.0 <0.9.0" or "^0.8.0" or "0.8.33" + /// semver: ">=0.8.0, <0.9.0" or "^0.8.0" or "=0.8.33" + /// ``` + fn solidity_version_to_semver(version: &str) -> String { + version + .split_whitespace() + .map(|part| { + let is_exact_version = part.starts_with(|c: char| c.is_ascii_digit()); + if is_exact_version { + format!("={}", part) + } else { + part.to_string() + } + }) + .collect::>() + .join(", ") + } } -type CompilationCheckFunctionResult = Result<(), (&'static str, IndexMap<&'static str, Value>)>; +type CompilationCheckFunctionResult = + Result<(), (&'static str, IndexMap<&'static str, serde_json::Value>)>; + +#[cfg(test)] +mod tests { + use super::*; + use semver::Version; + + #[test] + fn test_parse_pragma_compound_constraint() { + let source = r#" + // SPDX-License-Identifier: MIT + pragma solidity >=0.8.0 <0.9.0; + + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse(">=0.8.0, <0.9.0").unwrap()); + assert!(req.matches(&Version::new(0, 8, 0))); + assert!(req.matches(&Version::new(0, 8, 99))); + assert!(!req.matches(&Version::new(0, 7, 99))); + assert!(!req.matches(&Version::new(0, 9, 0))); + } + + #[test] + fn test_parse_pragma_exact_version() { + let source = r#" + // SPDX-License-Identifier: MIT + pragma solidity 0.8.19; + + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("=0.8.19").unwrap()); + assert!(req.matches(&Version::new(0, 8, 19))); + assert!(!req.matches(&Version::new(0, 8, 20))); + } + + #[test] + fn test_parse_pragma_caret_version() { + let source = "pragma solidity ^0.8.0;"; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("^0.8.0").unwrap()); + assert!(req.matches(&Version::new(0, 8, 0))); + assert!(req.matches(&Version::new(0, 8, 
33))); + assert!(!req.matches(&Version::new(0, 9, 0))); + assert!(!req.matches(&Version::new(0, 7, 0))); + } + + #[test] + fn test_parse_pragma_tilde_version() { + let source = "pragma solidity ~0.8.19;"; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("~0.8.19").unwrap()); + assert!(req.matches(&Version::new(0, 8, 19))); + assert!(req.matches(&Version::new(0, 8, 33))); + assert!(!req.matches(&Version::new(0, 8, 18))); + assert!(!req.matches(&Version::new(0, 9, 0))); + } + + #[test] + fn test_parse_pragma_upper_bound_version() { + let source = "pragma solidity <=0.4.21;"; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("<=0.4.21").unwrap()); + assert!(req.matches(&Version::new(0, 4, 21))); + assert!(req.matches(&Version::new(0, 4, 20))); + assert!(!req.matches(&Version::new(0, 8, 33))); + } + + #[test] + fn test_parse_pragma_missing() { + let source = r#" + // SPDX-License-Identifier: MIT + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source); + assert!(req.is_none()); + } +} diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index e470a24b..8d3cca44 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -501,7 +501,7 @@ impl ReportAggregator { return; } - let final_status = self + let status_per_mode = self .report .compilation_information .entry(specifier.metadata_file_path.clone().into()) @@ -515,7 +515,7 @@ impl ReportAggregator { let event = ReporterEvent::MetadataFileModeCombinationCompilationCompleted { metadata_file_path: specifier.metadata_file_path.clone().into(), - compilation_status: final_status, + compilation_status: status_per_mode, }; // According to the documentation on send, the sending fails if there are no more receiver From 921c1878dbe01e7c01d39569a25d689e77fc310d Mon Sep 17 00:00:00 2001 
From: elle-j Date: Mon, 23 Feb 2026 16:09:18 +0100 Subject: [PATCH 14/25] Minor docs and structure update. --- .../src/types/parsed_compilation_specifier.rs | 16 +- crates/core/src/compilations/driver.rs | 3 + crates/core/src/helpers/compile.rs | 183 +++++++++--------- crates/report/src/aggregator.rs | 2 + 4 files changed, 98 insertions(+), 106 deletions(-) diff --git a/crates/common/src/types/parsed_compilation_specifier.rs b/crates/common/src/types/parsed_compilation_specifier.rs index 50e8818f..e57c1301 100644 --- a/crates/common/src/types/parsed_compilation_specifier.rs +++ b/crates/common/src/types/parsed_compilation_specifier.rs @@ -1,8 +1,4 @@ -use std::{ - fmt::Display, - path::{Path, PathBuf}, - str::FromStr, -}; +use std::{fmt::Display, path::PathBuf, str::FromStr}; use anyhow::Context as _; use serde::{Deserialize, Serialize}; @@ -16,16 +12,6 @@ pub enum ParsedCompilationSpecifier { }, } -impl ParsedCompilationSpecifier { - pub fn metadata_path(&self) -> &Path { - match self { - ParsedCompilationSpecifier::FileOrDirectory { - metadata_or_directory_file_path: metadata_file_path, - } => metadata_file_path, - } - } -} - impl Display for ParsedCompilationSpecifier { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff --git a/crates/core/src/compilations/driver.rs b/crates/core/src/compilations/driver.rs index fdf35c1d..0d885d31 100644 --- a/crates/core/src/compilations/driver.rs +++ b/crates/core/src/compilations/driver.rs @@ -4,18 +4,21 @@ use tracing::error; use crate::helpers::{CachedCompiler, CompilationDefinition}; +/// The compilation driver. pub struct Driver<'a> { /// The definition of the compilation that the driver is instructed to execute. compilation_definition: &'a CompilationDefinition<'a>, } impl<'a> Driver<'a> { + /// Creates a new driver. 
pub fn new(compilation_definition: &'a CompilationDefinition<'a>) -> Self { Self { compilation_definition, } } + /// Compiles all contracts specified by the [`CompilationDefinition`]. pub async fn compile_all(&self, cached_compiler: &CachedCompiler<'a>) -> Result<()> { cached_compiler .compile_contracts( diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs index 1263f528..ce889ddb 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -13,97 +13,6 @@ use semver::VersionReq; use serde_json::{self, json}; use tracing::{debug, error, info}; -pub async fn create_compilation_definitions_stream<'a>( - context: &Context, - corpus: &'a Corpus, - mode: Mode, - reporter: Reporter, -) -> impl Stream> { - let cloned_reporter = reporter.clone(); - stream::iter( - corpus - .compilation_metadata_files_iterator() - .inspect(move |metadata_file| { - cloned_reporter - .report_metadata_file_discovery_event( - metadata_file.metadata_file_path.clone(), - metadata_file.content.clone(), - ) - .unwrap(); - }) - .map(move |metadata_file| { - let reporter = reporter.clone(); - - ( - metadata_file, - Cow::<'_, Mode>::Owned(mode.clone()), - reporter.compilation_specific_reporter(Arc::new(CompilationSpecifier { - solc_mode: mode.clone(), - metadata_file_path: metadata_file.metadata_file_path.clone(), - })), - ) - }) - .inspect(|(_, _, reporter)| { - reporter - .report_standalone_compilation_discovery_event() - .expect("Can't fail"); - }), - ) - // Creating the `CompilationDefinition` objects from all of the various objects we have. - .filter_map(move |(metadata_file, mode, reporter)| async move { - // NOTE: Currently always specifying the resolc compiler. 
- let compiler = Resolc::new(context.clone(), mode.version.clone().map(Into::into)) - .await - .map(|compiler| Box::new(compiler) as Box) - .inspect_err(|err| error!(?err, "Failed to instantiate the compiler")) - .ok()?; - - Some(CompilationDefinition { - metadata: metadata_file, - metadata_file_path: metadata_file.metadata_file_path.as_path(), - mode: mode.clone(), - // NOTE: Currently always specifying the resolc compiler. - compiler_identifier: CompilerIdentifier::Resolc, - compiler, - reporter, - }) - }) - // Filter out the compilations which are incompatible. - .filter_map(move |compilation| async move { - match compilation.check_compatibility() { - Ok(()) => Some(compilation), - Err((reason, additional_information)) => { - debug!( - metadata_file_path = %compilation.metadata.metadata_file_path.display(), - mode = %compilation.mode, - reason, - additional_information = - serde_json::to_string(&additional_information).unwrap(), - "Ignoring Compilation" - ); - compilation - .reporter - .report_standalone_contracts_compilation_ignored_event( - reason.to_string(), - additional_information - .into_iter() - .map(|(k, v)| (k.into(), v)) - .collect::>(), - ) - .expect("Can't fail"); - None - } - } - }) - .inspect(|compilation| { - info!( - metadata_file_path = %compilation.metadata_file_path.display(), - mode = %compilation.mode, - "Created a compilation definition" - ); - }) -} - /// This is a full description of a compilation to run alongside the full metadata file /// and the specific mode to compile with. pub struct CompilationDefinition<'a> { @@ -233,6 +142,98 @@ impl<'a> CompilationDefinition<'a> { type CompilationCheckFunctionResult = Result<(), (&'static str, IndexMap<&'static str, serde_json::Value>)>; +/// Creates a stream of [`CompilationDefinition`]s for the contracts to be compiled. 
+pub async fn create_compilation_definitions_stream<'a>( + context: &Context, + corpus: &'a Corpus, + mode: Mode, + reporter: Reporter, +) -> impl Stream> { + let cloned_reporter = reporter.clone(); + stream::iter( + corpus + .compilation_metadata_files_iterator() + .inspect(move |metadata_file| { + cloned_reporter + .report_metadata_file_discovery_event( + metadata_file.metadata_file_path.clone(), + metadata_file.content.clone(), + ) + .unwrap(); + }) + .map(move |metadata_file| { + let reporter = reporter.clone(); + + ( + metadata_file, + Cow::<'_, Mode>::Owned(mode.clone()), + reporter.compilation_specific_reporter(Arc::new(CompilationSpecifier { + solc_mode: mode.clone(), + metadata_file_path: metadata_file.metadata_file_path.clone(), + })), + ) + }) + .inspect(|(_, _, reporter)| { + reporter + .report_standalone_compilation_discovery_event() + .expect("Can't fail"); + }), + ) + // Creating the `CompilationDefinition` objects from all of the various objects we have. + .filter_map(move |(metadata_file, mode, reporter)| async move { + // NOTE: Currently always specifying the resolc compiler. + let compiler = Resolc::new(context.clone(), mode.version.clone().map(Into::into)) + .await + .map(|compiler| Box::new(compiler) as Box) + .inspect_err(|err| error!(?err, "Failed to instantiate the compiler")) + .ok()?; + + Some(CompilationDefinition { + metadata: metadata_file, + metadata_file_path: metadata_file.metadata_file_path.as_path(), + mode: mode.clone(), + // NOTE: Currently always specifying the resolc compiler. + compiler_identifier: CompilerIdentifier::Resolc, + compiler, + reporter, + }) + }) + // Filter out the compilations which are incompatible. 
+ .filter_map(move |compilation| async move { + match compilation.check_compatibility() { + Ok(()) => Some(compilation), + Err((reason, additional_information)) => { + debug!( + metadata_file_path = %compilation.metadata.metadata_file_path.display(), + mode = %compilation.mode, + reason, + additional_information = + serde_json::to_string(&additional_information).unwrap(), + "Ignoring Compilation" + ); + compilation + .reporter + .report_standalone_contracts_compilation_ignored_event( + reason.to_string(), + additional_information + .into_iter() + .map(|(k, v)| (k.into(), v)) + .collect::>(), + ) + .expect("Can't fail"); + None + } + } + }) + .inspect(|compilation| { + info!( + metadata_file_path = %compilation.metadata_file_path.display(), + mode = %compilation.mode, + "Created a compilation definition" + ); + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 8d3cca44..d4a5ab8b 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -758,6 +758,7 @@ impl ReportAggregator { } } + /// Removes the case specified by the `specifier` from the tracked remaining cases. fn remove_remaining_case(&mut self, specifier: &TestSpecifier) { self.remaining_cases .entry(specifier.metadata_file_path.clone().into()) @@ -767,6 +768,7 @@ impl ReportAggregator { .remove(&specifier.case_idx); } + /// Removes the compilation mode specified by the `specifier` from the tracked remaining compilation modes. fn remove_remaining_compilation_mode(&mut self, specifier: &CompilationSpecifier) { self.remaining_compilation_modes .entry(specifier.metadata_file_path.clone().into()) From 28d1cbc7b2fdffb106cb2bd4a715dff61c7e99e0 Mon Sep 17 00:00:00 2001 From: elle-j Date: Tue, 24 Feb 2026 16:04:52 +0100 Subject: [PATCH 15/25] Move compilation reports onto the MetadataFileReport. 
--- crates/core/src/main.rs | 6 ++++-- crates/report/src/aggregator.rs | 20 +++++++++++--------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index bc572e5e..3aefd786 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -137,9 +137,11 @@ fn main() -> anyhow::Result<()> { .await?; let contains_failure = report - .compilation_information + .execution_information .values() - .flat_map(|reports_per_mode| reports_per_mode.values()) + .flat_map(|metadata_file_report| { + metadata_file_report.compilation_reports.values() + }) .any(|report| matches!(report.status, Some(CompilationStatus::Failure { .. }))); if contains_failure { diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index d4a5ab8b..9cea2673 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -503,9 +503,10 @@ impl ReportAggregator { let status_per_mode = self .report - .compilation_information + .execution_information .entry(specifier.metadata_file_path.clone().into()) .or_default() + .compilation_reports .iter() .flat_map(|(mode, report)| { let status = report.status.clone().expect("Can't be uninitialized"); @@ -700,9 +701,10 @@ impl ReportAggregator { fn compilation_report(&mut self, specifier: &CompilationSpecifier) -> &mut CompilationReport { self.report - .compilation_information + .execution_information .entry(specifier.metadata_file_path.clone().into()) .or_default() + .compilation_reports .entry(specifier.solc_mode.clone()) .or_default() } @@ -777,7 +779,6 @@ impl ReportAggregator { } } -#[serde_as] #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Report { /// The context that the tool was started up with. @@ -787,13 +788,9 @@ pub struct Report { /// Metrics from the execution. #[serde(default, skip_serializing_if = "Option::is_none")] pub metrics: Option, - /// Information relating to each test case. 
+ /// Information relating to each metadata file after executing the tool. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub execution_information: BTreeMap, - /// Information relating to each compilation if in standalone compilation mode. - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - #[serde_as(as = "BTreeMap<_, BTreeMap>")] - pub compilation_information: BTreeMap>, } impl Report { @@ -803,18 +800,23 @@ impl Report { metrics: Default::default(), metadata_files: Default::default(), execution_information: Default::default(), - compilation_information: Default::default(), } } } +#[serde_as] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct MetadataFileReport { /// Metrics from the execution. #[serde(default, skip_serializing_if = "Option::is_none")] pub metrics: Option, /// The report of each case keyed by the case idx. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub case_reports: BTreeMap, + /// The [`CompilationReport`] for each of the [`Mode`]s. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + #[serde_as(as = "BTreeMap")] + pub compilation_reports: BTreeMap, } #[serde_as] From fe0cc4b17e6f6ebfb82fe2b25d08524fe005d648 Mon Sep 17 00:00:00 2001 From: elle-j Date: Tue, 24 Feb 2026 16:07:20 +0100 Subject: [PATCH 16/25] Remove build_label from context for now. --- crates/config/src/lib.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 5a661686..41ac04c1 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -557,10 +557,6 @@ pub struct ExportGenesisContext { #[derive(Clone, Debug, Parser, Serialize, Deserialize)] pub struct StandaloneCompilationContext { - /// The label for the resolc build used (e.g., linux, macos, windows, wasm). - #[arg(long)] - pub build_label: String, - /// The working directory that the program will use for all of the temporary artifacts needed at /// runtime. 
/// From aa8f68a794b7f1a12643f7119a2799ffdb72a218 Mon Sep 17 00:00:00 2001 From: elle-j Date: Wed, 25 Feb 2026 08:59:07 +0100 Subject: [PATCH 17/25] Share pre-link comp events between execution and standalone contexts. --- crates/core/src/helpers/cached_compiler.rs | 184 ++++++++++----------- crates/core/src/helpers/compile.rs | 14 +- crates/report/src/aggregator.rs | 167 ++++++------------- crates/report/src/common.rs | 11 +- crates/report/src/runner_event.rs | 129 +++++++++------ 5 files changed, 228 insertions(+), 277 deletions(-) diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index 1dbf2aec..02708cc7 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -145,46 +145,41 @@ impl<'a> CachedCompiler<'a> { match self.artifacts_cache.get(&cache_key).await { Some(cache_value) => { - if deployed_libraries.is_some() { - match reporter { - CompilationReporter::Execution(reporter) => reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ), - } - .expect("Can't happen"); - } else { - match reporter { - CompilationReporter::Execution(reporter) => reporter + match reporter { + CompilationReporter::Standalone(reporter) => { + reporter .report_pre_link_contracts_compilation_succeeded_event( compiler.version().clone(), compiler.path(), true, None, cache_value.compiler_output.clone(), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ), + ) + .expect("Can't 
happen"); + } + CompilationReporter::Execution(reporter) => { + if deployed_libraries.is_some() { + reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ) + .expect("Can't happen"); + } else { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ) + .expect("Can't happen"); + } } - .expect("Can't happen"); } cache_value.compiler_output } @@ -261,86 +256,75 @@ async fn compile_contracts( let input = compilation.input().clone(); let output = compilation.try_build(compiler).await; - match (output.as_ref(), deployed_libraries.is_some()) { - (Ok(output), true) => { - match reporter { - CompilationReporter::Execution(reporter) => reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ), - } - .expect("Can't happen"); - } - (Ok(output), false) => { - match reporter { - CompilationReporter::Execution(reporter) => reporter + match reporter { + CompilationReporter::Standalone(reporter) => match output.as_ref() { + Ok(output) => { + reporter .report_pre_link_contracts_compilation_succeeded_event( compiler.version().clone(), compiler.path(), false, input, output.clone(), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ), + ) + .expect("Can't happen"); } - .expect("Can't happen"); - } - (Err(err), true) => { - match reporter { - CompilationReporter::Execution(reporter) => reporter - 
.report_post_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ), - } - .expect("Can't happen"); - } - (Err(err), false) => { - match reporter { - CompilationReporter::Execution(reporter) => reporter + Err(err) => { + reporter .report_pre_link_contracts_compilation_failed_event( compiler.version().clone(), compiler.path().to_path_buf(), input, format!("{err:#}"), - ), - CompilationReporter::Standalone(reporter) => reporter - .report_standalone_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ), + ) + .expect("Can't happen"); + } + }, + CompilationReporter::Execution(reporter) => { + match (output.as_ref(), deployed_libraries.is_some()) { + (Ok(output), true) => { + reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + (Ok(output), false) => { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + (Err(err), true) => { + reporter + .report_post_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } + (Err(err), false) => { + reporter + .report_pre_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } } - .expect("Can't happen"); } } diff --git a/crates/core/src/helpers/compile.rs 
b/crates/core/src/helpers/compile.rs index ce889ddb..0c3bfacb 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -8,7 +8,9 @@ use revive_dt_common::{cached_fs::read_to_string, types::CompilerIdentifier}; use revive_dt_compiler::{Mode, SolidityCompiler, revive_resolc::Resolc}; use revive_dt_config::Context; use revive_dt_format::{corpus::Corpus, metadata::MetadataFile}; -use revive_dt_report::{CompilationSpecifier, Reporter, StandaloneCompilationSpecificReporter}; +use revive_dt_report::{ + Reporter, StandaloneCompilationSpecificReporter, StandaloneCompilationSpecifier, +}; use semver::VersionReq; use serde_json::{self, json}; use tracing::{debug, error, info}; @@ -167,10 +169,12 @@ pub async fn create_compilation_definitions_stream<'a>( ( metadata_file, Cow::<'_, Mode>::Owned(mode.clone()), - reporter.compilation_specific_reporter(Arc::new(CompilationSpecifier { - solc_mode: mode.clone(), - metadata_file_path: metadata_file.metadata_file_path.clone(), - })), + reporter.compilation_specific_reporter(Arc::new( + StandaloneCompilationSpecifier { + solc_mode: mode.clone(), + metadata_file_path: metadata_file.metadata_file_path.clone(), + }, + )), ) }) .inspect(|(_, _, reporter)| { diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 9cea2673..6d9c9f2d 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -117,12 +117,6 @@ impl ReportAggregator { RunnerEvent::PostLinkContractsCompilationFailed(event) => { self.handle_post_link_contracts_compilation_failed_event(*event) } - RunnerEvent::StandaloneContractsCompilationSucceeded(event) => { - self.handle_standalone_contracts_compilation_succeeded_event(*event) - } - RunnerEvent::StandaloneContractsCompilationFailed(event) => { - self.handle_standalone_contracts_compilation_failed_event(*event) - } RunnerEvent::StandaloneContractsCompilationIgnored(event) => { 
self.handle_standalone_contracts_compilation_ignored_event(*event); } @@ -310,150 +304,91 @@ impl ReportAggregator { &mut self, event: PreLinkContractsCompilationSucceededEvent, ) { - let include_input = self - .report - .context - .report_configuration() - .include_compiler_input; - let include_output = self - .report - .context - .report_configuration() - .include_compiler_output; - - let execution_information = self.execution_information(&event.execution_specifier); - - let compiler_input = if include_input { + let report_configuration = self.report.context.report_configuration(); + let compiler_input = if report_configuration.include_compiler_input { event.compiler_input } else { None }; - execution_information.pre_link_compilation_status = Some(CompilationStatus::Success { + let status = CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, compiled_contracts_info: Self::generate_compiled_contracts_info( event.compiler_output, - include_output, + report_configuration.include_compiler_output, ), - }); + }; + + match &event.specifier { + CompilationSpecifier::Execution(specifier) => { + let execution_information = self.execution_information(specifier); + execution_information.pre_link_compilation_status = Some(status); + } + CompilationSpecifier::Standalone(specifier) => { + let report = self.compilation_report(specifier); + report.status = Some(status); + self.handle_post_standalone_contracts_compilation_status_update(specifier); + } + } } fn handle_post_link_contracts_compilation_succeeded_event( &mut self, event: PostLinkContractsCompilationSucceededEvent, ) { - let include_input = self - .report - .context - .report_configuration() - .include_compiler_input; - let include_output = self - .report - .context - .report_configuration() - .include_compiler_output; - - let execution_information = self.execution_information(&event.execution_specifier); - - let 
compiler_input = if include_input { + let report_configuration = self.report.context.report_configuration(); + let compiler_input = if report_configuration.include_compiler_input { event.compiler_input } else { None }; - execution_information.post_link_compilation_status = Some(CompilationStatus::Success { + let status = CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, compiled_contracts_info: Self::generate_compiled_contracts_info( event.compiler_output, - include_output, + report_configuration.include_compiler_output, ), - }); - } + }; - fn handle_pre_link_contracts_compilation_failed_event( - &mut self, - event: PreLinkContractsCompilationFailedEvent, - ) { let execution_information = self.execution_information(&event.execution_specifier); - - execution_information.pre_link_compilation_status = Some(CompilationStatus::Failure { - reason: event.reason, - compiler_version: event.compiler_version, - compiler_path: event.compiler_path, - compiler_input: event.compiler_input, - }); + execution_information.post_link_compilation_status = Some(status); } - fn handle_post_link_contracts_compilation_failed_event( + fn handle_pre_link_contracts_compilation_failed_event( &mut self, - event: PostLinkContractsCompilationFailedEvent, + event: PreLinkContractsCompilationFailedEvent, ) { - let execution_information = self.execution_information(&event.execution_specifier); - - execution_information.post_link_compilation_status = Some(CompilationStatus::Failure { + let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input: event.compiler_input, - }); - } - - fn handle_standalone_contracts_compilation_succeeded_event( - &mut self, - event: StandaloneContractsCompilationSucceededEvent, - ) { - // Remove this from the set we're tracking since it has completed. 
- self.remove_remaining_compilation_mode(&event.compilation_specifier); - - let include_input = self - .report - .context - .report_configuration() - .include_compiler_input; - let include_output = self - .report - .context - .report_configuration() - .include_compiler_output; - - let compiler_input = if include_input { - event.compiler_input - } else { - None - }; - - let status = CompilationStatus::Success { - is_cached: event.is_cached, - compiler_version: event.compiler_version, - compiler_path: event.compiler_path, - compiler_input, - compiled_contracts_info: Self::generate_compiled_contracts_info( - event.compiler_output, - include_output, - ), }; - let report = self.compilation_report(&event.compilation_specifier); - report.status = Some(status.clone()); - - self.handle_post_standalone_contracts_compilation_status_update( - &event.compilation_specifier, - ); + match &event.specifier { + CompilationSpecifier::Execution(specifier) => { + let execution_information = self.execution_information(specifier); + execution_information.pre_link_compilation_status = Some(status); + } + CompilationSpecifier::Standalone(specifier) => { + let report = self.compilation_report(specifier); + report.status = Some(status); + self.handle_post_standalone_contracts_compilation_status_update(specifier); + } + } } - fn handle_standalone_contracts_compilation_failed_event( + fn handle_post_link_contracts_compilation_failed_event( &mut self, - event: StandaloneContractsCompilationFailedEvent, + event: PostLinkContractsCompilationFailedEvent, ) { - // Remove this from the set we're tracking since it has completed. 
- self.remove_remaining_compilation_mode(&event.compilation_specifier); - let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, @@ -461,21 +396,14 @@ impl ReportAggregator { compiler_input: event.compiler_input, }; - let report = self.compilation_report(&event.compilation_specifier); - report.status = Some(status.clone()); - - self.handle_post_standalone_contracts_compilation_status_update( - &event.compilation_specifier, - ); + let execution_information = self.execution_information(&event.execution_specifier); + execution_information.post_link_compilation_status = Some(status); } fn handle_standalone_contracts_compilation_ignored_event( &mut self, event: StandaloneContractsCompilationIgnoredEvent, ) { - // Remove this from the set we're tracking since it has completed. - self.remove_remaining_compilation_mode(&event.compilation_specifier); - let status = CompilationStatus::Ignored { reason: event.reason, additional_fields: event.additional_fields, @@ -483,7 +411,6 @@ impl ReportAggregator { let report = self.compilation_report(&event.compilation_specifier); report.status = Some(status.clone()); - self.handle_post_standalone_contracts_compilation_status_update( &event.compilation_specifier, ); @@ -491,8 +418,11 @@ impl ReportAggregator { fn handle_post_standalone_contracts_compilation_status_update( &mut self, - specifier: &CompilationSpecifier, + specifier: &StandaloneCompilationSpecifier, ) { + // Remove this from the set we're tracking since it has completed. 
+ self.remove_remaining_compilation_mode(specifier); + let remaining_modes = self .remaining_compilation_modes .entry(specifier.metadata_file_path.clone().into()) @@ -699,7 +629,10 @@ impl ReportAggregator { .get_or_insert_default() } - fn compilation_report(&mut self, specifier: &CompilationSpecifier) -> &mut CompilationReport { + fn compilation_report( + &mut self, + specifier: &StandaloneCompilationSpecifier, + ) -> &mut CompilationReport { self.report .execution_information .entry(specifier.metadata_file_path.clone().into()) @@ -771,7 +704,7 @@ impl ReportAggregator { } /// Removes the compilation mode specified by the `specifier` from the tracked remaining compilation modes. - fn remove_remaining_compilation_mode(&mut self, specifier: &CompilationSpecifier) { + fn remove_remaining_compilation_mode(&mut self, specifier: &StandaloneCompilationSpecifier) { self.remaining_compilation_modes .entry(specifier.metadata_file_path.clone().into()) .or_default() diff --git a/crates/report/src/common.rs b/crates/report/src/common.rs index 47139ef2..4cf2a042 100644 --- a/crates/report/src/common.rs +++ b/crates/report/src/common.rs @@ -38,7 +38,16 @@ pub struct StepExecutionSpecifier { /// An absolute specifier for a compilation. #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct CompilationSpecifier { +pub struct StandaloneCompilationSpecifier { pub solc_mode: Mode, pub metadata_file_path: PathBuf, } + +/// An absolute specifier for compilation events depending on the context. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum CompilationSpecifier { + /// Compilation happening as part of test execution. + Execution(Arc), + /// Standalone compilation happening without test execution. 
+ Standalone(Arc), +} diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index 049ebff8..cd4e67e8 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -17,17 +17,29 @@ use tokio::sync::{broadcast, oneshot}; use crate::MinedBlockInformation; use crate::TransactionInformation; use crate::{ - CompilationSpecifier, ExecutionSpecifier, ReporterEvent, TestSpecifier, - common::MetadataFilePath, + CompilationSpecifier, ExecutionSpecifier, ReporterEvent, StandaloneCompilationSpecifier, + TestSpecifier, common::MetadataFilePath, }; +/// Conditionally wraps a value, or returns it as is. +macro_rules! __maybe_wrap { + ($value:expr, $wrapper:path) => { + $wrapper($value) + }; + ($value:expr) => { + $value + }; +} + /// Generates a report method that emits an event, auto-filling the specifier from self. +/// Optionally wraps the specifier in if a wrapper path is provided. macro_rules! __report_gen_emit_with_specifier { ( $ident:ident, $variant_ident:ident, $specifier_field_on_self:ident, - $specifier_field_on_event:ident; + $specifier_field_on_event:ident + $(, $specifier_wrapper:path)?; $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* @@ -39,7 +51,10 @@ macro_rules! __report_gen_emit_with_specifier { $(, $aname: impl Into<$aty> )* ) -> anyhow::Result<()> { self.report([< $variant_ident Event >] { - $specifier_field_on_event: self.$specifier_field_on_self.clone() + $specifier_field_on_event: __maybe_wrap!( + self.$specifier_field_on_self.clone() + $(, $specifier_wrapper)? + ) $(, $bname: $bname.into() )* $(, $aname: $aname.into() )* }) @@ -96,6 +111,27 @@ macro_rules! __report_gen_scan_for_specifier { ); }; + // MATCH: execution_specifier (on self) -> specifier (on event). 
+ ( + $ident:ident, + $variant_ident:ident, + execution_specifier; + $( $before:ident : $bty:ty, )* + ; + specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + execution_specifier, + specifier, + $crate::CompilationSpecifier::Execution; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + // MATCH: step_specifier (on self) -> step_specifier (on event). ( $ident:ident, @@ -136,6 +172,27 @@ macro_rules! __report_gen_scan_for_specifier { ); }; + // MATCH: compilation_specifier (on self) -> specifier (on event). + ( + $ident:ident, + $variant_ident:ident, + compilation_specifier; + $( $before:ident : $bty:ty, )* + ; + specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + compilation_specifier, + specifier, + $crate::CompilationSpecifier::Standalone; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + // RECURSIVE: Field doesn't match, continue scanning. ( $ident:ident, @@ -296,7 +353,7 @@ macro_rules! define_event { pub fn compilation_specific_reporter( &self, - compilation_specifier: impl Into> + compilation_specifier: impl Into> ) -> [< $ident StandaloneCompilationSpecificReporter >] { [< $ident StandaloneCompilationSpecificReporter >] { reporter: self.clone(), @@ -403,7 +460,7 @@ macro_rules! define_event { #[derive(Clone, Debug)] pub struct [< $ident StandaloneCompilationSpecificReporter >] { $vis reporter: [< $ident Reporter >], - $vis compilation_specifier: std::sync::Arc, + $vis compilation_specifier: std::sync::Arc, } impl [< $ident StandaloneCompilationSpecificReporter >] { @@ -448,7 +505,7 @@ define_event! { /// An event emitted by the runners when they discover a standalone compilation. StandaloneCompilationDiscovery { /// A specifier for the compilation that was discovered. 
- compilation_specifier: Arc, + compilation_specifier: Arc, }, /// An event emitted by the runners when a test case is ignored. TestIgnored { @@ -487,14 +544,13 @@ define_event! { /// An event emitted by the runners when the compilation of the contracts has succeeded /// on the pre-link contracts. PreLinkContractsCompilationSucceeded { - /// A specifier for the execution that's taking place. - execution_specifier: Arc, + /// A specifier for the compilation taking place. + specifier: CompilationSpecifier, /// The version of the compiler used to compile the contracts. compiler_version: Version, /// The path of the compiler used to compile the contracts. compiler_path: PathBuf, - /// A flag of whether the contract bytecode and ABI were cached or if they were compiled - /// anew. + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. is_cached: bool, /// The input provided to the compiler - this is optional and not provided if the /// contracts were obtained from the cache. @@ -505,14 +561,13 @@ define_event! { /// An event emitted by the runners when the compilation of the contracts has succeeded /// on the post-link contracts. PostLinkContractsCompilationSucceeded { - /// A specifier for the execution that's taking place. + /// A specifier for the compilation taking place in an execution context. execution_specifier: Arc, /// The version of the compiler used to compile the contracts. compiler_version: Version, /// The path of the compiler used to compile the contracts. compiler_path: PathBuf, - /// A flag of whether the contract bytecode and ABI were cached or if they were compiled - /// anew. + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. is_cached: bool, /// The input provided to the compiler - this is optional and not provided if the /// contracts were obtained from the cache. @@ -520,11 +575,10 @@ define_event! { /// The output of the compiler. 
compiler_output: CompilerOutput }, - /// An event emitted by the runners when the compilation of the pre-link contract has - /// failed. + /// An event emitted by the runners when the compilation of the pre-link contract has failed. PreLinkContractsCompilationFailed { - /// A specifier for the execution that's taking place. - execution_specifier: Arc, + /// A specifier for the compilation taking place. + specifier: CompilationSpecifier, /// The version of the compiler used to compile the contracts. compiler_version: Option, /// The path of the compiler used to compile the contracts. @@ -535,10 +589,9 @@ define_event! { /// The failure reason. reason: String, }, - /// An event emitted by the runners when the compilation of the post-link contract has - /// failed. + /// An event emitted by the runners when the compilation of the post-link contract has failed. PostLinkContractsCompilationFailed { - /// A specifier for the execution that's taking place. + /// A specifier for the compilation taking place in an execution context. execution_specifier: Arc, /// The version of the compiler used to compile the contracts. compiler_version: Option, @@ -550,42 +603,10 @@ define_event! { /// The failure reason. reason: String, }, - /// An event emitted by the runners when the compilation of the contracts has succeeded. - /// Unlike [`PreLinkContractsCompilationSucceeded`], this should be used in standalone compilation mode. - StandaloneContractsCompilationSucceeded { - /// A specifier for the compilation that's taking place. - compilation_specifier: Arc, - /// The version of the compiler used to compile the contracts. - compiler_version: Version, - /// The path of the compiler used to compile the contracts. - compiler_path: PathBuf, - /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. - is_cached: bool, - /// The input provided to the compiler - this is optional and not provided if the - /// contracts were obtained from the cache. 
- compiler_input: Option, - /// The output of the compiler. - compiler_output: CompilerOutput - }, - /// An event emitted by the runners when the compilation of the contracts has failed. - /// Unlike [`PreLinkContractsCompilationFailed`], this should be used in standalone compilation mode. - StandaloneContractsCompilationFailed { - /// A specifier for the compilation that's taking place. - compilation_specifier: Arc, - /// The version of the compiler used to compile the contracts. - compiler_version: Option, - /// The path of the compiler used to compile the contracts. - compiler_path: Option, - /// The input provided to the compiler - this is optional and not provided if the - /// contracts were obtained from the cache. - compiler_input: Option, - /// The failure reason. - reason: String, - }, /// An event emitted by the runners when a compilation is ignored. StandaloneContractsCompilationIgnored { /// A specifier for the compilation that has been ignored. - compilation_specifier: Arc, + compilation_specifier: Arc, /// A reason for the compilation to be ignored. reason: String, /// Additional fields that describe more information on why the compilation was ignored. From 3edf5f9ada2f14c2e295ee6efb6432a5559fbaad Mon Sep 17 00:00:00 2001 From: elle-j Date: Wed, 25 Feb 2026 12:49:24 +0100 Subject: [PATCH 18/25] Remove unnecessary ignore check for compilation. --- crates/core/src/helpers/compile.rs | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs index 0c3bfacb..845cc3d8 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -29,21 +29,11 @@ pub struct CompilationDefinition<'a> { impl<'a> CompilationDefinition<'a> { /// Checks if this compilation can be run with the current configuration. 
pub fn check_compatibility(&self) -> CompilationCheckFunctionResult { - self.check_metadata_file_ignored()?; self.check_compiler_compatibility()?; self.check_pragma_solidity_compatibility()?; Ok(()) } - /// Checks if the metadata file is ignored or not. - fn check_metadata_file_ignored(&self) -> CompilationCheckFunctionResult { - if self.metadata.ignore.is_some_and(|ignore| ignore) { - Err(("Metadata file is ignored.", indexmap! {})) - } else { - Ok(()) - } - } - /// Checks if the compiler supports the provided mode. fn check_compiler_compatibility(&self) -> CompilationCheckFunctionResult { let mut error_map = indexmap! {}; From 8706916023623b4f794e7fba3d136101134896c7 Mon Sep 17 00:00:00 2001 From: elle-j Date: Wed, 25 Feb 2026 14:54:47 +0100 Subject: [PATCH 19/25] Update terminology from "standalone" to "pre-link". --- crates/config/src/lib.rs | 20 ++++---- crates/core/src/compilations/driver.rs | 2 +- crates/core/src/compilations/entry_point.rs | 6 +-- crates/core/src/compilations/mod.rs | 2 +- crates/core/src/helpers/cached_compiler.rs | 4 +- crates/core/src/helpers/compile.rs | 14 +++--- crates/report/src/aggregator.rs | 53 ++++++++++----------- crates/report/src/common.rs | 8 ++-- crates/report/src/reporter_event.rs | 2 +- crates/report/src/runner_event.rs | 34 ++++++------- 10 files changed, 69 insertions(+), 76 deletions(-) diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 41ac04c1..de7d5772 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -42,7 +42,7 @@ pub enum Context { ExportGenesis(Box), /// Compiles contracts using the provided compiler build, without executing any tests. 
- Compile(Box), + Compile(Box), } impl Context { @@ -556,7 +556,7 @@ pub struct ExportGenesisContext { } #[derive(Clone, Debug, Parser, Serialize, Deserialize)] -pub struct StandaloneCompilationContext { +pub struct PreLinkCompilationContext { /// The working directory that the program will use for all of the temporary artifacts needed at /// runtime. /// @@ -838,49 +838,49 @@ impl AsRef for ExportGenesisContext { } } -impl Default for StandaloneCompilationContext { +impl Default for PreLinkCompilationContext { fn default() -> Self { Self::parse_from(["compilation-context", "--compile", "."]) } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &WorkingDirectoryConfiguration { &self.working_directory } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &CorpusCompilationConfiguration { &self.corpus_configuration } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &SolcConfiguration { &self.solc_configuration } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &ResolcConfiguration { &self.resolc_configuration } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &ConcurrencyConfiguration { &self.concurrency_configuration } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &CompilationConfiguration { &self.compilation_configuration } } -impl AsRef for StandaloneCompilationContext { +impl AsRef for PreLinkCompilationContext { fn as_ref(&self) -> &ReportConfiguration { &self.report_configuration } diff --git a/crates/core/src/compilations/driver.rs b/crates/core/src/compilations/driver.rs index 0d885d31..dceb2544 100644 --- a/crates/core/src/compilations/driver.rs +++ 
b/crates/core/src/compilations/driver.rs @@ -29,7 +29,7 @@ impl<'a> Driver<'a> { self.compilation_definition.compiler.as_ref(), self.compilation_definition.compiler_identifier, None, - &CompilationReporter::Standalone(&self.compilation_definition.reporter), + &CompilationReporter::PreLink(&self.compilation_definition.reporter), ) .await .inspect_err(|err| error!(?err, "Compilation failed")) diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 457fb200..848a152a 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -1,4 +1,4 @@ -//! The main entry point into compiling in standalone mode without any test execution. +//! The main entry point into compiling in pre-link-only mode without any test execution. use std::{ collections::BTreeSet, @@ -15,7 +15,7 @@ use revive_dt_format::corpus::Corpus; use tokio::sync::{RwLock, Semaphore, broadcast}; use tracing::{Instrument, error, info, info_span, instrument}; -use revive_dt_config::{Context, OutputFormat, StandaloneCompilationContext}; +use revive_dt_config::{Context, OutputFormat, PreLinkCompilationContext}; use revive_dt_report::{CompilationStatus, Reporter, ReporterEvent}; use crate::{ @@ -26,7 +26,7 @@ use crate::{ /// Handles the compilations according to the information defined in the context. #[instrument(level = "info", err(Debug), skip_all)] pub async fn handle_compilations( - context: StandaloneCompilationContext, + context: PreLinkCompilationContext, reporter: Reporter, ) -> anyhow::Result<()> { let reporter_clone = reporter.clone(); diff --git a/crates/core/src/compilations/mod.rs b/crates/core/src/compilations/mod.rs index 739b760c..4741118b 100644 --- a/crates/core/src/compilations/mod.rs +++ b/crates/core/src/compilations/mod.rs @@ -1,6 +1,6 @@ //! This module contains all of the code responsible for performing compilations, //! 
including the driver implementation and the core logic that allows for contracts -//! to be compiled in standalone mode without any test execution. +//! to be compiled in pre-link-only mode without any test execution. mod driver; mod entry_point; diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index 02708cc7..63bf2abf 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -146,7 +146,7 @@ impl<'a> CachedCompiler<'a> { match self.artifacts_cache.get(&cache_key).await { Some(cache_value) => { match reporter { - CompilationReporter::Standalone(reporter) => { + CompilationReporter::PreLink(reporter) => { reporter .report_pre_link_contracts_compilation_succeeded_event( compiler.version().clone(), @@ -257,7 +257,7 @@ async fn compile_contracts( let output = compilation.try_build(compiler).await; match reporter { - CompilationReporter::Standalone(reporter) => match output.as_ref() { + CompilationReporter::PreLink(reporter) => match output.as_ref() { Ok(output) => { reporter .report_pre_link_contracts_compilation_succeeded_event( diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs index 845cc3d8..795638b3 100644 --- a/crates/core/src/helpers/compile.rs +++ b/crates/core/src/helpers/compile.rs @@ -8,9 +8,7 @@ use revive_dt_common::{cached_fs::read_to_string, types::CompilerIdentifier}; use revive_dt_compiler::{Mode, SolidityCompiler, revive_resolc::Resolc}; use revive_dt_config::Context; use revive_dt_format::{corpus::Corpus, metadata::MetadataFile}; -use revive_dt_report::{ - Reporter, StandaloneCompilationSpecificReporter, StandaloneCompilationSpecifier, -}; +use revive_dt_report::{PreLinkCompilationSpecificReporter, PreLinkCompilationSpecifier, Reporter}; use semver::VersionReq; use serde_json::{self, json}; use tracing::{debug, error, info}; @@ -23,7 +21,7 @@ pub struct CompilationDefinition<'a> { pub mode: Cow<'a, Mode>, pub 
compiler_identifier: CompilerIdentifier, pub compiler: Box, - pub reporter: StandaloneCompilationSpecificReporter, + pub reporter: PreLinkCompilationSpecificReporter, } impl<'a> CompilationDefinition<'a> { @@ -159,8 +157,8 @@ pub async fn create_compilation_definitions_stream<'a>( ( metadata_file, Cow::<'_, Mode>::Owned(mode.clone()), - reporter.compilation_specific_reporter(Arc::new( - StandaloneCompilationSpecifier { + reporter.pre_link_compilation_specific_reporter(Arc::new( + PreLinkCompilationSpecifier { solc_mode: mode.clone(), metadata_file_path: metadata_file.metadata_file_path.clone(), }, @@ -169,7 +167,7 @@ pub async fn create_compilation_definitions_stream<'a>( }) .inspect(|(_, _, reporter)| { reporter - .report_standalone_compilation_discovery_event() + .report_pre_link_compilation_discovery_event() .expect("Can't fail"); }), ) @@ -207,7 +205,7 @@ pub async fn create_compilation_definitions_stream<'a>( ); compilation .reporter - .report_standalone_contracts_compilation_ignored_event( + .report_pre_link_contracts_compilation_ignored_event( reason.to_string(), additional_information .into_iter() diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 6d9c9f2d..9393636e 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -90,8 +90,8 @@ impl ReportAggregator { RunnerEvent::TestCaseDiscovery(event) => { self.handle_test_case_discovery(*event); } - RunnerEvent::StandaloneCompilationDiscovery(event) => { - self.handle_standalone_compilation_discovery(*event); + RunnerEvent::PreLinkCompilationDiscovery(event) => { + self.handle_pre_link_compilation_discovery(*event); } RunnerEvent::TestSucceeded(event) => { self.handle_test_succeeded_event(*event); @@ -117,8 +117,8 @@ impl ReportAggregator { RunnerEvent::PostLinkContractsCompilationFailed(event) => { self.handle_post_link_contracts_compilation_failed_event(*event) } - RunnerEvent::StandaloneContractsCompilationIgnored(event) => { - 
self.handle_standalone_contracts_compilation_ignored_event(*event); + RunnerEvent::PreLinkContractsCompilationIgnored(event) => { + self.handle_pre_link_contracts_compilation_ignored_event(*event); } RunnerEvent::LibrariesDeployed(event) => { self.handle_libraries_deployed_event(*event); @@ -194,10 +194,7 @@ impl ReportAggregator { .insert(event.test_specifier.case_idx); } - fn handle_standalone_compilation_discovery( - &mut self, - event: StandaloneCompilationDiscoveryEvent, - ) { + fn handle_pre_link_compilation_discovery(&mut self, event: PreLinkCompilationDiscoveryEvent) { self.remaining_compilation_modes .entry( event @@ -327,10 +324,10 @@ impl ReportAggregator { let execution_information = self.execution_information(specifier); execution_information.pre_link_compilation_status = Some(status); } - CompilationSpecifier::Standalone(specifier) => { - let report = self.compilation_report(specifier); + CompilationSpecifier::PreLink(specifier) => { + let report = self.pre_link_compilation_report(specifier); report.status = Some(status); - self.handle_post_standalone_contracts_compilation_status_update(specifier); + self.handle_post_pre_link_contracts_compilation_status_update(specifier); } } } @@ -377,10 +374,10 @@ impl ReportAggregator { let execution_information = self.execution_information(specifier); execution_information.pre_link_compilation_status = Some(status); } - CompilationSpecifier::Standalone(specifier) => { - let report = self.compilation_report(specifier); + CompilationSpecifier::PreLink(specifier) => { + let report = self.pre_link_compilation_report(specifier); report.status = Some(status); - self.handle_post_standalone_contracts_compilation_status_update(specifier); + self.handle_post_pre_link_contracts_compilation_status_update(specifier); } } } @@ -400,25 +397,23 @@ impl ReportAggregator { execution_information.post_link_compilation_status = Some(status); } - fn handle_standalone_contracts_compilation_ignored_event( + fn 
handle_pre_link_contracts_compilation_ignored_event( &mut self, - event: StandaloneContractsCompilationIgnoredEvent, + event: PreLinkContractsCompilationIgnoredEvent, ) { let status = CompilationStatus::Ignored { reason: event.reason, additional_fields: event.additional_fields, }; - let report = self.compilation_report(&event.compilation_specifier); + let report = self.pre_link_compilation_report(&event.compilation_specifier); report.status = Some(status.clone()); - self.handle_post_standalone_contracts_compilation_status_update( - &event.compilation_specifier, - ); + self.handle_post_pre_link_contracts_compilation_status_update(&event.compilation_specifier); } - fn handle_post_standalone_contracts_compilation_status_update( + fn handle_post_pre_link_contracts_compilation_status_update( &mut self, - specifier: &StandaloneCompilationSpecifier, + specifier: &PreLinkCompilationSpecifier, ) { // Remove this from the set we're tracking since it has completed. self.remove_remaining_compilation_mode(specifier); @@ -629,10 +624,10 @@ impl ReportAggregator { .get_or_insert_default() } - fn compilation_report( + fn pre_link_compilation_report( &mut self, - specifier: &StandaloneCompilationSpecifier, - ) -> &mut CompilationReport { + specifier: &PreLinkCompilationSpecifier, + ) -> &mut PreLinkCompilationReport { self.report .execution_information .entry(specifier.metadata_file_path.clone().into()) @@ -704,7 +699,7 @@ impl ReportAggregator { } /// Removes the compilation mode specified by the `specifier` from the tracked remaining compilation modes. - fn remove_remaining_compilation_mode(&mut self, specifier: &StandaloneCompilationSpecifier) { + fn remove_remaining_compilation_mode(&mut self, specifier: &PreLinkCompilationSpecifier) { self.remaining_compilation_modes .entry(specifier.metadata_file_path.clone().into()) .or_default() @@ -749,7 +744,7 @@ pub struct MetadataFileReport { /// The [`CompilationReport`] for each of the [`Mode`]s. 
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")] #[serde_as(as = "BTreeMap")] - pub compilation_reports: BTreeMap, + pub compilation_reports: BTreeMap, } #[serde_as] @@ -844,9 +839,9 @@ pub struct ExecutionInformation { pub deployed_contracts: Option>, } -/// The compilation report. +/// The pre-link-only compilation report. #[derive(Clone, Debug, Serialize, Deserialize, Default)] -pub struct CompilationReport { +pub struct PreLinkCompilationReport { /// The compilation status. #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option, diff --git a/crates/report/src/common.rs b/crates/report/src/common.rs index 4cf2a042..30edf6dd 100644 --- a/crates/report/src/common.rs +++ b/crates/report/src/common.rs @@ -36,9 +36,9 @@ pub struct StepExecutionSpecifier { pub step_idx: StepPath, } -/// An absolute specifier for a compilation. +/// An absolute specifier for pre-link-only compilation. #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct StandaloneCompilationSpecifier { +pub struct PreLinkCompilationSpecifier { pub solc_mode: Mode, pub metadata_file_path: PathBuf, } @@ -48,6 +48,6 @@ pub struct StandaloneCompilationSpecifier { pub enum CompilationSpecifier { /// Compilation happening as part of test execution. Execution(Arc), - /// Standalone compilation happening without test execution. - Standalone(Arc), + /// Pre-link-only compilation happening without test execution. + PreLink(Arc), } diff --git a/crates/report/src/reporter_event.rs b/crates/report/src/reporter_event.rs index c2df136a..59aa9c7a 100644 --- a/crates/report/src/reporter_event.rs +++ b/crates/report/src/reporter_event.rs @@ -21,7 +21,7 @@ pub enum ReporterEvent { }, /// An event sent by the reporter once an entire metadata file and mode combination has - /// finished standalone compilation. + /// finished pre-link-only compilation. 
MetadataFileModeCombinationCompilationCompleted { metadata_file_path: MetadataFilePath, compilation_status: BTreeMap, diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index cd4e67e8..dd497398 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -17,7 +17,7 @@ use tokio::sync::{broadcast, oneshot}; use crate::MinedBlockInformation; use crate::TransactionInformation; use crate::{ - CompilationSpecifier, ExecutionSpecifier, ReporterEvent, StandaloneCompilationSpecifier, + CompilationSpecifier, ExecutionSpecifier, PreLinkCompilationSpecifier, ReporterEvent, TestSpecifier, common::MetadataFilePath, }; @@ -188,7 +188,7 @@ macro_rules! __report_gen_scan_for_specifier { $variant_ident, compilation_specifier, specifier, - $crate::CompilationSpecifier::Standalone; + $crate::CompilationSpecifier::PreLink; $( $before : $bty, )* ; $( $after : $aty, )* ); }; @@ -351,11 +351,11 @@ macro_rules! define_event { } } - pub fn compilation_specific_reporter( + pub fn pre_link_compilation_specific_reporter( &self, - compilation_specifier: impl Into> - ) -> [< $ident StandaloneCompilationSpecificReporter >] { - [< $ident StandaloneCompilationSpecificReporter >] { + compilation_specifier: impl Into> + ) -> [< $ident PreLinkCompilationSpecificReporter >] { + [< $ident PreLinkCompilationSpecificReporter >] { reporter: self.clone(), compilation_specifier: compilation_specifier.into(), } @@ -458,12 +458,12 @@ macro_rules! define_event { /// A reporter that's tied to a specific compilation. 
#[derive(Clone, Debug)] - pub struct [< $ident StandaloneCompilationSpecificReporter >] { + pub struct [< $ident PreLinkCompilationSpecificReporter >] { $vis reporter: [< $ident Reporter >], - $vis compilation_specifier: std::sync::Arc, + $vis compilation_specifier: std::sync::Arc, } - impl [< $ident StandaloneCompilationSpecificReporter >] { + impl [< $ident PreLinkCompilationSpecificReporter >] { fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { self.reporter.report(event) } @@ -502,10 +502,10 @@ define_event! { /// A specifier for the test that was discovered. test_specifier: Arc, }, - /// An event emitted by the runners when they discover a standalone compilation. - StandaloneCompilationDiscovery { + /// An event emitted by the runners when they discover a pre-link-only compilation. + PreLinkCompilationDiscovery { /// A specifier for the compilation that was discovered. - compilation_specifier: Arc, + compilation_specifier: Arc, }, /// An event emitted by the runners when a test case is ignored. TestIgnored { @@ -603,10 +603,10 @@ define_event! { /// The failure reason. reason: String, }, - /// An event emitted by the runners when a compilation is ignored. - StandaloneContractsCompilationIgnored { + /// An event emitted by the runners when a pre-link-only compilation is ignored. + PreLinkContractsCompilationIgnored { /// A specifier for the compilation that has been ignored. - compilation_specifier: Arc, + compilation_specifier: Arc, /// A reason for the compilation to be ignored. reason: String, /// Additional fields that describe more information on why the compilation was ignored. 
@@ -674,10 +674,10 @@ impl RunnerEventReporter { pub type Reporter = RunnerEventReporter; pub type TestSpecificReporter = RunnerEventTestSpecificReporter; pub type ExecutionSpecificReporter = RunnerEventExecutionSpecificReporter; -pub type StandaloneCompilationSpecificReporter = RunnerEventStandaloneCompilationSpecificReporter; +pub type PreLinkCompilationSpecificReporter = RunnerEventPreLinkCompilationSpecificReporter; /// A wrapper that allows functions to accept either reporter type for compilation events. pub enum CompilationReporter<'a> { Execution(&'a ExecutionSpecificReporter), - Standalone(&'a StandaloneCompilationSpecificReporter), + PreLink(&'a PreLinkCompilationSpecificReporter), } From 6c680ffaa4d1099cee4e24400a5148d5e812be8f Mon Sep 17 00:00:00 2001 From: elle-j Date: Wed, 25 Feb 2026 17:03:34 +0100 Subject: [PATCH 20/25] Update match cases in cached compiler. --- crates/core/src/helpers/cached_compiler.rs | 138 ++++++++++----------- 1 file changed, 67 insertions(+), 71 deletions(-) diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index 63bf2abf..c9cac865 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -158,27 +158,15 @@ impl<'a> CachedCompiler<'a> { .expect("Can't happen"); } CompilationReporter::Execution(reporter) => { - if deployed_libraries.is_some() { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); - } else { - reporter - .report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); - } + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), 
+ ) + .expect("Can't happen"); } } cache_value.compiler_output @@ -256,9 +244,25 @@ async fn compile_contracts( let input = compilation.input().clone(); let output = compilation.try_build(compiler).await; - match reporter { - CompilationReporter::PreLink(reporter) => match output.as_ref() { - Ok(output) => { + match (output.as_ref(), deployed_libraries.is_some()) { + (Ok(output), true) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + CompilationReporter::PreLink(_) => { + unreachable!(); + } + }, + (Ok(output), false) => match reporter { + CompilationReporter::Execution(reporter) => { reporter .report_pre_link_contracts_compilation_succeeded_event( compiler.version().clone(), @@ -269,9 +273,22 @@ async fn compile_contracts( ) .expect("Can't happen"); } - Err(err) => { + CompilationReporter::PreLink(reporter) => { reporter - .report_pre_link_contracts_compilation_failed_event( + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + }, + (Err(err), true) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_post_link_contracts_compilation_failed_event( compiler.version().clone(), compiler.path().to_path_buf(), input, @@ -279,53 +296,32 @@ async fn compile_contracts( ) .expect("Can't happen"); } + CompilationReporter::PreLink(_) => { + unreachable!(); + } }, - CompilationReporter::Execution(reporter) => { - match (output.as_ref(), deployed_libraries.is_some()) { - (Ok(output), true) => { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); - } - (Ok(output), false) => { - 
reporter - .report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); - } - (Err(err), true) => { - reporter - .report_post_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); - } - (Err(err), false) => { - reporter - .report_pre_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); - } + (Err(err), false) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_pre_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); } - } + CompilationReporter::PreLink(reporter) => { + reporter + .report_pre_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } + }, } output From ab8346a494d6a77c6dc3bd635bcf00ac225056aa Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 2 Mar 2026 17:41:16 +0100 Subject: [PATCH 21/25] Share orchestration logic between tests and compilations. 
--- crates/config/src/lib.rs | 3 +- crates/core/src/compilations/entry_point.rs | 155 +++++----- .../src/differential_tests/entry_point.rs | 291 ++++++------------ .../helpers/corpus_definition_processor.rs | 228 ++++++++++++++ crates/core/src/helpers/mod.rs | 2 + 5 files changed, 406 insertions(+), 273 deletions(-) create mode 100644 crates/core/src/helpers/corpus_definition_processor.rs diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 3ade7d0c..cba3d88b 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -105,6 +105,7 @@ mod context { pub output_format: OutputFormatConfiguration, pub working_directory: WorkingDirectoryConfiguration, pub corpus: CorpusCompilationConfiguration, + pub fail_fast: FailFastConfiguration, pub solc: SolcConfiguration, pub resolc: ResolcConfiguration, pub concurrency: ConcurrencyConfiguration, @@ -214,7 +215,7 @@ mod context { #[serde_with::serde_as] #[configuration(key = "corpus")] pub struct CorpusCompilationConfiguration { - /// A list of compilation specifiers for the compilations that the tool should run. + /// A list of compilation specifiers for the pre-link-only compilations that the tool should run. /// /// Compile specifiers follow the following format: /// diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 38998e50..f59c437b 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -1,28 +1,82 @@ //! The main entry point into compiling in pre-link-only mode without any test execution. 
use std::{ - collections::BTreeSet, io::{BufWriter, Write, stderr}, - sync::Arc, - time::{Duration, Instant}, + time::Instant, }; use ansi_term::{ANSIStrings, Color}; use anyhow::Context as _; -use futures::{FutureExt, StreamExt}; +use futures::StreamExt; +use indexmap::IndexMap; use revive_dt_compiler::{Mode, ModeOptimizerSetting, ModePipeline}; +use revive_dt_config::{ + Compile, Context, FailFastConfiguration, OutputFormat, OutputFormatConfiguration, +}; use revive_dt_format::corpus::Corpus; -use tokio::sync::{RwLock, Semaphore, broadcast}; -use tracing::{Instrument, error, info, info_span, instrument}; - -use revive_dt_config::{Compile, Context, OutputFormat, OutputFormatConfiguration}; use revive_dt_report::{CompilationStatus, Reporter, ReporterEvent}; +use tokio::sync::broadcast; +use tracing::{info, info_span, instrument}; use crate::{ compilations::Driver, - helpers::{CachedCompiler, create_compilation_definitions_stream}, + helpers::{ + CachedCompiler, CompilationDefinition, CorpusDefinitionProcessor, + create_compilation_definitions_stream, process_corpus, + }, }; +/// The definition processor for compilations. +struct CompilationDefinitionProcessor; + +impl CorpusDefinitionProcessor for CompilationDefinitionProcessor { + type Definition<'a> = CompilationDefinition<'a>; + type ProcessResult = (); + type State = (); + + async fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + _state: Self::State, + ) -> anyhow::Result { + Driver::new(definition).compile_all(cached_compiler).await?; + Ok(()) + } + + /* `on_success` and `on_failure` use the default no-op implementations as reporting already happens by the cached compiler. 
*/ + + fn on_ignored(definition: &Self::Definition<'_>, reason: String) -> anyhow::Result<()> { + definition + .reporter + .report_pre_link_contracts_compilation_ignored_event(reason, IndexMap::new())?; + Ok(()) + } + + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option> { + fail_fast.fail_fast.then(|| { + let reporter = definition.reporter.clone(); + Box::new(move || { + let _ = reporter.report_pre_link_contracts_compilation_ignored_event( + "Aborted due to fail-fast".to_string(), + IndexMap::new(), + ); + }) as Box + }) + } + + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span { + info_span!( + "Compiling Related Files", + compilation_id = task_id, + metadata_file_path = %definition.metadata_file_path.display(), + mode = %definition.mode, + ) + } +} + /// Handles the compilations according to the information defined in the context. #[instrument(level = "info", err(Debug), skip_all)] pub async fn handle_compilations(context: Compile, reporter: Reporter) -> anyhow::Result<()> { @@ -75,82 +129,33 @@ pub async fn handle_compilations(context: Compile, reporter: Reporter) -> anyhow context.compilation.invalidate_cache, ) .await - .map(Arc::new) .context("Failed to initialize cached compiler")?; - // Creating the driver and compiling all of the contracts. 
- let semaphore = context - .concurrency - .concurrency_limit() - .map(Semaphore::new) - .map(Arc::new); - let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); - let driver_task = futures::future::join_all(compilation_definitions.iter().enumerate().map( - |(compilation_id, compilation_definition)| { - let running_task_list = running_task_list.clone(); - let semaphore = semaphore.clone(); - - let cached_compiler = cached_compiler.clone(); - let mode = compilation_definition.mode.clone(); - let span = info_span!( - "Compiling Related Files", - compilation_id, - metadata_file_path = %compilation_definition.metadata_file_path.display(), - mode = %mode, - ); - async move { - let permit = match semaphore.as_ref() { - Some(semaphore) => Some(semaphore.acquire().await.expect("Can't fail")), - None => None, - }; - - running_task_list.write().await.insert(compilation_id); + let cli_reporting_task = tokio::spawn(start_cli_reporting_task( + context.output_format.clone(), + aggregator_events_rx, + )); - let driver = Driver::new(compilation_definition); - match driver.compile_all(&cached_compiler).await { - Ok(()) => { /* Reporting already happens by the cached compiler. */ } - Err(_) => { - /* Reporting already happens by the cached compiler. 
*/ - error!("Compilation Failed"); - } - }; - info!("Finished the compilation of the contracts"); - drop(permit); - running_task_list.write().await.remove(&compilation_id); - } - .instrument(span) - }, - )) - .inspect(|_| { - info!("Finished compiling all contracts"); - reporter_clone - .report_completion_event() - .expect("Can't fail") - }); - - let cli_reporting_task = start_cli_reporting_task(&context.output_format, aggregator_events_rx); - - tokio::task::spawn(async move { - loop { - let remaining_tasks = running_task_list.read().await; - info!( - count = remaining_tasks.len(), - ?remaining_tasks, - "Remaining Tasks" - ); - drop(remaining_tasks); - tokio::time::sleep(Duration::from_secs(10)).await - } - }); + process_corpus::( + &compilation_definitions, + &cached_compiler, + (), + &context.concurrency, + &context.fail_fast, + reporter_clone, + ) + .await; - futures::future::join(driver_task, cli_reporting_task).await; + cli_reporting_task + .await + .expect("CLI reporting task panicked"); Ok(()) } #[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] async fn start_cli_reporting_task( - output_format: &OutputFormatConfiguration, + output_format: OutputFormatConfiguration, mut aggregator_events_rx: broadcast::Receiver, ) { let start = Instant::now(); diff --git a/crates/core/src/differential_tests/entry_point.rs b/crates/core/src/differential_tests/entry_point.rs index ab852fe9..20800909 100644 --- a/crates/core/src/differential_tests/entry_point.rs +++ b/crates/core/src/differential_tests/entry_point.rs @@ -1,13 +1,10 @@ //! The main entry point into differential testing. 
use std::{ - collections::{BTreeMap, BTreeSet}, + collections::BTreeMap, io::{BufWriter, Write, stderr}, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, - time::{Duration, Instant}, + sync::Arc, + time::Instant, }; use ansi_term::{ANSIStrings, Color}; @@ -15,43 +12,96 @@ use anyhow::Context as _; use futures::StreamExt; use indexmap::IndexMap; use revive_dt_common::types::PrivateKeyAllocator; +use revive_dt_config::{ + Context, FailFastConfiguration, OutputFormat, OutputFormatConfiguration, Test, +}; use revive_dt_core::Platform; use revive_dt_format::corpus::Corpus; -use tokio::sync::{Mutex, Notify, RwLock, Semaphore}; -use tracing::{Instrument, error, info, info_span, instrument}; - -use revive_dt_config::{Context, OutputFormat, OutputFormatConfiguration, Test}; -use revive_dt_report::{Reporter, ReporterEvent, TestCaseStatus, TestSpecificReporter}; +use revive_dt_report::{Reporter, ReporterEvent, TestCaseStatus}; +use tokio::sync::Mutex; +use tracing::{error, info, info_span, instrument}; use crate::{ differential_tests::Driver, helpers::{ - CachedCompiler, NodePool, TestCaseIgnoreResolvedConfiguration, - create_test_definitions_stream, + CachedCompiler, CorpusDefinitionProcessor, NodePool, TestCaseIgnoreResolvedConfiguration, + TestDefinition, create_test_definitions_stream, process_corpus, }, }; -/// A guard that reports a test as ignored when dropped without a terminal status. -/// -/// When `--fail-fast` aborts in-flight tests via `select!`, the futures are dropped. This guard -/// ensures that each dropped test still sends an ignored event to the aggregator so the report -/// is complete. -struct FailFastGuard { - reporter: Option, +/// The number of test steps that were executed. +type StepsExecuted = usize; + +/// State for test definition processing. 
+#[derive(Clone)] +struct TestDefinitionProcessorState { + private_key_allocator: Arc>, } -impl FailFastGuard { - fn reported(&mut self) { - self.reporter = None; +/// The definition processor for tests. +struct TestDefinitionProcessor; + +impl CorpusDefinitionProcessor for TestDefinitionProcessor { + type Definition<'a> = TestDefinition<'a>; + type ProcessResult = StepsExecuted; + type State = TestDefinitionProcessorState; + + async fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + state: Self::State, + ) -> anyhow::Result { + let driver = + Driver::new_root(definition, state.private_key_allocator, cached_compiler).await?; + + driver.execute_all().await } -} -impl Drop for FailFastGuard { - fn drop(&mut self) { - if let Some(ref reporter) = self.reporter { - let _ = reporter - .report_test_ignored_event("Aborted due to fail-fast".to_string(), IndexMap::new()); - } + fn on_success( + definition: &Self::Definition<'_>, + steps_executed: StepsExecuted, + ) -> anyhow::Result<()> { + definition + .reporter + .report_test_succeeded_event(steps_executed)?; + Ok(()) + } + + fn on_failure(definition: &Self::Definition<'_>, error: String) -> anyhow::Result<()> { + definition.reporter.report_test_failed_event(error)?; + Ok(()) + } + + fn on_ignored(definition: &Self::Definition<'_>, reason: String) -> anyhow::Result<()> { + definition + .reporter + .report_test_ignored_event(reason, IndexMap::new())?; + Ok(()) + } + + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option> { + fail_fast.fail_fast.then(|| { + let reporter = definition.reporter.clone(); + Box::new(move || { + let _ = reporter.report_test_ignored_event( + "Aborted due to fail-fast".to_string(), + IndexMap::new(), + ); + }) as Box + }) + } + + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span { + info_span!( + "Executing Test Case", + test_id = task_id, + 
metadata_file_path = %definition.metadata_file_path.display(), + case_idx = %definition.case_idx, + mode = %definition.mode, + ) } } @@ -135,179 +185,26 @@ pub async fn handle_differential_tests(context: Test, reporter: Reporter) -> any context.compilation.invalidate_cache, ) .await - .map(Arc::new) .context("Failed to initialize cached compiler")?; - let private_key_allocator = Arc::new(Mutex::new(PrivateKeyAllocator::new( - context.wallet.highest_private_key_exclusive(), - ))); - - // Creating the driver and executing all of the steps. - let semaphore = context - .concurrency - .concurrency_limit() - .map(Semaphore::new) - .map(Arc::new); - let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); - let fail_fast_triggered = Arc::new(AtomicBool::new(false)); - let fail_fast_notify = Arc::new(Notify::new()); - let driver_task = futures::future::join_all(test_definitions.iter().enumerate().map( - |(test_id, test_definition)| { - let running_task_list = running_task_list.clone(); - let semaphore = semaphore.clone(); - let fail_fast_triggered = fail_fast_triggered.clone(); - let fail_fast_notify = fail_fast_notify.clone(); - let fail_fast = context.fail_fast.fail_fast; - - let private_key_allocator = private_key_allocator.clone(); - let cached_compiler = cached_compiler.clone(); - let mode = test_definition.mode.clone(); - let span = info_span!( - "Executing Test Case", - test_id, - metadata_file_path = %test_definition.metadata_file_path.display(), - case_idx = %test_definition.case_idx, - mode = %mode, - ); - async move { - let mut fail_fast_guard = FailFastGuard { - reporter: fail_fast.then(|| test_definition.reporter.clone()), - }; - - if fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - return; - } - 
let permit = match semaphore.as_ref() { - Some(semaphore) => match semaphore.acquire().await { - Ok(permit) => Some(permit), - Err(_) => { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - return; - } - }, - None => None, - }; - - if fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - drop(permit); - return; - } + let state = TestDefinitionProcessorState { + private_key_allocator: Arc::new(Mutex::new(PrivateKeyAllocator::new( + context.wallet.highest_private_key_exclusive(), + ))), + }; - running_task_list.write().await.insert(test_id); - let driver = match Driver::new_root( - test_definition, - private_key_allocator, - &cached_compiler, - ) - .await - { - Ok(driver) => driver, - Err(error) => { - test_definition - .reporter - .report_test_failed_event(format!("{error:#}")) - .expect("Can't fail"); - fail_fast_guard.reported(); - if fail_fast { - fail_fast_triggered.store(true, Ordering::Relaxed); - if let Some(ref sem) = semaphore { - sem.close(); - } - fail_fast_notify.notify_one(); - } - error!("Test Case Failed"); - drop(permit); - running_task_list.write().await.remove(&test_id); - return; - } - }; - info!("Created the driver for the test case"); - - match driver.execute_all().await { - Ok(steps_executed) => test_definition - .reporter - .report_test_succeeded_event(steps_executed) - .expect("Can't fail"), - Err(error) => { - test_definition - .reporter - .report_test_failed_event(format!("{error:#}")) - .expect("Can't fail"); - if fail_fast { - fail_fast_triggered.store(true, Ordering::Relaxed); - if let Some(ref 
sem) = semaphore { - sem.close(); - } - fail_fast_notify.notify_one(); - } - error!("Test Case Failed"); - } - }; - fail_fast_guard.reported(); - info!("Finished the execution of the test case"); - drop(permit); - running_task_list.write().await.remove(&test_id); - } - .instrument(span) - }, - )); let cli_reporting_task = tokio::spawn(start_cli_reporting_task(context.output_format, reporter)); - tokio::task::spawn(async move { - loop { - let remaining_tasks = running_task_list.read().await; - info!( - count = remaining_tasks.len(), - ?remaining_tasks, - "Remaining Tests" - ); - drop(remaining_tasks); - tokio::time::sleep(Duration::from_secs(10)).await - } - }); - - if context.fail_fast.fail_fast { - tokio::pin!(driver_task); - tokio::select! { - biased; - _ = fail_fast_notify.notified() => { - info!("Fail-fast triggered, aborting remaining tests"); - } - _ = &mut driver_task => {} - } - } else { - driver_task.await; - } - - info!("Finished executing all test cases"); - reporter_clone - .report_completion_event() - .expect("Can't fail"); - drop(reporter_clone); + process_corpus::( + &test_definitions, + &cached_compiler, + state, + &context.concurrency, + &context.fail_fast, + reporter_clone, + ) + .await; cli_reporting_task .await diff --git a/crates/core/src/helpers/corpus_definition_processor.rs b/crates/core/src/helpers/corpus_definition_processor.rs new file mode 100644 index 00000000..d999d9a2 --- /dev/null +++ b/crates/core/src/helpers/corpus_definition_processor.rs @@ -0,0 +1,228 @@ +//! Shared corpus processing infrastructure. 
+ +use std::{ + collections::BTreeSet, + future::Future, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, +}; + +use anyhow::Result; +use revive_dt_config::{ConcurrencyConfiguration, FailFastConfiguration}; +use revive_dt_report::Reporter; +use tokio::sync::{Notify, RwLock, Semaphore}; +use tracing::{Instrument, error, info}; + +use crate::helpers::CachedCompiler; + +/// A guard that invokes `action` when dropped without a terminal status, unless explicitly +/// disarmed via `reported()`. +/// +/// When `--fail-fast` aborts in-flight tasks via `select!`, the futures are dropped. This guard +/// ensures that each dropped task can still be reported (e.g. report an ignored event) to the +/// aggregator so that the report is complete. +struct FailFastGuard { + action: Option>, +} + +impl FailFastGuard { + fn reported(&mut self) { + self.action = None; + } +} + +impl Drop for FailFastGuard { + fn drop(&mut self) { + if let Some(action) = self.action.take() { + action(); + } + } +} + +/// Describes how to process a definition within a corpus. +pub trait CorpusDefinitionProcessor: Sized + 'static { + /// The definition type produced by the stream. + type Definition<'a>: 'a; + + /// The result type from processing a definition. + type ProcessResult; + + /// Additional context-specific state needed for processing. + type State: Clone; + + /// Processes a single definition. + fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + state: Self::State, + ) -> impl Future>; + + /// Called when a definition is processed successfully. + fn on_success(_definition: &Self::Definition<'_>, _result: Self::ProcessResult) -> Result<()> { + Ok(()) + } + + /// Called when a definition fails being processed. + fn on_failure(_definition: &Self::Definition<'_>, _error: String) -> Result<()> { + Ok(()) + } + + /// Called when a definition is ignored/aborted. 
+ fn on_ignored(_definition: &Self::Definition<'_>, _reason: String) -> Result<()> { + Ok(()) + } + + /// Creates the action to run if this task is aborted due to fail-fast. + /// Returns `None` if fail-fast is disabled. + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option>; + + /// Creates the tracing span for processing this definition. + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span; +} + +/// Processes a corpus of definitions using the provided processor. +pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( + definitions: &'a [P::Definition<'a>], + cached_compiler: &'a CachedCompiler<'a>, + state: P::State, + concurrency: &ConcurrencyConfiguration, + fail_fast: &FailFastConfiguration, + reporter: Reporter, +) { + let semaphore = concurrency + .concurrency_limit() + .map(Semaphore::new) + .map(Arc::new); + let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); + + let fail_fast_triggered = Arc::new(AtomicBool::new(false)); + let fail_fast_notify = Arc::new(Notify::new()); + + // Process all definitions concurrently. 
+ let driver_task = + futures::future::join_all(definitions.iter().enumerate().map(|(task_id, definition)| { + let running_task_list = running_task_list.clone(); + let semaphore = semaphore.clone(); + let fail_fast_triggered = fail_fast_triggered.clone(); + let fail_fast_notify = fail_fast_notify.clone(); + let state = state.clone(); + let span = P::create_span(task_id, definition); + + async move { + let mut fail_fast_guard = FailFastGuard { + action: P::create_fail_fast_action(definition, fail_fast), + }; + + if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { + P::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + return; + } + + let permit = match semaphore.as_ref() { + Some(semaphore) => match semaphore.acquire().await { + Ok(permit) => Some(permit), + Err(_) => { + P::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + return; + } + }, + None => None, + }; + + // Double-check fail-fast after acquiring permit. 
+ if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { + P::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + drop(permit); + return; + } + + running_task_list.write().await.insert(task_id); + + let result = P::process_definition(definition, cached_compiler, state).await; + + match result { + Ok(process_result) => { + P::on_success(definition, process_result) + .expect("aggregator task is joined later so the receiver is alive"); + } + Err(error) => { + P::on_failure(definition, format!("{error:#}")) + .expect("aggregator task is joined later so the receiver is alive"); + + if fail_fast.fail_fast { + fail_fast_triggered.store(true, Ordering::Relaxed); + if let Some(ref sem) = semaphore { + sem.close(); + } + fail_fast_notify.notify_one(); + } + error!("Task Failed"); + } + } + + fail_fast_guard.reported(); + + info!("Finished processing the corpus definition"); + drop(permit); + running_task_list.write().await.remove(&task_id); + } + .instrument(span) + })); + + // Spawn monitoring task that logs remaining tasks periodically. + tokio::task::spawn({ + let running_task_list = running_task_list.clone(); + async move { + loop { + let remaining_tasks = running_task_list.read().await; + info!( + count = remaining_tasks.len(), + ?remaining_tasks, + "Remaining Tasks" + ); + drop(remaining_tasks); + tokio::time::sleep(Duration::from_secs(10)).await; + } + } + }); + + // Wait for completion, with optional fail-fast abort. + if fail_fast.fail_fast { + tokio::pin!(driver_task); + tokio::select! 
{ + biased; + _ = fail_fast_notify.notified() => { + info!("Fail-fast triggered, aborting remaining tasks"); + } + _ = &mut driver_task => {} + } + } else { + driver_task.await; + } + + info!("Finished processing all corpus definitions"); + reporter + .report_completion_event() + .expect("aggregator task is joined later so the receiver is alive"); +} diff --git a/crates/core/src/helpers/mod.rs b/crates/core/src/helpers/mod.rs index 236962fe..c3dd8580 100644 --- a/crates/core/src/helpers/mod.rs +++ b/crates/core/src/helpers/mod.rs @@ -1,9 +1,11 @@ mod cached_compiler; mod compile; +mod corpus_definition_processor; mod pool; mod test; pub use cached_compiler::*; pub use compile::*; +pub use corpus_definition_processor::*; pub use pool::*; pub use test::*; From 7c46dcdaea4e157c5d2ceee4f03d6ca1dab7b380 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 2 Mar 2026 18:29:05 +0100 Subject: [PATCH 22/25] Enable cache invalidation. --- crates/core/src/compilations/entry_point.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs index 19c69676..68da117c 100644 --- a/crates/core/src/compilations/entry_point.rs +++ b/crates/core/src/compilations/entry_point.rs @@ -16,7 +16,7 @@ use revive_dt_config::{ use revive_dt_format::corpus::Corpus; use revive_dt_report::{CompilationStatus, Reporter, ReporterEvent}; use tokio::sync::broadcast; -use tracing::{info, info_span, instrument}; +use tracing::{info, info_span, instrument, warn}; use crate::{ compilations::Driver, @@ -79,7 +79,14 @@ impl CorpusDefinitionProcessor for CompilationDefinitionProcessor { /// Handles the compilations according to the information defined in the context. 
 #[instrument(level = "info", err(Debug), skip_all)]
-pub async fn handle_compilations(context: Compile, reporter: Reporter) -> anyhow::Result<()> {
+pub async fn handle_compilations(mut context: Compile, reporter: Reporter) -> anyhow::Result<()> {
+    if !context.compilation.invalidate_cache {
+        warn!(
+            "Cache invalidation enabled: The compile subcommand always invalidates cache to avoid incorrect results from different compiler binaries."
+        );
+        context.compilation.invalidate_cache = true;
+    }
+
     let reporter_clone = reporter.clone();
 
     // Subscribe early, before stream collection, to capture all events including

From 4b84f2733d75934d7df128e38b6a0ef9ac544fa9 Mon Sep 17 00:00:00 2001
From: elle-j
Date: Mon, 2 Mar 2026 18:47:42 +0100
Subject: [PATCH 23/25] Don't panic clearing the cache if the cache directory
 does not exist.

---
 crates/core/src/helpers/cached_compiler.rs | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs
index 9dcf12b9..be2b5a48 100644
--- a/crates/core/src/helpers/cached_compiler.rs
+++ b/crates/core/src/helpers/cached_compiler.rs
@@ -340,11 +340,15 @@ impl ArtifactsCache {
 
     #[instrument(level = "debug", skip_all, err)]
     pub async fn with_invalidated_cache(self) -> Result<Self> {
-        cacache::clear(self.path.as_path())
-            .await
-            .map_err(Into::<anyhow::Error>::into)
-            .with_context(|| format!("Failed to clear cache at {}", self.path.display()))?;
-        Ok(self)
+        match cacache::clear(self.path.as_path()).await {
+            Ok(()) => Ok(self),
+            Err(cacache::Error::IoError(err, _)) if err.kind() == std::io::ErrorKind::NotFound => {
+                Ok(self)
+            }
+            Err(err) => Err(err)
+                .map_err(Into::<anyhow::Error>::into)
+                .with_context(|| format!("Failed to clear cache at {}", self.path.display())),
+        }
     }
 
     #[instrument(level = "debug", skip_all, err)]

From b382150c03b5bc664e0c6f2bc4319ae6f04d7d8a Mon Sep 17 00:00:00 2001
From: elle-j
Date: Mon, 2 Mar 2026 18:53:41 +0100
Subject: [PATCH 24/25] Fix
clippy. --- crates/core/src/helpers/cached_compiler.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index be2b5a48..dc34e16f 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -345,8 +345,7 @@ impl ArtifactsCache { Err(cacache::Error::IoError(err, _)) if err.kind() == std::io::ErrorKind::NotFound => { Ok(self) } - Err(err) => Err(err) - .map_err(Into::::into) + Err(err) => Err(Into::::into(err)) .with_context(|| format!("Failed to clear cache at {}", self.path.display())), } } From eb3aad987586ba9af9ef3b886370bcb618072a93 Mon Sep 17 00:00:00 2001 From: elle-j Date: Mon, 2 Mar 2026 19:12:08 +0100 Subject: [PATCH 25/25] Update naming. --- .../src/differential_tests/entry_point.rs | 8 +++---- ...ition_processor.rs => corpus_processor.rs} | 24 +++++++++---------- crates/core/src/helpers/mod.rs | 4 ++-- 3 files changed, 18 insertions(+), 18 deletions(-) rename crates/core/src/helpers/{corpus_definition_processor.rs => corpus_processor.rs} (91%) diff --git a/crates/core/src/differential_tests/entry_point.rs b/crates/core/src/differential_tests/entry_point.rs index 20800909..503106c9 100644 --- a/crates/core/src/differential_tests/entry_point.rs +++ b/crates/core/src/differential_tests/entry_point.rs @@ -51,10 +51,10 @@ impl CorpusDefinitionProcessor for TestDefinitionProcessor { cached_compiler: &'a CachedCompiler<'a>, state: Self::State, ) -> anyhow::Result { - let driver = - Driver::new_root(definition, state.private_key_allocator, cached_compiler).await?; - - driver.execute_all().await + Driver::new_root(definition, state.private_key_allocator, cached_compiler) + .await? 
+ .execute_all() + .await } fn on_success( diff --git a/crates/core/src/helpers/corpus_definition_processor.rs b/crates/core/src/helpers/corpus_processor.rs similarity index 91% rename from crates/core/src/helpers/corpus_definition_processor.rs rename to crates/core/src/helpers/corpus_processor.rs index d999d9a2..5c60f126 100644 --- a/crates/core/src/helpers/corpus_definition_processor.rs +++ b/crates/core/src/helpers/corpus_processor.rs @@ -87,10 +87,10 @@ pub trait CorpusDefinitionProcessor: Sized + 'static { } /// Processes a corpus of definitions using the provided processor. -pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( - definitions: &'a [P::Definition<'a>], +pub async fn process_corpus<'a, Processor: CorpusDefinitionProcessor>( + definitions: &'a [Processor::Definition<'a>], cached_compiler: &'a CachedCompiler<'a>, - state: P::State, + state: Processor::State, concurrency: &ConcurrencyConfiguration, fail_fast: &FailFastConfiguration, reporter: Reporter, @@ -112,15 +112,15 @@ pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( let fail_fast_triggered = fail_fast_triggered.clone(); let fail_fast_notify = fail_fast_notify.clone(); let state = state.clone(); - let span = P::create_span(task_id, definition); + let span = Processor::create_span(task_id, definition); async move { let mut fail_fast_guard = FailFastGuard { - action: P::create_fail_fast_action(definition, fail_fast), + action: Processor::create_fail_fast_action(definition, fail_fast), }; if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - P::on_ignored( + Processor::on_ignored( definition, "Skipped due to fail-fast: a prior task failed".to_string(), ) @@ -133,7 +133,7 @@ pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( Some(semaphore) => match semaphore.acquire().await { Ok(permit) => Some(permit), Err(_) => { - P::on_ignored( + Processor::on_ignored( definition, "Skipped due to fail-fast: a prior task failed".to_string(), ) @@ -147,7 
+147,7 @@ pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( // Double-check fail-fast after acquiring permit. if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - P::on_ignored( + Processor::on_ignored( definition, "Skipped due to fail-fast: a prior task failed".to_string(), ) @@ -159,15 +159,15 @@ pub async fn process_corpus<'a, P: CorpusDefinitionProcessor>( running_task_list.write().await.insert(task_id); - let result = P::process_definition(definition, cached_compiler, state).await; - + let result = + Processor::process_definition(definition, cached_compiler, state).await; match result { Ok(process_result) => { - P::on_success(definition, process_result) + Processor::on_success(definition, process_result) .expect("aggregator task is joined later so the receiver is alive"); } Err(error) => { - P::on_failure(definition, format!("{error:#}")) + Processor::on_failure(definition, format!("{error:#}")) .expect("aggregator task is joined later so the receiver is alive"); if fail_fast.fail_fast { diff --git a/crates/core/src/helpers/mod.rs b/crates/core/src/helpers/mod.rs index c3dd8580..25dbb5fe 100644 --- a/crates/core/src/helpers/mod.rs +++ b/crates/core/src/helpers/mod.rs @@ -1,11 +1,11 @@ mod cached_compiler; mod compile; -mod corpus_definition_processor; +mod corpus_processor; mod pool; mod test; pub use cached_compiler::*; pub use compile::*; -pub use corpus_definition_processor::*; +pub use corpus_processor::*; pub use pool::*; pub use test::*;