diff --git a/Cargo.lock b/Cargo.lock index d81075b..9a20397 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5782,6 +5782,7 @@ dependencies = [ "dashmap", "futures", "indexmap 2.13.0", + "regex", "revive-dt-common", "revive-dt-compiler", "revive-dt-config", diff --git a/crates/common/src/types/mod.rs b/crates/common/src/types/mod.rs index 4d52d08..c426a1a 100644 --- a/crates/common/src/types/mod.rs +++ b/crates/common/src/types/mod.rs @@ -1,5 +1,6 @@ mod identifiers; mod mode; +mod parsed_compilation_specifier; mod parsed_test_specifier; mod private_key_allocator; mod round_robin_pool; @@ -7,6 +8,7 @@ mod version_or_requirement; pub use identifiers::*; pub use mode::*; +pub use parsed_compilation_specifier::*; pub use parsed_test_specifier::*; pub use private_key_allocator::*; pub use round_robin_pool::*; diff --git a/crates/common/src/types/parsed_compilation_specifier.rs b/crates/common/src/types/parsed_compilation_specifier.rs new file mode 100644 index 0000000..e57c130 --- /dev/null +++ b/crates/common/src/types/parsed_compilation_specifier.rs @@ -0,0 +1,80 @@ +use std::{fmt::Display, path::PathBuf, str::FromStr}; + +use anyhow::Context as _; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ParsedCompilationSpecifier { + /// All of the contracts in the file should be compiled. + FileOrDirectory { + /// The path of the metadata file containing the contracts or the references to the contracts. 
+        metadata_or_directory_file_path: PathBuf, + }, } + impl Display for ParsedCompilationSpecifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ParsedCompilationSpecifier::FileOrDirectory { + metadata_or_directory_file_path, + } => { + write!(f, "{}", metadata_or_directory_file_path.display()) + } + } + } + } + impl FromStr for ParsedCompilationSpecifier { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + let path = PathBuf::from(s) + .canonicalize() + .context("Failed to canonicalize the path of the contracts")?; + + Ok(Self::FileOrDirectory { + metadata_or_directory_file_path: path, + }) + } + } + + impl From<ParsedCompilationSpecifier> for String { + fn from(value: ParsedCompilationSpecifier) -> Self { + value.to_string() + } + } + + impl TryFrom<String> for ParsedCompilationSpecifier { + type Error = anyhow::Error; + + fn try_from(value: String) -> Result<Self, Self::Error> { + value.parse() + } + } + + impl TryFrom<&str> for ParsedCompilationSpecifier { + type Error = anyhow::Error; + + fn try_from(value: &str) -> Result<Self, Self::Error> { + value.parse() + } + } + + impl Serialize for ParsedCompilationSpecifier { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: serde::Serializer, + { + self.to_string().serialize(serializer) + } + } + + impl<'de> Deserialize<'de> for ParsedCompilationSpecifier { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: serde::Deserializer<'de>, + { + let string = String::deserialize(deserializer)?; + string.parse().map_err(serde::de::Error::custom) + } + } diff --git a/crates/common/src/types/parsed_test_specifier.rs b/crates/common/src/types/parsed_test_specifier.rs index 2710bf0..1944aa4 100644 --- a/crates/common/src/types/parsed_test_specifier.rs +++ b/crates/common/src/types/parsed_test_specifier.rs @@ -11,7 +11,7 @@ use crate::types::Mode; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ParsedTestSpecifier { - /// All of the test cases in the file should be ran across all of the specified modes + /// All of
the test cases in the file should be run across all of the specified modes. FileOrDirectory { /// The path of the metadata file containing the test cases. metadata_or_directory_file_path: PathBuf, diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 104342d..cba3d88 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -17,7 +17,9 @@ use alloy::{ }; use anyhow::Context as _; use clap::{Parser, ValueEnum, ValueHint}; -use revive_dt_common::types::{ParsedTestSpecifier, PlatformIdentifier}; +use revive_dt_common::types::{ + ParsedCompilationSpecifier, ParsedTestSpecifier, PlatformIdentifier, +}; use semver::Version; use serde::{Deserialize, Serialize, Serializer}; use strum::{AsRefStr, Display, EnumString, IntoStaticStr}; @@ -39,7 +41,7 @@ mod context { pub output_format: OutputFormatConfiguration, pub platforms: PlatformConfiguration, pub working_directory: WorkingDirectoryConfiguration, - pub corpus: CorpusConfiguration, + pub corpus: CorpusExecutionConfiguration, pub fail_fast: FailFastConfiguration, pub solc: SolcConfiguration, pub resolc: ResolcConfiguration, @@ -65,7 +67,7 @@ mod context { pub platforms: PlatformConfiguration, pub working_directory: WorkingDirectoryConfiguration, pub benchmark_run: BenchmarkRunConfiguration, - pub corpus: CorpusConfiguration, + pub corpus: CorpusExecutionConfiguration, pub solc: SolcConfiguration, pub resolc: ResolcConfiguration, pub polkadot_parachain: PolkadotParachainConfiguration, @@ -97,6 +99,20 @@ mod context { #[subcommand] pub struct ExportJsonSchema; + /// Compiles contracts for pre-link compilations only without executing any tests. 
+ #[subcommand] + pub struct Compile { + pub output_format: OutputFormatConfiguration, + pub working_directory: WorkingDirectoryConfiguration, + pub corpus: CorpusCompilationConfiguration, + pub fail_fast: FailFastConfiguration, + pub solc: SolcConfiguration, + pub resolc: ResolcConfiguration, + pub concurrency: ConcurrencyConfiguration, + pub compilation: CompilationConfiguration, + pub report: ReportConfiguration, + } + /// Configuration for the commandline profile. #[configuration] pub struct ProfileConfiguration { @@ -112,6 +128,9 @@ mod context { /// The output format to use for the tool's output. #[arg(short, long, default_value_t = OutputFormat::CargoTestLike)] pub output_format: OutputFormat, + /// If applicable, show verbose details when executing the tool. + #[arg(long)] + pub verbose: bool, } /// Configuration for the set of platforms. @@ -131,7 +150,7 @@ mod context { #[configuration] pub struct WorkingDirectoryConfiguration { /// The working directory that the program will use for all of the temporary artifacts - /// needed at runtime. + /// needed at runtime, as well as the generated report. /// /// If not specified, then a temporary directory will be created and used by the program /// for all temporary artifacts. @@ -170,10 +189,10 @@ mod context { pub fail_fast: bool, } - /// A set of configuration parameters for the corpus files to use for the execution. + /// A set of configuration parameters for the corpus files to use for the test execution. #[serde_with::serde_as] #[configuration(key = "corpus")] - pub struct CorpusConfiguration { + pub struct CorpusExecutionConfiguration { /// A list of test specifiers for the tests that the tool should run. /// /// Test specifiers follow the following format: @@ -192,6 +211,22 @@ mod context { pub test_specifiers: Vec, } + /// A set of configuration parameters for the corpus files to use for the pre-link-only compilation. 
+ #[serde_with::serde_as] + #[configuration(key = "corpus")] + pub struct CorpusCompilationConfiguration { + /// A list of compilation specifiers for the pre-link-only compilations that the tool should run. + /// + /// Compile specifiers follow the following format: + /// + /// - `{directory_path|metadata_file_path}`: A path to a metadata file where all of the contracts, + /// or references to the contracts, live and should be compiled. Alternatively, it points to a + /// directory instructing the framework to discover the metadata files that live there and compile them. + #[serde_as(as = "Vec<serde_with::DisplayFromStr>")] + #[arg(short = 'c', long = "compile", required = true)] + pub compilation_specifiers: Vec<ParsedCompilationSpecifier>, + } + /// A set of configuration parameters for Solc. #[configuration(key = "solc")] pub struct SolcConfiguration { @@ -517,10 +552,9 @@ mod context { impl Context { pub fn update_for_profile(&mut self) { match self { - Context::Test(ctx) => ctx.update_for_profile(), - Context::Benchmark(ctx) => ctx.update_for_profile(), - Context::ExportJsonSchema(_) => {} - Context::ExportGenesis(_) => {} + Self::Test(ctx) => ctx.update_for_profile(), + Self::Benchmark(ctx) => ctx.update_for_profile(), + Self::ExportJsonSchema(_) | Self::ExportGenesis(..) | Self::Compile(..) => {} } } } @@ -610,6 +644,12 @@ mod context { Self::parse_from(["benchmark", "--test", "."]) } } + + impl Default for Compile { + fn default() -> Self { + Self::parse_from(["compile", "--compile", "."]) + } + } } /// Represents the working directory that the program uses. 
diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index c69b704..26e9c14 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -30,6 +30,7 @@ clap = { workspace = true } dashmap = { workspace = true } futures = { workspace = true } indexmap = { workspace = true } +regex = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-appender = { workspace = true } diff --git a/crates/core/src/compilations/driver.rs b/crates/core/src/compilations/driver.rs new file mode 100644 index 0000000..dceb254 --- /dev/null +++ b/crates/core/src/compilations/driver.rs @@ -0,0 +1,40 @@ +use anyhow::{Context as _, Result}; +use revive_dt_report::CompilationReporter; +use tracing::error; + +use crate::helpers::{CachedCompiler, CompilationDefinition}; + +/// The compilation driver. +pub struct Driver<'a> { + /// The definition of the compilation that the driver is instructed to execute. + compilation_definition: &'a CompilationDefinition<'a>, +} + +impl<'a> Driver<'a> { + /// Creates a new driver. + pub fn new(compilation_definition: &'a CompilationDefinition<'a>) -> Self { + Self { + compilation_definition, + } + } + + /// Compiles all contracts specified by the [`CompilationDefinition`]. 
+ pub async fn compile_all(&self, cached_compiler: &CachedCompiler<'a>) -> Result<()> { + cached_compiler + .compile_contracts( + self.compilation_definition.metadata, + self.compilation_definition.metadata_file_path, + self.compilation_definition.mode.clone(), + None, + self.compilation_definition.compiler.as_ref(), + self.compilation_definition.compiler_identifier, + None, + &CompilationReporter::PreLink(&self.compilation_definition.reporter), + ) + .await + .inspect_err(|err| error!(?err, "Compilation failed")) + .context("Failed to produce the compiled contracts")?; + + Ok(()) + } +} diff --git a/crates/core/src/compilations/entry_point.rs b/crates/core/src/compilations/entry_point.rs new file mode 100644 index 0000000..68da117 --- /dev/null +++ b/crates/core/src/compilations/entry_point.rs @@ -0,0 +1,387 @@ +//! The main entry point into compiling in pre-link-only mode without any test execution. + +use std::{ + io::{BufWriter, Write, stderr}, + time::Instant, +}; + +use ansi_term::{ANSIStrings, Color}; +use anyhow::Context as _; +use futures::StreamExt; +use indexmap::IndexMap; +use revive_dt_compiler::{Mode, ModeOptimizerLevel, ModeOptimizerSetting, ModePipeline}; +use revive_dt_config::{ + Compile, Context, FailFastConfiguration, OutputFormat, OutputFormatConfiguration, +}; +use revive_dt_format::corpus::Corpus; +use revive_dt_report::{CompilationStatus, Reporter, ReporterEvent}; +use tokio::sync::broadcast; +use tracing::{info, info_span, instrument, warn}; + +use crate::{ + compilations::Driver, + helpers::{ + CachedCompiler, CompilationDefinition, CorpusDefinitionProcessor, + create_compilation_definitions_stream, process_corpus, + }, +}; + +/// The definition processor for compilations. 
+struct CompilationDefinitionProcessor; + +impl CorpusDefinitionProcessor for CompilationDefinitionProcessor { + type Definition<'a> = CompilationDefinition<'a>; + type ProcessResult = (); + type State = (); + + async fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + _state: Self::State, + ) -> anyhow::Result { + Driver::new(definition).compile_all(cached_compiler).await?; + Ok(()) + } + + /* `on_success` and `on_failure` use the default no-op implementations as reporting already happens by the cached compiler. */ + + fn on_ignored(definition: &Self::Definition<'_>, reason: String) -> anyhow::Result<()> { + definition + .reporter + .report_pre_link_contracts_compilation_ignored_event(reason, IndexMap::new())?; + Ok(()) + } + + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option> { + fail_fast.fail_fast.then(|| { + let reporter = definition.reporter.clone(); + Box::new(move || { + let _ = reporter.report_pre_link_contracts_compilation_ignored_event( + "Aborted due to fail-fast".to_string(), + IndexMap::new(), + ); + }) as Box + }) + } + + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span { + info_span!( + "Compiling Related Files", + compilation_id = task_id, + metadata_file_path = %definition.metadata_file_path.display(), + mode = %definition.mode, + ) + } +} + +/// Handles the compilations according to the information defined in the context. +#[instrument(level = "info", err(Debug), skip_all)] +pub async fn handle_compilations(mut context: Compile, reporter: Reporter) -> anyhow::Result<()> { + if !context.compilation.invalidate_cache { + warn!( + "Cache invalidation enabled: The compile subcommand always invalidates cache to avoid incorrect results from different compiler binaries." 
+ ); + context.compilation.invalidate_cache = true; + } + + let reporter_clone = reporter.clone(); + + // Subscribe early, before stream collection, to capture all events including + // ignored compilations determined during compatibility checks. + let aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); + + // Discover all of the metadata files that are defined in the context. + let corpus = context + .corpus + .compilation_specifiers + .clone() + .into_iter() + .try_fold(Corpus::default(), Corpus::with_compilation_specifier) + .context("Failed to parse the compilation corpus")?; + info!( + len = corpus.metadata_file_count(), + "Discovered metadata files" + ); + + let full_context = Context::Compile(Box::new(context.clone())); + let compilation_definitions = create_compilation_definitions_stream( + &full_context, + &corpus, + // TODO (temporarily always using `z`): Accept mode(s) via CLI. + Mode { + pipeline: ModePipeline::ViaYulIR, + optimize_setting: ModeOptimizerSetting { + solc_optimizer_enabled: true, + level: ModeOptimizerLevel::Mz, + }, + solc_version: None, + }, + reporter.clone(), + ) + .await + .collect::>() + .await; + drop(reporter); + info!( + len = compilation_definitions.len(), + "Created compilation definitions" + ); + + let cached_compiler = CachedCompiler::new( + context + .working_directory + .working_directory + .as_path() + .join("compilation_cache"), + context.compilation.invalidate_cache, + ) + .await + .context("Failed to initialize cached compiler")?; + + let cli_reporting_task = tokio::spawn(start_cli_reporting_task( + context.output_format.clone(), + aggregator_events_rx, + )); + + process_corpus::( + &compilation_definitions, + &cached_compiler, + (), + &context.concurrency, + &context.fail_fast, + reporter_clone, + ) + .await; + + cli_reporting_task + .await + .expect("CLI reporting task panicked"); + + Ok(()) +} + +#[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] +async fn start_cli_reporting_task( 
+ output_format: OutputFormatConfiguration, + mut aggregator_events_rx: broadcast::Receiver, +) { + let start = Instant::now(); + + let mut global_success_count = 0; + let mut global_failure_count = 0; + let mut global_ignore_count = 0; + + let mut buf = BufWriter::new(stderr()); + while let Ok(event) = aggregator_events_rx.recv().await { + let ReporterEvent::MetadataFileModeCombinationCompilationCompleted { + metadata_file_path, + compilation_status, + } = event + else { + continue; + }; + + match output_format.output_format { + OutputFormat::Legacy => { + let _ = write!(buf, "{}", metadata_file_path.display()); + for (mode, status) in compilation_status { + let _ = write!(buf, "\tMode {}: ", mode); + let _ = match &status { + CompilationStatus::Success { + is_cached, + compiled_contracts_info, + .. + } => { + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Green.bold().paint("Compilation Succeeded"), + Color::Green.paint(format!( + " - Contracts compiled: {}, Cached: {}", + contract_count, + if *is_cached { "yes" } else { "no" } + )), + ]) + ) + } + CompilationStatus::Failure { reason, .. } => { + global_failure_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Red.bold().paint("Compilation Failed"), + Color::Red.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + CompilationStatus::Ignored { reason, .. } => { + global_ignore_count += 1; + writeln!( + buf, + "{}", + ANSIStrings(&[ + Color::Yellow.bold().paint("Compilation Ignored"), + Color::Yellow.paint(format!(" - Reason: {}", reason.trim())), + ]) + ) + } + }; + } + let _ = writeln!(buf); + } + OutputFormat::CargoTestLike => { + let mut success_count = 0; + let mut failure_count = 0; + let mut ignored_count = 0; + + for (mode, status) in compilation_status { + match &status { + CompilationStatus::Success { + compiled_contracts_info, + .. 
+ } => { + success_count += 1; + global_success_count += 1; + let contract_count: usize = compiled_contracts_info + .values() + .map(|contracts| contracts.len()) + .sum(); + + if output_format.verbose { + // Verbose: show header + per-contract lines + summary. + writeln!( + buf, + "\t{} {} - {}\n", + Color::Green.paint("Compiling"), + metadata_file_path.display(), + mode + ) + .unwrap(); + writeln!(buf, "compiling {} contracts", contract_count).unwrap(); + + for (source_path, contracts) in compiled_contracts_info { + for contract_name in contracts.keys() { + writeln!( + buf, + "compile {}::{} ... {}", + source_path.display(), + contract_name, + Color::Green.paint("ok") + ) + .unwrap(); + } + } + writeln!(buf).unwrap(); + + writeln!( + buf, + "compile result: {}. {} contracts compiled", + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + writeln!(buf).unwrap(); + } else { + // Non-verbose: single line with contract count. + writeln!( + buf, + "compile {} ({}) ... {} ({} contracts)", + metadata_file_path.display(), + mode, + Color::Green.paint("ok"), + contract_count + ) + .unwrap(); + } + } + CompilationStatus::Failure { reason, .. } => { + failure_count += 1; + global_failure_count += 1; + writeln!( + buf, + "compile {} ({}) ... {}", + metadata_file_path.display(), + mode, + Color::Red.paint(format!("FAILED, {}", reason.trim())) + ) + .unwrap(); + } + CompilationStatus::Ignored { reason, .. } => { + ignored_count += 1; + global_ignore_count += 1; + writeln!( + buf, + "compile {} ({}) ... {}", + metadata_file_path.display(), + mode, + Color::Yellow.paint(format!("ignored, {}", reason.trim())) + ) + .unwrap(); + } + } + } + + let status = if failure_count > 0 { + Color::Red.paint("FAILED") + } else { + Color::Green.paint("ok") + }; + writeln!( + buf, + "compile result: {}. 
{} succeeded; {} failed; {} ignored", + status, success_count, failure_count, ignored_count, + ) + .unwrap(); + writeln!(buf).unwrap(); + + if aggregator_events_rx.is_empty() { + buf = tokio::task::spawn_blocking(move || { + buf.flush().unwrap(); + buf + }) + .await + .unwrap(); + } + } + } + } + info!("Aggregator Broadcast Channel Closed"); + + // Summary at the end. + let total = global_success_count + global_failure_count + global_ignore_count; + match output_format.output_format { + OutputFormat::Legacy => { + writeln!( + buf, + "{} compilations: {} succeeded, {} failed, {} ignored in {} seconds", + total, + Color::Green.paint(global_success_count.to_string()), + Color::Red.paint(global_failure_count.to_string()), + Color::Yellow.paint(global_ignore_count.to_string()), + start.elapsed().as_secs() + ) + .unwrap(); + } + OutputFormat::CargoTestLike => { + writeln!( + buf, + "\nrun finished. {} succeeded; {} failed; {} ignored; finished in {}s", + global_success_count, + global_failure_count, + global_ignore_count, + start.elapsed().as_secs() + ) + .unwrap(); + } + } +} diff --git a/crates/core/src/compilations/mod.rs b/crates/core/src/compilations/mod.rs new file mode 100644 index 0000000..4741118 --- /dev/null +++ b/crates/core/src/compilations/mod.rs @@ -0,0 +1,9 @@ +//! This module contains all of the code responsible for performing compilations, +//! including the driver implementation and the core logic that allows for contracts +//! to be compiled in pre-link-only mode without any test execution. 
+ +mod driver; +mod entry_point; + +pub use driver::*; +pub use entry_point::*; diff --git a/crates/core/src/differential_benchmarks/driver.rs b/crates/core/src/differential_benchmarks/driver.rs index 95b77cd..f1fe49a 100644 --- a/crates/core/src/differential_benchmarks/driver.rs +++ b/crates/core/src/differential_benchmarks/driver.rs @@ -33,6 +33,7 @@ use revive_dt_format::{ }, traits::{ResolutionContext, ResolverApi}, }; +use revive_dt_report::CompilationReporter; use tokio::{ sync::{Mutex, OnceCell, RwLock, mpsc::UnboundedSender}, time::{interval, timeout}, @@ -137,8 +138,9 @@ where self.test_definition.mode.clone(), None, self.platform_information.compiler.as_ref(), - self.platform_information.platform, - &self.platform_information.reporter, + self.platform_information.platform.compiler_identifier(), + Some(self.platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&self.platform_information.reporter), ) .await .inspect_err(|err| error!(?err, "Pre-linking compilation failed")) @@ -213,8 +215,9 @@ where self.test_definition.mode.clone(), deployed_libraries.as_ref(), self.platform_information.compiler.as_ref(), - self.platform_information.platform, - &self.platform_information.reporter, + self.platform_information.platform.compiler_identifier(), + Some(self.platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&self.platform_information.reporter), ) .await .inspect_err(|err| error!(?err, "Post-linking compilation failed")) diff --git a/crates/core/src/differential_tests/driver.rs b/crates/core/src/differential_tests/driver.rs index 852b64b..536b6db 100644 --- a/crates/core/src/differential_tests/driver.rs +++ b/crates/core/src/differential_tests/driver.rs @@ -30,6 +30,7 @@ use revive_dt_format::{ }, traits::ResolutionContext, }; +use revive_dt_report::CompilationReporter; use subxt::{ext::codec::Decode, metadata::Metadata, tx::Payload}; use tokio::sync::Mutex; use tracing::{error, info, 
instrument}; @@ -186,8 +187,9 @@ where test_definition.mode.clone(), None, platform_information.compiler.as_ref(), - platform_information.platform, - &platform_information.reporter, + platform_information.platform.compiler_identifier(), + Some(platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&platform_information.reporter), ) .await .inspect_err(|err| { @@ -269,8 +271,9 @@ where test_definition.mode.clone(), deployed_libraries.as_ref(), platform_information.compiler.as_ref(), - platform_information.platform, - &platform_information.reporter, + platform_information.platform.compiler_identifier(), + Some(platform_information.platform.platform_identifier()), + &CompilationReporter::Execution(&platform_information.reporter), ) .await .inspect_err(|err| { diff --git a/crates/core/src/differential_tests/entry_point.rs b/crates/core/src/differential_tests/entry_point.rs index 38af03a..503106c 100644 --- a/crates/core/src/differential_tests/entry_point.rs +++ b/crates/core/src/differential_tests/entry_point.rs @@ -1,13 +1,10 @@ //! The main entry point into differential testing. 
use std::{ - collections::{BTreeMap, BTreeSet}, + collections::BTreeMap, io::{BufWriter, Write, stderr}, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, - time::{Duration, Instant}, + sync::Arc, + time::Instant, }; use ansi_term::{ANSIStrings, Color}; @@ -15,43 +12,96 @@ use anyhow::Context as _; use futures::StreamExt; use indexmap::IndexMap; use revive_dt_common::types::PrivateKeyAllocator; +use revive_dt_config::{ + Context, FailFastConfiguration, OutputFormat, OutputFormatConfiguration, Test, +}; use revive_dt_core::Platform; use revive_dt_format::corpus::Corpus; -use tokio::sync::{Mutex, Notify, RwLock, Semaphore}; -use tracing::{Instrument, error, info, info_span, instrument}; - -use revive_dt_config::{Context, OutputFormat, Test}; -use revive_dt_report::{Reporter, ReporterEvent, TestCaseStatus, TestSpecificReporter}; +use revive_dt_report::{Reporter, ReporterEvent, TestCaseStatus}; +use tokio::sync::Mutex; +use tracing::{error, info, info_span, instrument}; use crate::{ differential_tests::Driver, helpers::{ - CachedCompiler, NodePool, TestCaseIgnoreResolvedConfiguration, - create_test_definitions_stream, + CachedCompiler, CorpusDefinitionProcessor, NodePool, TestCaseIgnoreResolvedConfiguration, + TestDefinition, create_test_definitions_stream, process_corpus, }, }; -/// A guard that reports a test as ignored when dropped without a terminal status. -/// -/// When `--fail-fast` aborts in-flight tests via `select!`, the futures are dropped. This guard -/// ensures that each dropped test still sends an ignored event to the aggregator so the report -/// is complete. -struct FailFastGuard { - reporter: Option, +/// The number of test steps that were executed. +type StepsExecuted = usize; + +/// State for test definition processing. +#[derive(Clone)] +struct TestDefinitionProcessorState { + private_key_allocator: Arc>, } -impl FailFastGuard { - fn reported(&mut self) { - self.reporter = None; +/// The definition processor for tests. 
+struct TestDefinitionProcessor; + +impl CorpusDefinitionProcessor for TestDefinitionProcessor { + type Definition<'a> = TestDefinition<'a>; + type ProcessResult = StepsExecuted; + type State = TestDefinitionProcessorState; + + async fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + state: Self::State, + ) -> anyhow::Result { + Driver::new_root(definition, state.private_key_allocator, cached_compiler) + .await? + .execute_all() + .await } -} -impl Drop for FailFastGuard { - fn drop(&mut self) { - if let Some(ref reporter) = self.reporter { - let _ = reporter - .report_test_ignored_event("Aborted due to fail-fast".to_string(), IndexMap::new()); - } + fn on_success( + definition: &Self::Definition<'_>, + steps_executed: StepsExecuted, + ) -> anyhow::Result<()> { + definition + .reporter + .report_test_succeeded_event(steps_executed)?; + Ok(()) + } + + fn on_failure(definition: &Self::Definition<'_>, error: String) -> anyhow::Result<()> { + definition.reporter.report_test_failed_event(error)?; + Ok(()) + } + + fn on_ignored(definition: &Self::Definition<'_>, reason: String) -> anyhow::Result<()> { + definition + .reporter + .report_test_ignored_event(reason, IndexMap::new())?; + Ok(()) + } + + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option> { + fail_fast.fail_fast.then(|| { + let reporter = definition.reporter.clone(); + Box::new(move || { + let _ = reporter.report_test_ignored_event( + "Aborted due to fail-fast".to_string(), + IndexMap::new(), + ); + }) as Box + }) + } + + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span { + info_span!( + "Executing Test Case", + test_id = task_id, + metadata_file_path = %definition.metadata_file_path.display(), + case_idx = %definition.case_idx, + mode = %definition.mode, + ) } } @@ -135,181 +185,26 @@ pub async fn handle_differential_tests(context: Test, reporter: 
Reporter) -> any context.compilation.invalidate_cache, ) .await - .map(Arc::new) .context("Failed to initialize cached compiler")?; - let private_key_allocator = Arc::new(Mutex::new(PrivateKeyAllocator::new( - context.wallet.highest_private_key_exclusive(), - ))); - - // Creating the driver and executing all of the steps. - let semaphore = context - .concurrency - .concurrency_limit() - .map(Semaphore::new) - .map(Arc::new); - let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); - let fail_fast_triggered = Arc::new(AtomicBool::new(false)); - let fail_fast_notify = Arc::new(Notify::new()); - let driver_task = futures::future::join_all(test_definitions.iter().enumerate().map( - |(test_id, test_definition)| { - let running_task_list = running_task_list.clone(); - let semaphore = semaphore.clone(); - let fail_fast_triggered = fail_fast_triggered.clone(); - let fail_fast_notify = fail_fast_notify.clone(); - let fail_fast = context.fail_fast.fail_fast; - - let private_key_allocator = private_key_allocator.clone(); - let cached_compiler = cached_compiler.clone(); - let mode = test_definition.mode.clone(); - let span = info_span!( - "Executing Test Case", - test_id, - metadata_file_path = %test_definition.metadata_file_path.display(), - case_idx = %test_definition.case_idx, - mode = %mode, - ); - async move { - let mut fail_fast_guard = FailFastGuard { - reporter: fail_fast.then(|| test_definition.reporter.clone()), - }; - - if fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - return; - } - let permit = match semaphore.as_ref() { - Some(semaphore) => match semaphore.acquire().await { - Ok(permit) => Some(permit), - Err(_) => { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to 
fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - return; - } - }, - None => None, - }; + let state = TestDefinitionProcessorState { + private_key_allocator: Arc::new(Mutex::new(PrivateKeyAllocator::new( + context.wallet.highest_private_key_exclusive(), + ))), + }; - if fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { - test_definition - .reporter - .report_test_ignored_event( - "Skipped due to fail-fast: a prior test failed".to_string(), - IndexMap::new(), - ) - .expect("aggregator task is joined later so the receiver is alive"); - fail_fast_guard.reported(); - drop(permit); - return; - } + let cli_reporting_task = + tokio::spawn(start_cli_reporting_task(context.output_format, reporter)); - running_task_list.write().await.insert(test_id); - let driver = match Driver::new_root( - test_definition, - private_key_allocator, - &cached_compiler, - ) - .await - { - Ok(driver) => driver, - Err(error) => { - test_definition - .reporter - .report_test_failed_event(format!("{error:#}")) - .expect("Can't fail"); - fail_fast_guard.reported(); - if fail_fast { - fail_fast_triggered.store(true, Ordering::Relaxed); - if let Some(ref sem) = semaphore { - sem.close(); - } - fail_fast_notify.notify_one(); - } - error!("Test Case Failed"); - drop(permit); - running_task_list.write().await.remove(&test_id); - return; - } - }; - info!("Created the driver for the test case"); - - match driver.execute_all().await { - Ok(steps_executed) => test_definition - .reporter - .report_test_succeeded_event(steps_executed) - .expect("Can't fail"), - Err(error) => { - test_definition - .reporter - .report_test_failed_event(format!("{error:#}")) - .expect("Can't fail"); - if fail_fast { - fail_fast_triggered.store(true, Ordering::Relaxed); - if let Some(ref sem) = semaphore { - sem.close(); - } - fail_fast_notify.notify_one(); - } - error!("Test Case Failed"); - } - }; - 
fail_fast_guard.reported(); - info!("Finished the execution of the test case"); - drop(permit); - running_task_list.write().await.remove(&test_id); - } - .instrument(span) - }, - )); - let cli_reporting_task = tokio::spawn(start_cli_reporting_task( - context.output_format.output_format, - reporter, - )); - - tokio::task::spawn(async move { - loop { - let remaining_tasks = running_task_list.read().await; - info!( - count = remaining_tasks.len(), - ?remaining_tasks, - "Remaining Tests" - ); - drop(remaining_tasks); - tokio::time::sleep(Duration::from_secs(10)).await - } - }); - - if context.fail_fast.fail_fast { - tokio::pin!(driver_task); - tokio::select! { - biased; - _ = fail_fast_notify.notified() => { - info!("Fail-fast triggered, aborting remaining tests"); - } - _ = &mut driver_task => {} - } - } else { - driver_task.await; - } - - info!("Finished executing all test cases"); - reporter_clone - .report_completion_event() - .expect("Can't fail"); - drop(reporter_clone); + process_corpus::( + &test_definitions, + &cached_compiler, + state, + &context.concurrency, + &context.fail_fast, + reporter_clone, + ) + .await; cli_reporting_task .await @@ -319,7 +214,7 @@ pub async fn handle_differential_tests(context: Test, reporter: Reporter) -> any } #[allow(irrefutable_let_patterns, clippy::uninlined_format_args)] -async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporter) { +async fn start_cli_reporting_task(output_format: OutputFormatConfiguration, reporter: Reporter) { let mut aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); drop(reporter); @@ -340,7 +235,7 @@ async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporte continue; }; - match output_format { + match output_format.output_format { OutputFormat::Legacy => { let _ = writeln!(buf, "{} - {}", mode, metadata_file_path.display()); for (case_idx, case_status) in case_status.into_iter() { @@ -447,7 +342,7 @@ async fn 
start_cli_reporting_task(output_format: OutputFormat, reporter: Reporte info!("Aggregator Broadcast Channel Closed"); // Summary at the end. - match output_format { + match output_format.output_format { OutputFormat::Legacy => { writeln!( buf, diff --git a/crates/core/src/helpers/cached_compiler.rs b/crates/core/src/helpers/cached_compiler.rs index 4773c5a..dc34e16 100644 --- a/crates/core/src/helpers/cached_compiler.rs +++ b/crates/core/src/helpers/cached_compiler.rs @@ -9,14 +9,16 @@ use std::{ }; use futures::FutureExt; -use revive_dt_common::{iterators::FilesWithExtensionIterator, types::CompilerIdentifier}; +use revive_dt_common::{ + iterators::FilesWithExtensionIterator, + types::{CompilerIdentifier, PlatformIdentifier}, +}; use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler}; -use revive_dt_core::Platform; use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata}; use alloy::{hex::ToHexExt, json_abi::JsonAbi, primitives::Address}; use anyhow::{Context as _, Error, Result}; -use revive_dt_report::ExecutionSpecificReporter; +use revive_dt_report::CompilationReporter; use semver::Version; use serde::{Deserialize, Serialize}; use tokio::sync::{Mutex, RwLock, Semaphore}; @@ -55,7 +57,8 @@ impl<'a> CachedCompiler<'a> { fields( metadata_file_path = %metadata_file_path.display(), %mode, - platform = %platform.platform_identifier() + compiler = %compiler_identifier, + platform = ?platform_identifier, ), err )] @@ -66,11 +69,12 @@ impl<'a> CachedCompiler<'a> { mode: Cow<'a, Mode>, deployed_libraries: Option<&HashMap>, compiler: &dyn SolidityCompiler, - platform: &dyn Platform, - reporter: &ExecutionSpecificReporter, + compiler_identifier: CompilerIdentifier, + platform_identifier: Option, + reporter: &CompilationReporter<'_>, ) -> Result { let cache_key = CacheKey { - compiler_identifier: platform.compiler_identifier(), + compiler_identifier, compiler_version: compiler.version().clone(), metadata_file_path, compiler_mode: 
mode.clone(), @@ -141,26 +145,29 @@ impl<'a> CachedCompiler<'a> { match self.artifacts_cache.get(&cache_key).await { Some(cache_value) => { - if deployed_libraries.is_some() { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); - } else { - reporter - .report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - true, - None, - cache_value.compiler_output.clone(), - ) - .expect("Can't happen"); + match reporter { + CompilationReporter::PreLink(reporter) => { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ) + .expect("Can't happen"); + } + CompilationReporter::Execution(reporter) => { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + true, + None, + cache_value.compiler_output.clone(), + ) + .expect("Can't happen"); + } } cache_value.compiler_output } @@ -196,7 +203,7 @@ async fn compile_contracts( mode: &Mode, deployed_libraries: Option<&HashMap>, compiler: &dyn SolidityCompiler, - reporter: &ExecutionSpecificReporter, + reporter: &CompilationReporter<'_>, ) -> Result { // Puts a limit on how many compilations we can perform at any given instance which helps us // with some of the errors we've been seeing with high concurrency on MacOS (we have not tried @@ -238,48 +245,83 @@ async fn compile_contracts( let output = compilation.try_build(compiler).await; match (output.as_ref(), deployed_libraries.is_some()) { - (Ok(output), true) => { - reporter - .report_post_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); - } - (Ok(output), false) => { - reporter - 
.report_pre_link_contracts_compilation_succeeded_event( - compiler.version().clone(), - compiler.path(), - false, - input, - output.clone(), - ) - .expect("Can't happen"); - } - (Err(err), true) => { - reporter - .report_post_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); - } - (Err(err), false) => { - reporter - .report_pre_link_contracts_compilation_failed_event( - compiler.version().clone(), - compiler.path().to_path_buf(), - input, - format!("{err:#}"), - ) - .expect("Can't happen"); - } + (Ok(output), true) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + CompilationReporter::PreLink(_) => { + unreachable!(); + } + }, + (Ok(output), false) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + CompilationReporter::PreLink(reporter) => { + reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler.version().clone(), + compiler.path(), + false, + input, + output.clone(), + ) + .expect("Can't happen"); + } + }, + (Err(err), true) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_post_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } + CompilationReporter::PreLink(_) => { + unreachable!(); + } + }, + (Err(err), false) => match reporter { + CompilationReporter::Execution(reporter) => { + reporter + .report_pre_link_contracts_compilation_failed_event( + 
compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } + CompilationReporter::PreLink(reporter) => { + reporter + .report_pre_link_contracts_compilation_failed_event( + compiler.version().clone(), + compiler.path().to_path_buf(), + input, + format!("{err:#}"), + ) + .expect("Can't happen"); + } + }, } output @@ -298,11 +340,14 @@ impl ArtifactsCache { #[instrument(level = "debug", skip_all, err)] pub async fn with_invalidated_cache(self) -> Result { - cacache::clear(self.path.as_path()) - .await - .map_err(Into::::into) - .with_context(|| format!("Failed to clear cache at {}", self.path.display()))?; - Ok(self) + match cacache::clear(self.path.as_path()).await { + Ok(()) => Ok(self), + Err(cacache::Error::IoError(err, _)) if err.kind() == std::io::ErrorKind::NotFound => { + Ok(self) + } + Err(err) => Err(Into::::into(err)) + .with_context(|| format!("Failed to clear cache at {}", self.path.display())), + } } #[instrument(level = "debug", skip_all, err)] diff --git a/crates/core/src/helpers/compile.rs b/crates/core/src/helpers/compile.rs new file mode 100644 index 0000000..c938f67 --- /dev/null +++ b/crates/core/src/helpers/compile.rs @@ -0,0 +1,303 @@ +use std::sync::{Arc, LazyLock}; +use std::{borrow::Cow, path::Path}; + +use futures::{Stream, StreamExt, stream}; +use indexmap::{IndexMap, indexmap}; +use regex::Regex; +use revive_dt_common::{cached_fs::read_to_string, types::CompilerIdentifier}; +use revive_dt_compiler::{Mode, SolidityCompiler, revive_resolc::Resolc}; +use revive_dt_config::Context; +use revive_dt_format::{corpus::Corpus, metadata::MetadataFile}; +use revive_dt_report::{PreLinkCompilationSpecificReporter, PreLinkCompilationSpecifier, Reporter}; +use semver::VersionReq; +use serde_json::{self, json}; +use tracing::{debug, error, info}; + +/// This is a full description of a compilation to run alongside the full metadata file +/// and the specific mode to compile with. 
+pub struct CompilationDefinition<'a> { + pub metadata: &'a MetadataFile, + pub metadata_file_path: &'a Path, + pub mode: Cow<'a, Mode>, + pub compiler_identifier: CompilerIdentifier, + pub compiler: Box, + pub reporter: PreLinkCompilationSpecificReporter, +} + +impl<'a> CompilationDefinition<'a> { + /// Checks if this compilation can be run with the current configuration. + pub fn check_compatibility(&self) -> CompilationCheckFunctionResult { + self.check_compiler_compatibility()?; + self.check_pragma_solidity_compatibility()?; + Ok(()) + } + + /// Checks if the compiler supports the provided mode. + fn check_compiler_compatibility(&self) -> CompilationCheckFunctionResult { + let mut error_map = indexmap! {}; + let is_compatible = self.compiler.supports_mode(&self.mode); + error_map.insert(self.compiler_identifier.into(), json!(is_compatible)); + + if is_compatible { + Ok(()) + } else { + Err(("The compiler does not support this mode.", error_map)) + } + } + + /// Checks if the file-specified Solidity version is compatible with the configured version. + fn check_pragma_solidity_compatibility(&self) -> CompilationCheckFunctionResult { + let files_to_compile = self.metadata.files_to_compile().map_err(|e| { + ( + "Failed to enumerate files to compile.", + indexmap! { + "metadata_file_path" => json!(self.metadata_file_path.display().to_string()), + "error" => json!(e.to_string()), + }, + ) + })?; + let mut incompatible_files: Vec = Vec::new(); + + for source_path in files_to_compile { + let source = read_to_string(&source_path).map_err(|e| { + ( + "Failed to read source file.", + indexmap! 
{ + "source_path" => json!(source_path.display().to_string()), + "error" => json!(e.to_string()), + }, + ) + })?; + + if let Some(version_requirement) = Self::parse_pragma_solidity_requirement(&source) { + if !version_requirement.matches(self.compiler.version()) { + incompatible_files.push(json!({ + "source_path": source_path.display().to_string(), + "pragma": version_requirement.to_string(), + })); + } + } + } + + if incompatible_files.is_empty() { + Ok(()) + } else { + Err(( + "Source pragma is incompatible with the Solidity compiler version.", + indexmap! { + "compiler_version" => json!(self.compiler.version().to_string()), + "incompatible_files" => json!(incompatible_files), + }, + )) + } + } + + /// Parses the Solidity version requirement from `source`. + /// Returns `None` if no pragma is found or if it cannot be parsed. + fn parse_pragma_solidity_requirement(source: &str) -> Option { + static PRAGMA_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"pragma\s+solidity\s+(?P[^;]+);").unwrap()); + + let caps = PRAGMA_REGEX.captures(source)?; + let solidity_version_format = caps.name("version")?.as_str().trim(); + let semver_format = Self::solidity_version_to_semver(solidity_version_format); + + VersionReq::parse(&semver_format).ok() + } + + /// Converts Solidity version constraints to semver-compatible format. + /// Example: + /// ```txt + /// Solidity: ">=0.8.0 <0.9.0" or "^0.8.0" or "0.8.33" + /// semver: ">=0.8.0, <0.9.0" or "^0.8.0" or "=0.8.33" + /// ``` + fn solidity_version_to_semver(version: &str) -> String { + version + .split_whitespace() + .map(|part| { + let is_exact_version = part.starts_with(|c: char| c.is_ascii_digit()); + if is_exact_version { + format!("={}", part) + } else { + part.to_string() + } + }) + .collect::>() + .join(", ") + } +} + +type CompilationCheckFunctionResult = + Result<(), (&'static str, IndexMap<&'static str, serde_json::Value>)>; + +/// Creates a stream of [`CompilationDefinition`]s for the contracts to be compiled. 
+pub async fn create_compilation_definitions_stream<'a>( + context: &Context, + corpus: &'a Corpus, + mode: Mode, + reporter: Reporter, +) -> impl Stream> { + let cloned_reporter = reporter.clone(); + stream::iter( + corpus + .compilation_metadata_files_iterator() + .inspect(move |metadata_file| { + cloned_reporter + .report_metadata_file_discovery_event( + metadata_file.metadata_file_path.clone(), + metadata_file.content.clone(), + ) + .unwrap(); + }) + .map(move |metadata_file| { + let reporter = reporter.clone(); + + ( + metadata_file, + Cow::<'_, Mode>::Owned(mode.clone()), + reporter.pre_link_compilation_specific_reporter(Arc::new( + PreLinkCompilationSpecifier { + compiler_mode: mode.clone(), + metadata_file_path: metadata_file.metadata_file_path.clone(), + }, + )), + ) + }) + .inspect(|(_, _, reporter)| { + reporter + .report_pre_link_compilation_discovery_event() + .expect("Can't fail"); + }), + ) + // Creating the `CompilationDefinition` objects from all of the various objects we have. + .filter_map(move |(metadata_file, mode, reporter)| async move { + // NOTE: Currently always specifying the resolc compiler. + let compiler = Resolc::new(context.clone(), mode.solc_version.clone().map(Into::into)) + .await + .map(|compiler| Box::new(compiler) as Box) + .inspect_err(|err| error!(?err, "Failed to instantiate the compiler")) + .ok()?; + + Some(CompilationDefinition { + metadata: metadata_file, + metadata_file_path: metadata_file.metadata_file_path.as_path(), + mode: mode.clone(), + // NOTE: Currently always specifying the resolc compiler. + compiler_identifier: CompilerIdentifier::Resolc, + compiler, + reporter, + }) + }) + // Filter out the compilations which are incompatible. 
+ .filter_map(move |compilation| async move { + match compilation.check_compatibility() { + Ok(()) => Some(compilation), + Err((reason, additional_information)) => { + debug!( + metadata_file_path = %compilation.metadata.metadata_file_path.display(), + mode = %compilation.mode, + reason, + additional_information = + serde_json::to_string(&additional_information).unwrap(), + "Ignoring Compilation" + ); + compilation + .reporter + .report_pre_link_contracts_compilation_ignored_event( + reason.to_string(), + additional_information + .into_iter() + .map(|(k, v)| (k.into(), v)) + .collect::>(), + ) + .expect("Can't fail"); + None + } + } + }) + .inspect(|compilation| { + info!( + metadata_file_path = %compilation.metadata_file_path.display(), + mode = %compilation.mode, + "Created a compilation definition" + ); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use semver::Version; + + #[test] + fn test_parse_pragma_compound_constraint() { + let source = r#" + // SPDX-License-Identifier: MIT + pragma solidity >=0.8.0 <0.9.0; + + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse(">=0.8.0, <0.9.0").unwrap()); + assert!(req.matches(&Version::new(0, 8, 0))); + assert!(req.matches(&Version::new(0, 8, 99))); + assert!(!req.matches(&Version::new(0, 7, 99))); + assert!(!req.matches(&Version::new(0, 9, 0))); + } + + #[test] + fn test_parse_pragma_exact_version() { + let source = r#" + // SPDX-License-Identifier: MIT + pragma solidity 0.8.19; + + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("=0.8.19").unwrap()); + assert!(req.matches(&Version::new(0, 8, 19))); + assert!(!req.matches(&Version::new(0, 8, 20))); + } + + #[test] + fn test_parse_pragma_caret_version() { + let source = "pragma solidity ^0.8.0;"; + let req = 
CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("^0.8.0").unwrap()); + assert!(req.matches(&Version::new(0, 8, 0))); + assert!(req.matches(&Version::new(0, 8, 33))); + assert!(!req.matches(&Version::new(0, 9, 0))); + assert!(!req.matches(&Version::new(0, 7, 0))); + } + + #[test] + fn test_parse_pragma_tilde_version() { + let source = "pragma solidity ~0.8.19;"; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("~0.8.19").unwrap()); + assert!(req.matches(&Version::new(0, 8, 19))); + assert!(req.matches(&Version::new(0, 8, 33))); + assert!(!req.matches(&Version::new(0, 8, 18))); + assert!(!req.matches(&Version::new(0, 9, 0))); + } + + #[test] + fn test_parse_pragma_upper_bound_version() { + let source = "pragma solidity <=0.4.21;"; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source).unwrap(); + assert_eq!(req, VersionReq::parse("<=0.4.21").unwrap()); + assert!(req.matches(&Version::new(0, 4, 21))); + assert!(req.matches(&Version::new(0, 4, 20))); + assert!(!req.matches(&Version::new(0, 8, 33))); + } + + #[test] + fn test_parse_pragma_missing() { + let source = r#" + // SPDX-License-Identifier: MIT + contract Test {} + "#; + let req = CompilationDefinition::parse_pragma_solidity_requirement(source); + assert!(req.is_none()); + } +} diff --git a/crates/core/src/helpers/corpus_processor.rs b/crates/core/src/helpers/corpus_processor.rs new file mode 100644 index 0000000..5c60f12 --- /dev/null +++ b/crates/core/src/helpers/corpus_processor.rs @@ -0,0 +1,228 @@ +//! Shared corpus processing infrastructure. 
+ +use std::{ + collections::BTreeSet, + future::Future, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, +}; + +use anyhow::Result; +use revive_dt_config::{ConcurrencyConfiguration, FailFastConfiguration}; +use revive_dt_report::Reporter; +use tokio::sync::{Notify, RwLock, Semaphore}; +use tracing::{Instrument, error, info}; + +use crate::helpers::CachedCompiler; + +/// A guard that invokes `action` when dropped without a terminal status, unless explicitly +/// disarmed via `reported()`. +/// +/// When `--fail-fast` aborts in-flight tasks via `select!`, the futures are dropped. This guard +/// ensures that each dropped task can still be reported (e.g. report an ignored event) to the +/// aggregator so that the report is complete. +struct FailFastGuard { + action: Option>, +} + +impl FailFastGuard { + fn reported(&mut self) { + self.action = None; + } +} + +impl Drop for FailFastGuard { + fn drop(&mut self) { + if let Some(action) = self.action.take() { + action(); + } + } +} + +/// Describes how to process a definition within a corpus. +pub trait CorpusDefinitionProcessor: Sized + 'static { + /// The definition type produced by the stream. + type Definition<'a>: 'a; + + /// The result type from processing a definition. + type ProcessResult; + + /// Additional context-specific state needed for processing. + type State: Clone; + + /// Processes a single definition. + fn process_definition<'a>( + definition: &'a Self::Definition<'a>, + cached_compiler: &'a CachedCompiler<'a>, + state: Self::State, + ) -> impl Future>; + + /// Called when a definition is processed successfully. + fn on_success(_definition: &Self::Definition<'_>, _result: Self::ProcessResult) -> Result<()> { + Ok(()) + } + + /// Called when a definition fails being processed. + fn on_failure(_definition: &Self::Definition<'_>, _error: String) -> Result<()> { + Ok(()) + } + + /// Called when a definition is ignored/aborted. 
+ fn on_ignored(_definition: &Self::Definition<'_>, _reason: String) -> Result<()> { + Ok(()) + } + + /// Creates the action to run if this task is aborted due to fail-fast. + /// Returns `None` if fail-fast is disabled. + fn create_fail_fast_action( + definition: &Self::Definition<'_>, + fail_fast: &FailFastConfiguration, + ) -> Option>; + + /// Creates the tracing span for processing this definition. + fn create_span(task_id: usize, definition: &Self::Definition<'_>) -> tracing::Span; +} + +/// Processes a corpus of definitions using the provided processor. +pub async fn process_corpus<'a, Processor: CorpusDefinitionProcessor>( + definitions: &'a [Processor::Definition<'a>], + cached_compiler: &'a CachedCompiler<'a>, + state: Processor::State, + concurrency: &ConcurrencyConfiguration, + fail_fast: &FailFastConfiguration, + reporter: Reporter, +) { + let semaphore = concurrency + .concurrency_limit() + .map(Semaphore::new) + .map(Arc::new); + let running_task_list = Arc::new(RwLock::new(BTreeSet::::new())); + + let fail_fast_triggered = Arc::new(AtomicBool::new(false)); + let fail_fast_notify = Arc::new(Notify::new()); + + // Process all definitions concurrently. 
+ let driver_task = + futures::future::join_all(definitions.iter().enumerate().map(|(task_id, definition)| { + let running_task_list = running_task_list.clone(); + let semaphore = semaphore.clone(); + let fail_fast_triggered = fail_fast_triggered.clone(); + let fail_fast_notify = fail_fast_notify.clone(); + let state = state.clone(); + let span = Processor::create_span(task_id, definition); + + async move { + let mut fail_fast_guard = FailFastGuard { + action: Processor::create_fail_fast_action(definition, fail_fast), + }; + + if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { + Processor::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + return; + } + + let permit = match semaphore.as_ref() { + Some(semaphore) => match semaphore.acquire().await { + Ok(permit) => Some(permit), + Err(_) => { + Processor::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + return; + } + }, + None => None, + }; + + // Double-check fail-fast after acquiring permit. 
+ if fail_fast.fail_fast && fail_fast_triggered.load(Ordering::Relaxed) { + Processor::on_ignored( + definition, + "Skipped due to fail-fast: a prior task failed".to_string(), + ) + .expect("aggregator task is joined later so the receiver is alive"); + fail_fast_guard.reported(); + drop(permit); + return; + } + + running_task_list.write().await.insert(task_id); + + let result = + Processor::process_definition(definition, cached_compiler, state).await; + match result { + Ok(process_result) => { + Processor::on_success(definition, process_result) + .expect("aggregator task is joined later so the receiver is alive"); + } + Err(error) => { + Processor::on_failure(definition, format!("{error:#}")) + .expect("aggregator task is joined later so the receiver is alive"); + + if fail_fast.fail_fast { + fail_fast_triggered.store(true, Ordering::Relaxed); + if let Some(ref sem) = semaphore { + sem.close(); + } + fail_fast_notify.notify_one(); + } + error!("Task Failed"); + } + } + + fail_fast_guard.reported(); + + info!("Finished processing the corpus definition"); + drop(permit); + running_task_list.write().await.remove(&task_id); + } + .instrument(span) + })); + + // Spawn monitoring task that logs remaining tasks periodically. + tokio::task::spawn({ + let running_task_list = running_task_list.clone(); + async move { + loop { + let remaining_tasks = running_task_list.read().await; + info!( + count = remaining_tasks.len(), + ?remaining_tasks, + "Remaining Tasks" + ); + drop(remaining_tasks); + tokio::time::sleep(Duration::from_secs(10)).await; + } + } + }); + + // Wait for completion, with optional fail-fast abort. + if fail_fast.fail_fast { + tokio::pin!(driver_task); + tokio::select! 
{ + biased; + _ = fail_fast_notify.notified() => { + info!("Fail-fast triggered, aborting remaining tasks"); + } + _ = &mut driver_task => {} + } + } else { + driver_task.await; + } + + info!("Finished processing all corpus definitions"); + reporter + .report_completion_event() + .expect("aggregator task is joined later so the receiver is alive"); +} diff --git a/crates/core/src/helpers/mod.rs b/crates/core/src/helpers/mod.rs index d2948af..25dbb5f 100644 --- a/crates/core/src/helpers/mod.rs +++ b/crates/core/src/helpers/mod.rs @@ -1,7 +1,11 @@ mod cached_compiler; +mod compile; +mod corpus_processor; mod pool; mod test; pub use cached_compiler::*; +pub use compile::*; +pub use corpus_processor::*; pub use pool::*; pub use test::*; diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index 407c022..f41414b 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -1,10 +1,11 @@ +mod compilations; mod differential_benchmarks; mod differential_tests; mod helpers; use anyhow::{Context as _, bail}; use clap::Parser; -use revive_dt_report::{ReportAggregator, TestCaseStatus}; +use revive_dt_report::{CompilationStatus, ReportAggregator, TestCaseStatus}; use schemars::schema_for; use tracing::{info, level_filters::LevelFilter}; use tracing_subscriber::{EnvFilter, FmtSubscriber}; @@ -14,7 +15,7 @@ use revive_dt_core::Platform; use revive_dt_format::metadata::Metadata; use crate::{ - differential_benchmarks::handle_differential_benchmarks, + compilations::handle_compilations, differential_benchmarks::handle_differential_benchmarks, differential_tests::handle_differential_tests, }; @@ -123,5 +124,31 @@ fn main() -> anyhow::Result<()> { Ok(()) } + Context::Compile(context) => tokio::runtime::Builder::new_multi_thread() + .worker_threads(context.concurrency.number_of_threads) + .enable_all() + .build() + .expect("Failed building the Runtime") + .block_on(async move { + let compilations_handling_task = handle_compilations(*context, reporter); + + let (_, 
report) = + futures::future::try_join(compilations_handling_task, report_aggregator_task) + .await?; + + let contains_failure = report + .execution_information + .values() + .flat_map(|metadata_file_report| { + metadata_file_report.compilation_reports.values() + }) + .any(|report| matches!(report.status, Some(CompilationStatus::Failure { .. }))); + + if contains_failure { + bail!("Some compilations failed") + } + + Ok(()) + }), } } diff --git a/crates/format/src/corpus.rs b/crates/format/src/corpus.rs index 83cc847..178a9fd 100644 --- a/crates/format/src/corpus.rs +++ b/crates/format/src/corpus.rs @@ -7,7 +7,7 @@ use std::{ use itertools::Itertools; use revive_dt_common::{ iterators::{EitherIter, FilesWithExtensionIterator}, - types::{Mode, ParsedMode, ParsedTestSpecifier}, + types::{Mode, ParsedCompilationSpecifier, ParsedMode, ParsedTestSpecifier}, }; use tracing::{debug, warn}; @@ -19,6 +19,7 @@ use crate::{ #[derive(Default)] pub struct Corpus { test_specifiers: HashMap>, + compilation_specifiers: HashMap>, metadata_files: HashMap, } @@ -59,6 +60,32 @@ impl Corpus { Ok(self) } + pub fn with_compilation_specifier( + mut self, + compilation_specifier: ParsedCompilationSpecifier, + ) -> anyhow::Result { + match &compilation_specifier { + ParsedCompilationSpecifier::FileOrDirectory { + metadata_or_directory_file_path: metadata_file_path, + } => { + let metadata_files = enumerate_metadata_files(metadata_file_path); + self.compilation_specifiers.insert( + compilation_specifier, + metadata_files + .iter() + .map(|metadata_file| metadata_file.metadata_file_path.clone()) + .collect(), + ); + for metadata_file in metadata_files.into_iter() { + self.metadata_files + .insert(metadata_file.metadata_file_path.clone(), metadata_file); + } + } + }; + + Ok(self) + } + pub fn cases_iterator( &self, ) -> impl Iterator)> + '_ { @@ -153,6 +180,17 @@ impl Corpus { iterator.unique_by(|item| (&item.0.metadata_file_path, item.1, item.3.clone())) } + /// Iterator over the metadata files 
for the compilation specifiers. + pub fn compilation_metadata_files_iterator( + &self, + ) -> impl Iterator + '_ { + self.compilation_specifiers + .values() + .flatten() + .map(|path| self.metadata_files.get(path).expect("Must succeed")) + .unique_by(|metadata_file| &metadata_file.metadata_file_path) + } + pub fn metadata_file_count(&self) -> usize { self.metadata_files.len() } diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs index 3a8db70..8bf5c2c 100644 --- a/crates/report/src/aggregator.rs +++ b/crates/report/src/aggregator.rs @@ -37,6 +37,7 @@ pub struct ReportAggregator { /* Internal Report State */ report: Report, remaining_cases: HashMap>>, + remaining_compilation_modes: HashMap>, /* Channels */ runner_tx: Option>, runner_rx: UnboundedReceiver, @@ -54,9 +55,11 @@ impl ReportAggregator { Context::Test(ref context) => context.report.file_name.clone(), Context::Benchmark(ref context) => context.report.file_name.clone(), Context::ExportJsonSchema(_) | Context::ExportGenesis(..) 
=> None, + Context::Compile(ref context) => context.report.file_name.clone(), }, report: Report::new(context), remaining_cases: Default::default(), + remaining_compilation_modes: Default::default(), runner_tx: Some(runner_tx), runner_rx, listener_tx, @@ -87,6 +90,9 @@ impl ReportAggregator { RunnerEvent::TestCaseDiscovery(event) => { self.handle_test_case_discovery(*event); } + RunnerEvent::PreLinkCompilationDiscovery(event) => { + self.handle_pre_link_compilation_discovery(*event); + } RunnerEvent::TestSucceeded(event) => { self.handle_test_succeeded_event(*event); } @@ -111,6 +117,9 @@ impl ReportAggregator { RunnerEvent::PostLinkContractsCompilationFailed(event) => { self.handle_post_link_contracts_compilation_failed_event(*event) } + RunnerEvent::PreLinkContractsCompilationIgnored(event) => { + self.handle_pre_link_contracts_compilation_ignored_event(*event); + } RunnerEvent::LibrariesDeployed(event) => { self.handle_libraries_deployed_event(*event); } @@ -186,14 +195,22 @@ impl ReportAggregator { .insert(event.test_specifier.case_idx); } + fn handle_pre_link_compilation_discovery(&mut self, event: PreLinkCompilationDiscoveryEvent) { + self.remaining_compilation_modes + .entry( + event + .compilation_specifier + .metadata_file_path + .clone() + .into(), + ) + .or_default() + .insert(event.compilation_specifier.compiler_mode.clone()); + } + fn handle_test_succeeded_event(&mut self, event: TestSucceededEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.compiler_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. 
let test_case_report = self.test_case_report(&event.test_specifier); @@ -205,12 +222,7 @@ impl ReportAggregator { fn handle_test_failed_event(&mut self, event: TestFailedEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.compiler_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. let test_case_report = self.test_case_report(&event.test_specifier); @@ -222,12 +234,7 @@ impl ReportAggregator { fn handle_test_ignored_event(&mut self, event: TestIgnoredEvent) { // Remove this from the set of cases we're tracking since it has completed. - self.remaining_cases - .entry(event.test_specifier.metadata_file_path.clone().into()) - .or_default() - .entry(event.test_specifier.compiler_mode.clone()) - .or_default() - .remove(&event.test_specifier.case_idx); + self.remove_remaining_case(&event.test_specifier); // Add information on the fact that the case was ignored to the report. 
let test_case_report = self.test_case_report(&event.test_specifier); @@ -295,98 +302,153 @@ impl ReportAggregator { &mut self, event: PreLinkContractsCompilationSucceededEvent, ) { - let include_input = self - .report - .context - .as_report_configuration() - .include_compiler_input; - let include_output = self - .report - .context - .as_report_configuration() - .include_compiler_output; - - let execution_information = self.execution_information(&event.execution_specifier); - - let compiler_input = if include_input { + let report_configuration = self.report.context.as_report_configuration(); + let compiler_input = if report_configuration.include_compiler_input { event.compiler_input } else { None }; - execution_information.pre_link_compilation_status = Some(CompilationStatus::Success { + let status = CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, compiled_contracts_info: Self::generate_compiled_contracts_info( event.compiler_output, - include_output, + report_configuration.include_compiler_output, ), - }); + }; + + match &event.specifier { + CompilationSpecifier::Execution(specifier) => { + let execution_information = self.execution_information(specifier); + execution_information.pre_link_compilation_status = Some(status); + } + CompilationSpecifier::PreLink(specifier) => { + let report = self.pre_link_compilation_report(specifier); + report.status = Some(status); + self.handle_post_pre_link_contracts_compilation_status_update(specifier); + } + } } fn handle_post_link_contracts_compilation_succeeded_event( &mut self, event: PostLinkContractsCompilationSucceededEvent, ) { - let include_input = self - .report - .context - .as_report_configuration() - .include_compiler_input; - let include_output = self - .report - .context - .as_report_configuration() - .include_compiler_output; - - let execution_information = self.execution_information(&event.execution_specifier); 
- - let compiler_input = if include_input { + let report_configuration = self.report.context.as_report_configuration(); + let compiler_input = if report_configuration.include_compiler_input { event.compiler_input } else { None }; - execution_information.post_link_compilation_status = Some(CompilationStatus::Success { + let status = CompilationStatus::Success { is_cached: event.is_cached, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input, compiled_contracts_info: Self::generate_compiled_contracts_info( event.compiler_output, - include_output, + report_configuration.include_compiler_output, ), - }); + }; + + let execution_information = self.execution_information(&event.execution_specifier); + execution_information.post_link_compilation_status = Some(status); } fn handle_pre_link_contracts_compilation_failed_event( &mut self, event: PreLinkContractsCompilationFailedEvent, ) { - let execution_information = self.execution_information(&event.execution_specifier); - - execution_information.pre_link_compilation_status = Some(CompilationStatus::Failure { + let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input: event.compiler_input, - }); + }; + + match &event.specifier { + CompilationSpecifier::Execution(specifier) => { + let execution_information = self.execution_information(specifier); + execution_information.pre_link_compilation_status = Some(status); + } + CompilationSpecifier::PreLink(specifier) => { + let report = self.pre_link_compilation_report(specifier); + report.status = Some(status); + self.handle_post_pre_link_contracts_compilation_status_update(specifier); + } + } } fn handle_post_link_contracts_compilation_failed_event( &mut self, event: PostLinkContractsCompilationFailedEvent, ) { - let execution_information = self.execution_information(&event.execution_specifier); - - 
execution_information.post_link_compilation_status = Some(CompilationStatus::Failure { + let status = CompilationStatus::Failure { reason: event.reason, compiler_version: event.compiler_version, compiler_path: event.compiler_path, compiler_input: event.compiler_input, - }); + }; + + let execution_information = self.execution_information(&event.execution_specifier); + execution_information.post_link_compilation_status = Some(status); + } + + fn handle_pre_link_contracts_compilation_ignored_event( + &mut self, + event: PreLinkContractsCompilationIgnoredEvent, + ) { + let status = CompilationStatus::Ignored { + reason: event.reason, + additional_fields: event.additional_fields, + }; + + let report = self.pre_link_compilation_report(&event.compilation_specifier); + report.status = Some(status.clone()); + self.handle_post_pre_link_contracts_compilation_status_update(&event.compilation_specifier); + } + + fn handle_post_pre_link_contracts_compilation_status_update( + &mut self, + specifier: &PreLinkCompilationSpecifier, + ) { + // Remove this from the set we're tracking since it has completed. + self.remove_remaining_compilation_mode(specifier); + + let remaining_modes = self + .remaining_compilation_modes + .entry(specifier.metadata_file_path.clone().into()) + .or_default(); + if !remaining_modes.is_empty() { + return; + } + + let status_per_mode = self + .report + .execution_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .compilation_reports + .iter() + .flat_map(|(mode, report)| { + let status = report.status.clone().expect("Can't be uninitialized"); + Some((mode.clone(), status)) + }) + .collect::>(); + + let event = ReporterEvent::MetadataFileModeCombinationCompilationCompleted { + metadata_file_path: specifier.metadata_file_path.clone().into(), + compilation_status: status_per_mode, + }; + + // According to the documentation on send, the sending fails if there are no more receiver + // handles. 
Therefore, this isn't an error that we want to bubble up or anything. If we fail + // to send then we ignore the error. + let _ = self.listener_tx.send(event); } fn handle_libraries_deployed_event(&mut self, event: LibrariesDeployedEvent) { @@ -563,6 +625,19 @@ impl ReportAggregator { .get_or_insert_default() } + fn pre_link_compilation_report( + &mut self, + specifier: &PreLinkCompilationSpecifier, + ) -> &mut PreLinkCompilationReport { + self.report + .execution_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .compilation_reports + .entry(specifier.compiler_mode.clone()) + .or_default() + } + /// Generates the compiled contract information for each contract at each path. fn generate_compiled_contracts_info( compiler_output: CompilerOutput, @@ -613,9 +688,26 @@ impl ReportAggregator { Err(_) => (false, B256::from_slice(&Sha256::digest(input.as_bytes()))), } } + + /// Removes the case specified by the `specifier` from the tracked remaining cases. + fn remove_remaining_case(&mut self, specifier: &TestSpecifier) { + self.remaining_cases + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .entry(specifier.compiler_mode.clone()) + .or_default() + .remove(&specifier.case_idx); + } + + /// Removes the compilation mode specified by the `specifier` from the tracked remaining compilation modes. + fn remove_remaining_compilation_mode(&mut self, specifier: &PreLinkCompilationSpecifier) { + self.remaining_compilation_modes + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .remove(&specifier.compiler_mode); + } } -#[serde_as] #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Report { /// The context that the tool was started up with. @@ -625,7 +717,8 @@ pub struct Report { /// Metrics from the execution. #[serde(default, skip_serializing_if = "Option::is_none")] pub metrics: Option, - /// Information relating to each test case. 
+ /// Information relating to each metadata file after executing the tool. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub execution_information: BTreeMap, } @@ -640,13 +733,19 @@ impl Report { } } +#[serde_as] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct MetadataFileReport { /// Metrics from the execution. #[serde(default, skip_serializing_if = "Option::is_none")] pub metrics: Option, /// The report of each case keyed by the case idx. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub case_reports: BTreeMap, + /// The [`CompilationReport`] for each of the [`Mode`]s. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + #[serde_as(as = "BTreeMap")] + pub compilation_reports: BTreeMap, } #[serde_as] @@ -741,6 +840,14 @@ pub struct ExecutionInformation { pub deployed_contracts: Option>, } +/// The pre-link-only compilation report. +#[derive(Clone, Debug, Serialize, Deserialize, Default)] +pub struct PreLinkCompilationReport { + /// The compilation status. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub status: Option, +} + /// Information related to compilation #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(tag = "status")] @@ -777,6 +884,14 @@ pub enum CompilationStatus { #[serde(default, skip_serializing_if = "Option::is_none")] compiler_input: Option, }, + /// The compilation was ignored. + Ignored { + /// The reason behind the compilation being ignored. + reason: String, + /// Additional fields that describe more information on why the compilation is ignored. + #[serde(flatten)] + additional_fields: IndexMap, + }, } /// Information about the compiled contract. 
diff --git a/crates/report/src/common.rs b/crates/report/src/common.rs index 0180b25..00e886a 100644 --- a/crates/report/src/common.rs +++ b/crates/report/src/common.rs @@ -35,3 +35,19 @@ pub struct StepExecutionSpecifier { pub execution_specifier: Arc, pub step_idx: StepPath, } + +/// An absolute specifier for pre-link-only compilation. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct PreLinkCompilationSpecifier { + pub compiler_mode: Mode, + pub metadata_file_path: PathBuf, +} + +/// An absolute specifier for compilation events depending on the context. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum CompilationSpecifier { + /// Compilation happening as part of test execution. + Execution(Arc), + /// Pre-link-only compilation happening without test execution. + PreLink(Arc), +} diff --git a/crates/report/src/reporter_event.rs b/crates/report/src/reporter_event.rs index 0211e64..59aa9c7 100644 --- a/crates/report/src/reporter_event.rs +++ b/crates/report/src/reporter_event.rs @@ -5,7 +5,7 @@ use std::collections::BTreeMap; use revive_dt_compiler::Mode; use revive_dt_format::case::CaseIdx; -use crate::{MetadataFilePath, TestCaseStatus}; +use crate::{CompilationStatus, MetadataFilePath, TestCaseStatus}; #[derive(Clone, Debug)] pub enum ReporterEvent { @@ -19,4 +19,11 @@ pub enum ReporterEvent { /// The status of each one of the cases. case_status: BTreeMap, }, + + /// An event sent by the reporter once an entire metadata file and mode combination has + /// finished pre-link-only compilation. 
+ MetadataFileModeCombinationCompilationCompleted { + metadata_file_path: MetadataFilePath, + compilation_status: BTreeMap, + }, } diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs index fe4155a..dd49739 100644 --- a/crates/report/src/runner_event.rs +++ b/crates/report/src/runner_event.rs @@ -16,13 +16,30 @@ use tokio::sync::{broadcast, oneshot}; use crate::MinedBlockInformation; use crate::TransactionInformation; -use crate::{ExecutionSpecifier, ReporterEvent, TestSpecifier, common::MetadataFilePath}; +use crate::{ + CompilationSpecifier, ExecutionSpecifier, PreLinkCompilationSpecifier, ReporterEvent, + TestSpecifier, common::MetadataFilePath, +}; + +/// Conditionally wraps a value, or returns it as is. +macro_rules! __maybe_wrap { + ($value:expr, $wrapper:path) => { + $wrapper($value) + }; + ($value:expr) => { + $value + }; +} -macro_rules! __report_gen_emit_test_specific { +/// Generates a report method that emits an event, auto-filling the specifier from self. +/// Optionally wraps the specifier in if a wrapper path is provided. +macro_rules! __report_gen_emit_with_specifier { ( $ident:ident, $variant_ident:ident, - $skip_field:ident; + $specifier_field_on_self:ident, + $specifier_field_on_event:ident + $(, $specifier_wrapper:path)?; $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* @@ -34,7 +51,10 @@ macro_rules! __report_gen_emit_test_specific { $(, $aname: impl Into<$aty> )* ) -> anyhow::Result<()> { self.report([< $variant_ident Event >] { - $skip_field: self.test_specifier.clone() + $specifier_field_on_event: __maybe_wrap!( + self.$specifier_field_on_self.clone() + $(, $specifier_wrapper)? + ) $(, $bname: $bname.into() )* $(, $aname: $aname.into() )* }) @@ -43,259 +63,189 @@ macro_rules! __report_gen_emit_test_specific { }; } -macro_rules! __report_gen_emit_test_specific_by_parse { +/// Scans event fields looking for a matching specifier field name. 
+/// +/// Each MATCH arm maps a specifier field on `self` (the reporter) to a specifier field +/// on the event enum variant. This allows for the event's field to have a different name +/// than the reporter's specifier field if needed (e.g., `specifier` instead of `test_specifier`). +/// +/// To support a new specifier field, just add a corresponding MATCH arm. +macro_rules! __report_gen_scan_for_specifier { + // MATCH: test_specifier (on self) -> test_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_test_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before { - ( - $ident:ident, $variant_ident:ident; + test_specifier; $( $before:ident : $bty:ty, )* ; test_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_test_specific_by_parse!( - $ident, $variant_ident, test_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + test_specifier, + test_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* - ; - ) => { - __report_gen_scan_before!( - $ident, $variant_ident; - $( $before : $bty, )* $name : $ty, - ; - $( $after : $aty, )* - ; - ); - }; - ( - $ident:ident, $variant_ident:ident; - $( $before:ident : $bty:ty, )* - ; - ; - ) => {}; -} - -macro_rules! __report_gen_for_variant { - ( - $ident:ident, - $variant_ident:ident; - ) => {}; - ( - $ident:ident, - $variant_ident:ident; - $( $field_ident:ident : $field_ty:ty ),+ $(,)? - ) => { - __report_gen_scan_before!( - $ident, $variant_ident; - ; - $( $field_ident : $field_ty, )* - ; - ); - }; -} -macro_rules! 
__report_gen_emit_execution_specific { + // MATCH: execution_specifier (on self) -> execution_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* - ; - $( $aname:ident : $aty:ty, )* - ) => { - paste::paste! { - pub fn [< report_ $variant_ident:snake _event >]( - &self - $(, $bname: impl Into<$bty> )* - $(, $aname: impl Into<$aty> )* - ) -> anyhow::Result<()> { - self.report([< $variant_ident Event >] { - $skip_field: self.execution_specifier.clone() - $(, $bname: $bname.into() )* - $(, $aname: $aname.into() )* - }) - } - } - }; -} - -macro_rules! __report_gen_emit_execution_specific_by_parse { - ( - $ident:ident, - $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_execution_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before_exec { - ( - $ident:ident, $variant_ident:ident; + execution_specifier; $( $before:ident : $bty:ty, )* ; execution_specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_execution_specific_by_parse!( - $ident, $variant_ident, execution_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + execution_specifier, + execution_specifier; $( $before : $bty, )* ; $( $after : $aty, )* ); }; + + // MATCH: execution_specifier (on self) -> specifier (on event). 
( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + execution_specifier; $( $before:ident : $bty:ty, )* ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_scan_before_exec!( - $ident, $variant_ident; - $( $before : $bty, )* $name : $ty, - ; - $( $after : $aty, )* - ; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + execution_specifier, + specifier, + $crate::CompilationSpecifier::Execution; + $( $before : $bty, )* ; $( $after : $aty, )* ); }; + + // MATCH: step_specifier (on self) -> step_specifier (on event). ( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + step_specifier; $( $before:ident : $bty:ty, )* ; + step_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* ; - ) => {}; -} - -macro_rules! __report_gen_for_variant_exec { - ( - $ident:ident, - $variant_ident:ident; - ) => {}; - ( - $ident:ident, - $variant_ident:ident; - $( $field_ident:ident : $field_ty:ty ),+ $(,)? ) => { - __report_gen_scan_before_exec!( - $ident, $variant_ident; - ; - $( $field_ident : $field_ty, )* - ; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + step_specifier, + step_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* ); }; -} -macro_rules! __report_gen_emit_step_execution_specific { + // MATCH: compilation_specifier (on self) -> compilation_specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* + compilation_specifier; + $( $before:ident : $bty:ty, )* + ; + compilation_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* ; - $( $aname:ident : $aty:ty, )* ) => { - paste::paste! 
{ - pub fn [< report_ $variant_ident:snake _event >]( - &self - $(, $bname: impl Into<$bty> )* - $(, $aname: impl Into<$aty> )* - ) -> anyhow::Result<()> { - self.report([< $variant_ident Event >] { - $skip_field: self.step_specifier.clone() - $(, $bname: $bname.into() )* - $(, $aname: $aname.into() )* - }) - } - } + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + compilation_specifier, + compilation_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* + ); }; -} -macro_rules! __report_gen_emit_step_execution_specific_by_parse { + // MATCH: compilation_specifier (on self) -> specifier (on event). ( $ident:ident, $variant_ident:ident, - $skip_field:ident; - $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* - ) => { - __report_gen_emit_step_execution_specific!( - $ident, $variant_ident, $skip_field; - $( $bname : $bty, )* ; $( $aname : $aty, )* - ); - }; -} - -macro_rules! __report_gen_scan_before_step { - ( - $ident:ident, $variant_ident:ident; + compilation_specifier; $( $before:ident : $bty:ty, )* ; - step_specifier : $skip_ty:ty, + specifier : $skip_ty:ty, $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_emit_step_execution_specific_by_parse!( - $ident, $variant_ident, step_specifier; + __report_gen_emit_with_specifier!( + $ident, + $variant_ident, + compilation_specifier, + specifier, + $crate::CompilationSpecifier::PreLink; $( $before : $bty, )* ; $( $after : $aty, )* ); }; + + // RECURSIVE: Field doesn't match, continue scanning. 
( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + $specifier_field_on_self:ident; $( $before:ident : $bty:ty, )* ; - $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + $name:ident : $ty:ty, + $( $after:ident : $aty:ty, )* ; ) => { - __report_gen_scan_before_step!( - $ident, $variant_ident; + __report_gen_scan_for_specifier!( + $ident, + $variant_ident, + $specifier_field_on_self; $( $before : $bty, )* $name : $ty, ; $( $after : $aty, )* ; ); }; + + // TERMINAL: No matching specifier found. ( - $ident:ident, $variant_ident:ident; + $ident:ident, + $variant_ident:ident, + $specifier_field_on_self:ident; $( $before:ident : $bty:ty, )* ; ; ) => {}; } -macro_rules! __report_gen_for_variant_step { +/// Entry point: Processes a variant and starts scanning for specifier fields. +macro_rules! __report_gen_for_variant { + // Empty variant - no fields. ( $ident:ident, - $variant_ident:ident; + $variant_ident:ident, + $specifier_field_on_self:ident; ) => {}; + + // Variant with fields - start scanning. ( $ident:ident, - $variant_ident:ident; + $variant_ident:ident, + $specifier_field_on_self:ident; $( $field_ident:ident : $field_ty:ty ),+ $(,)? ) => { - __report_gen_scan_before_step!( - $ident, $variant_ident; + __report_gen_scan_for_specifier!( + $ident, + $variant_ident, + $specifier_field_on_self; ; $( $field_ident : $field_ty, )* ; @@ -401,6 +351,16 @@ macro_rules! define_event { } } + pub fn pre_link_compilation_specific_reporter( + &self, + compilation_specifier: impl Into> + ) -> [< $ident PreLinkCompilationSpecificReporter >] { + [< $ident PreLinkCompilationSpecificReporter >] { + reporter: self.clone(), + compilation_specifier: compilation_specifier.into(), + } + } + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { self.0.send(event.into()).map_err(Into::into) } @@ -442,7 +402,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant! 
{ $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + test_specifier; + $( $field_ident : $field_ty ),* + } )* } @@ -460,7 +425,12 @@ macro_rules! define_event { } $( - __report_gen_for_variant_exec! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + execution_specifier; + $( $field_ident : $field_ty ),* + } )* } @@ -477,7 +447,34 @@ macro_rules! define_event { } $( - __report_gen_for_variant_step! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + __report_gen_for_variant! { + $ident, + $variant_ident, + step_specifier; + $( $field_ident : $field_ty ),* + } + )* + } + + /// A reporter that's tied to a specific compilation. + #[derive(Clone, Debug)] + pub struct [< $ident PreLinkCompilationSpecificReporter >] { + $vis reporter: [< $ident Reporter >], + $vis compilation_specifier: std::sync::Arc, + } + + impl [< $ident PreLinkCompilationSpecificReporter >] { + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.reporter.report(event) + } + + $( + __report_gen_for_variant! { + $ident, + $variant_ident, + compilation_specifier; + $( $field_ident : $field_ty ),* + } )* } } @@ -505,6 +502,11 @@ define_event! { /// A specifier for the test that was discovered. test_specifier: Arc, }, + /// An event emitted by the runners when they discover a pre-link-only compilation. + PreLinkCompilationDiscovery { + /// A specifier for the compilation that was discovered. + compilation_specifier: Arc, + }, /// An event emitted by the runners when a test case is ignored. TestIgnored { /// A specifier for the test that's been ignored. @@ -542,14 +544,13 @@ define_event! { /// An event emitted by the runners when the compilation of the contracts has succeeded /// on the pre-link contracts. PreLinkContractsCompilationSucceeded { - /// A specifier for the execution that's taking place. 
- execution_specifier: Arc, + /// A specifier for the compilation taking place. + specifier: CompilationSpecifier, /// The version of the compiler used to compile the contracts. compiler_version: Version, /// The path of the compiler used to compile the contracts. compiler_path: PathBuf, - /// A flag of whether the contract bytecode and ABI were cached or if they were compiled - /// anew. + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. is_cached: bool, /// The input provided to the compiler - this is optional and not provided if the /// contracts were obtained from the cache. @@ -560,14 +561,13 @@ define_event! { /// An event emitted by the runners when the compilation of the contracts has succeeded /// on the post-link contracts. PostLinkContractsCompilationSucceeded { - /// A specifier for the execution that's taking place. + /// A specifier for the compilation taking place in an execution context. execution_specifier: Arc, /// The version of the compiler used to compile the contracts. compiler_version: Version, /// The path of the compiler used to compile the contracts. compiler_path: PathBuf, - /// A flag of whether the contract bytecode and ABI were cached or if they were compiled - /// anew. + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled anew. is_cached: bool, /// The input provided to the compiler - this is optional and not provided if the /// contracts were obtained from the cache. @@ -575,11 +575,10 @@ define_event! { /// The output of the compiler. compiler_output: CompilerOutput }, - /// An event emitted by the runners when the compilation of the pre-link contract has - /// failed. + /// An event emitted by the runners when the compilation of the pre-link contract has failed. PreLinkContractsCompilationFailed { - /// A specifier for the execution that's taking place. - execution_specifier: Arc, + /// A specifier for the compilation taking place. 
+ specifier: CompilationSpecifier, /// The version of the compiler used to compile the contracts. compiler_version: Option, /// The path of the compiler used to compile the contracts. @@ -590,10 +589,9 @@ define_event! { /// The failure reason. reason: String, }, - /// An event emitted by the runners when the compilation of the post-link contract has - /// failed. + /// An event emitted by the runners when the compilation of the post-link contract has failed. PostLinkContractsCompilationFailed { - /// A specifier for the execution that's taking place. + /// A specifier for the compilation taking place in an execution context. execution_specifier: Arc, /// The version of the compiler used to compile the contracts. compiler_version: Option, @@ -605,6 +603,15 @@ define_event! { /// The failure reason. reason: String, }, + /// An event emitted by the runners when a pre-link-only compilation is ignored. + PreLinkContractsCompilationIgnored { + /// A specifier for the compilation that has been ignored. + compilation_specifier: Arc, + /// A reason for the compilation to be ignored. + reason: String, + /// Additional fields that describe more information on why the compilation was ignored. + additional_fields: IndexMap + }, /// An event emitted by the runners when a library has been deployed. LibrariesDeployed { /// A specifier for the execution that's taking place. @@ -667,3 +674,10 @@ impl RunnerEventReporter { pub type Reporter = RunnerEventReporter; pub type TestSpecificReporter = RunnerEventTestSpecificReporter; pub type ExecutionSpecificReporter = RunnerEventExecutionSpecificReporter; +pub type PreLinkCompilationSpecificReporter = RunnerEventPreLinkCompilationSpecificReporter; + +/// A wrapper that allows functions to accept either reporter type for compilation events. +pub enum CompilationReporter<'a> { + Execution(&'a ExecutionSpecificReporter), + PreLink(&'a PreLinkCompilationSpecificReporter), +}