From 5160e9d5cbd645b59c7f9afdac632bc9473ac27c Mon Sep 17 00:00:00 2001 From: Matej Urbas Date: Sun, 17 Mar 2024 10:58:06 +0000 Subject: [PATCH] rix now focuses only on nix language evaluation --- README.md | 246 +----------------- src/building/mod.rs | 122 --------- src/cmd/build_derivation.rs | 62 ----- src/cmd/hash.rs | 98 ------- src/cmd/mod.rs | 3 - src/cmd/show_derivation.rs | 48 ---- src/derivations.rs | 194 -------------- src/hashes.rs | 413 ------------------------------ src/lib.rs | 6 - src/main.rs | 8 +- src/parsers/derivations.rs | 360 -------------------------- src/parsers/mod.rs | 1 - src/sandbox/mod.rs | 126 --------- src/store/api.rs | 7 - src/store/mod.rs | 2 - src/store/nix_delegation_store.rs | 41 --- tests/cmd/build_derivation.rs | 251 ------------------ tests/cmd/hash.rs | 196 -------------- tests/cmd/mod.rs | 3 - tests/cmd/show_derivation.rs | 41 --- 20 files changed, 14 insertions(+), 2214 deletions(-) delete mode 100644 src/building/mod.rs delete mode 100644 src/cmd/build_derivation.rs delete mode 100644 src/cmd/hash.rs delete mode 100644 src/cmd/show_derivation.rs delete mode 100644 src/derivations.rs delete mode 100644 src/hashes.rs delete mode 100644 src/parsers/derivations.rs delete mode 100644 src/parsers/mod.rs delete mode 100644 src/sandbox/mod.rs delete mode 100644 src/store/api.rs delete mode 100644 src/store/mod.rs delete mode 100644 src/store/nix_delegation_store.rs delete mode 100644 tests/cmd/build_derivation.rs delete mode 100644 tests/cmd/hash.rs delete mode 100644 tests/cmd/show_derivation.rs diff --git a/README.md b/README.md index 254d94c..ff99d85 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![builder](https://github.com/urbas/rix/actions/workflows/build.yml/badge.svg)](https://github.com/urbas/rix/actions/workflows/build.yml) -A reimplementation of `nix` in Rust. +Nix language interpreter. # Trying it out @@ -15,242 +15,22 @@ implemented. # Notable design choices -1. Nix expressions are transpiled to JavaScript and evaluated with V8. The idea - is to leverage all the great work around the JS ecosystem (such as debuggers, - fast JIT compilers, profilers, libraries, compiled code caching, source - mapping, just to name a few). - -2. Use plain-old files and directories to store metadata (instead of a central - SQLite database). The idea is to have trully immutable stores, composable - stores, avoid the central sqlite choke-point, and be more transparent (allow - users to browse the store's metadata without having to learn about SQLite). - -3. Shard directories that contain huge amounts of hash-prefixed files (i.e., use - paths like `/nix/store/ca/fe/xxzzxjyhvbll1c7bkswwy36nlafx-foo-1.2.3`). +Rix transpiles Nix expressions to JavaScript and evaluates them with V8. The idea +is to leverage all the great work in the JS ecosystem (such as debuggers, +fast JIT compilers, profilers, libraries, compiled code caching, and source +mapping just to name a few). # Progress -## New sub-commands - -- 🌗 `build-derivation`: builds a derivation in a sandbox. - - - 🌕 stage 0: creates a sandbox. - - 🌕 stage 1: builds derivations without dependencies. - - 🌗 stage 2: builds derivations with dependencies. - - TODO: prevent internet access. - - 🌑 stage 3: builds fixed derivations (with internet access). - - 🌑 stage 4: builds X% of derivations in `nixpkgs` (assuming all dependencies - are present). - -- `transpile`: converts the given nix expression into JavaScript and prints it - to stdout. 
- -## Nix sub-commands - -- 🌘 `eval` - - - 🌕 stage 0: evaluate basic expressions, rec attrsets, let bindings, `with` - statement, functions - - 🌕 stage 1: lazy evaluation - - 🌘 stage 2: - - 🌘 built-in functions (progress: 3 out of 111) - - 🌑 derivations (hello world derivation) - - 🌑 stage 3: full implementation (all derivations in nixpkgs, nice error - messages, etc.) - -- 🌘 `show-derivation` - - - 🌕 stage 1 (MVP): parse .drv files and dump JSON - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌕 `hash to-base32` - - - 🌕 stage 1 (MVP): conversions of non-SRI hashes - - 🌕 stage 2: most common use cases - - 🌕 stage 3: full implementation - -- 🌕 `hash to-base64` - - - 🌕 stage 1 (MVP): conversions of non-SRI hashes - - 🌕 stage 2: most common use cases - - 🌕 stage 3: full implementation - -- 🌕 `hash to-base16` - - - 🌕 stage 1 (MVP): conversions of non-SRI hashes - - 🌕 stage 2: most common use cases - - 🌕 stage 3: full implementation - -- 🌕 `hash to-sri` - - - 🌕 stage 1 (MVP) - - 🌕 stage 2: most common use cases - - 🌕 stage 3: full implementation - -- 🌑 `hash file` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `hash path` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `build` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `develop` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `flake` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `help` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `profile` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `repl` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `run` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `search` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `shell` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `bundle` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `copy` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `edit` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `log` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `path-info` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `registry` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `why-depends` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `daemon` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `describe-stores` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `key` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `nar` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `print-dev-env` - - - 🌑 stage 1 (MVP) 
- - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `realisation` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `show-config` - - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation - -- 🌑 `store` +- 🌕 stage 0: evaluate basic expressions, rec attrsets, let bindings, `with` + statement, functions - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation +- 🌕 stage 1: lazy evaluation -- 🌑 `doctor` +- 🌘 stage 2: - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation + - 🌘 built-in functions (progress: 3 out of 111) + - 🌑 derivations (hello world derivation) -- 🌑 `upgrade-nix` - - 🌑 stage 1 (MVP) - - 🌑 stage 2: most common use cases - - 🌑 stage 3: full implementation +- 🌑 stage 3: full implementation (all derivations in nixpkgs, nice error + messages, etc.) diff --git a/src/building/mod.rs b/src/building/mod.rs deleted file mode 100644 index fc85518..0000000 --- a/src/building/mod.rs +++ /dev/null @@ -1,122 +0,0 @@ -use crate::derivations::{load_derivation, Derivation}; -use crate::sandbox; -use crate::store::api::DepsInfo; -use std::collections::HashSet; -use std::fs::File; -use std::os::unix::io::{AsRawFd, FromRawFd, RawFd}; -use std::os::unix::process::CommandExt; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; - -pub struct BuildConfig<'a> { - build_dir: &'a Path, - deps_info: &'a dyn DepsInfo, - derivation: &'a Derivation, - stderr: Option<&'a File>, - stdout: Option<&'a File>, -} - -impl<'a> BuildConfig<'a> { - pub fn new( - derivation: &'a Derivation, - build_dir: &'a Path, - deps_info: &'a dyn DepsInfo, - ) -> BuildConfig<'a> { - BuildConfig { - build_dir, - deps_info, - derivation, - stderr: None, - stdout: None, - } - } - - pub fn stdout_to_file(&mut self, file: &'a File) { - self.stdout = Some(file); - } - - pub fn stderr_to_file(&mut self, file: &'a File) { - self.stderr = Some(file); - } -} - -pub fn build_derivation_sandboxed(config: &BuildConfig) -> Result { - // this function assumes all derivation inputs are present and won't be - // GC'd for the duration of this build - let stdout_fd = config.stdout.map(|file| file.as_raw_fd()); - let stderr_fd = config.stderr.map(|file| file.as_raw_fd()); - // we have to find mount paths (e.g.: input derivation output paths and their - // runtime dependencies) before we enter the sandbox. That's because in the - // sandbox we won't have access to pretty much anything. 
- let mount_paths = get_mount_paths(config)?; - // return value is the error code of the builder or 255 if anything went - // wrong and we failed to execute the builder - sandbox::run_in_sandbox( - config.build_dir, - || prepare_sandbox(config, &mount_paths), - || run_build(config, stdout_fd, stderr_fd), - ) -} - -pub fn build_derivation_command(derivation: &Derivation, build_dir: &Path) -> Command { - // This function assumes that the sandbox is already fully set up - let mut cmd = Command::new(&derivation.builder); - cmd.args(&derivation.args) - .envs(&derivation.env) - .current_dir(build_dir); - cmd -} - -fn prepare_sandbox(config: &BuildConfig, mount_paths: &HashSet) -> Result<(), String> { - mount_standard_paths(config)?; - mount_input_drvs(config, mount_paths)?; - sandbox::mount_paths( - config.derivation.input_srcs.iter().map(Path::new), - config.build_dir, - ) -} - -fn run_build(config: &BuildConfig, stdout_fd: Option, stderr_fd: Option) -> isize { - let mut cmd = build_derivation_command(config.derivation, Path::new("/")); - if let Some(stdout_fd) = stdout_fd { - cmd.stdout(unsafe { Stdio::from_raw_fd(stdout_fd) }); - } - if let Some(stderr_fd) = stderr_fd { - cmd.stderr(unsafe { Stdio::from_raw_fd(stderr_fd) }); - } - let exec_error = cmd.exec(); - // we should never get here because we exec into the builder above (i.e. the builder - // process takes over). So, it's an error no matter what if we get here. - eprintln!("Error executing builder: {}", exec_error); - 255 -} - -fn mount_input_drvs(config: &BuildConfig, mount_paths: &HashSet) -> Result<(), String> { - for path in mount_paths { - sandbox::mount_path(path, config.build_dir)?; - } - Ok(()) -} - -fn get_mount_paths(config: &BuildConfig) -> Result, String> { - let mut mount_paths = HashSet::new(); - for (drv_path, outputs) in &config.derivation.input_drvs { - let derivation = load_derivation(drv_path)?; - for output in &outputs.outputs { - let drv_output = derivation.outputs.get(output).ok_or_else(|| { - format!("Could not find output '{output}' of derivation {drv_path}") - })?; - let drv_output_path = PathBuf::from(&drv_output.path); - // We have to include direct runtime dependencies of input derivations. We don't need - // to recurse transitively into input derivations of input derivations as these shouldn't - // be needed. 
- mount_paths.extend(config.deps_info.get_runtime_deps(&drv_output_path)?); - mount_paths.insert(drv_output_path); - } - } - Ok(mount_paths) -} - -fn mount_standard_paths(config: &BuildConfig) -> Result<(), String> { - sandbox::mount_path(Path::new("/dev/null"), config.build_dir) -} diff --git a/src/cmd/build_derivation.rs b/src/cmd/build_derivation.rs deleted file mode 100644 index 5ca52b6..0000000 --- a/src/cmd/build_derivation.rs +++ /dev/null @@ -1,62 +0,0 @@ -use crate::building::{build_derivation_sandboxed, BuildConfig}; -use crate::cmd::{to_cmd_err, RixSubCommand}; -use crate::derivations; -use crate::store::nix_delegation_store::NixDelegationStore; -use clap::{Arg, ArgAction, ArgMatches}; -use std::fs::File; -use std::path::PathBuf; -use tempfile::tempdir; - -pub fn cmd() -> RixSubCommand { - RixSubCommand { - name: "build-derivation", - handler: |args| to_cmd_err(handle_cmd(args)), - cmd: |subcommand| { - subcommand - .about("builds the derivation assuming all dependencies are present in the store and won't be GC'd") - .arg(Arg::new("DERIVATION").required(true).help( - "The path of the derivation to build.", - )) - .arg(Arg::new("build-dir").long("build-dir").action(ArgAction::Set).help("The directory in which to run the build process.")) - .arg(Arg::new("stdout").long("stdout").action(ArgAction::Set).help("The file to which to redirect the standard output of the build")) - .arg(Arg::new("stderr").long("stderr").action(ArgAction::Set).help("The file to which to redirect the error output of the build")) - }, - } -} - -pub fn handle_cmd(parsed_args: &ArgMatches) -> Result<(), String> { - let derivation_path = parsed_args - .get_one::("DERIVATION") - .ok_or("You must specify a derivation.")?; - let build_dir = parsed_args - .get_one::("build-dir") - .map_or_else(create_build_dir, |str| Ok(PathBuf::from(str)))?; - let stdout_file = parsed_args - .get_one::("stdout") - .map(File::create) - .transpose() - .map_err(|err| format!("Could not create the stdout file. Error: {}", err))?; - let stderr_file = parsed_args - .get_one::("stderr") - .map(File::create) - .transpose() - .map_err(|err| format!("Could not create the stderr file. Error: {}", err))?; - let derivation = derivations::load_derivation(derivation_path)?; - let nix_delegation_store = NixDelegationStore::default(); - let mut build_config = BuildConfig::new(&derivation, &build_dir, &nix_delegation_store); - if let Some(stdout_file) = stdout_file.as_ref() { - build_config.stdout_to_file(stdout_file); - } - if let Some(stderr_file) = stderr_file.as_ref() { - build_config.stderr_to_file(stderr_file); - } - let result_code = build_derivation_sandboxed(&build_config)?; - println!("{}", build_dir.to_str().unwrap()); - std::process::exit(result_code); -} - -fn create_build_dir() -> Result { - tempdir() - .map_err(|err| format!("Could not create the build directory. 
Error: {}", err)) - .map(|tmp_dir| tmp_dir.into_path()) -} diff --git a/src/cmd/hash.rs b/src/cmd/hash.rs deleted file mode 100644 index 9327af7..0000000 --- a/src/cmd/hash.rs +++ /dev/null @@ -1,98 +0,0 @@ -use crate::cmd::{to_cmd_err, RixSubCommand}; -use crate::hashes; -use clap::{Arg, ArgAction, ArgMatches, Command}; - -pub fn cmd() -> RixSubCommand { - RixSubCommand { - name: "hash", - handler: |args| to_cmd_err(handle_cmd(args)), - cmd: |subcommand| { - subcommand - .about("compute and convert cryptographic hashes") - .subcommand( - to_base_cmd("to-base16").about("convert hashes to base-16 representation"), - ) - .subcommand( - to_base_cmd("to-base32") - .about("convert hashes to the Nix base-32 representation"), - ) - .subcommand( - to_base_cmd("to-base64").about("convert hashes to base-64 representation"), - ) - .subcommand( - to_base_cmd("to-sri").about("convert hashes to SRI base-64 representation"), - ) - }, - } -} - -pub fn handle_cmd(parent_args: &ArgMatches) -> Result<(), String> { - if let Some(args) = parent_args.subcommand_matches("to-base16") { - handle_to_base_cmd(args, hashes::to_base16) - } else if let Some(args) = parent_args.subcommand_matches("to-base32") { - handle_to_base_cmd(args, hashes::to_base32) - } else if let Some(args) = parent_args.subcommand_matches("to-base64") { - handle_to_base_cmd(args, hashes::to_base64) - } else if let Some(args) = parent_args.subcommand_matches("to-sri") { - handle_to_base_cmd(args, hashes::to_sri) - } else { - Err("operation not supported".to_owned()) - } -} - -fn to_base_cmd(name: &'static str) -> Command { - Command::new(name) - .arg( - Arg::new("HASHES") - .action(ArgAction::Append) - .help("A list of hashes to convert."), - ) - .arg( - Arg::new("type") - .long("type") - .value_name("hash-algo") - .value_parser(["md5", "sha1", "sha256", "sha512"]) - .help("Hash algorithm of input HASHES. 
Optional as can also be extracted from SRI hash itself."), - ) -} - -fn handle_to_base_cmd(args: &clap::ArgMatches, to_base_fn: F) -> Result<(), String> -where - F: Fn(&hashes::Hash) -> String, -{ - let mut hash_strs = args - .get_many::("HASHES") - .ok_or("Please specify some hashes.")?; - let type_arg = args - .get_one::("type") - .map(|s| s.as_str()) - .unwrap_or("sri"); - - if let Ok(hash_type) = type_arg.parse() { - return hash_strs.try_for_each(|hash_str| print_hash(hash_str, hash_type, &to_base_fn)); - } else if type_arg == "sri" { - return sri_to_base(hash_strs, &to_base_fn); - } - Err("hash type not supported".to_owned()) -} - -fn sri_to_base<'a, F>( - mut hash_strs: impl Iterator, - to_base_fn: F, -) -> Result<(), String> -where - F: Fn(&hashes::Hash) -> String, -{ - hash_strs.try_for_each(|hash_str| { - let (hash_type, hash_str) = hashes::sri_hash_components(hash_str)?; - let hash_type: hashes::HashType = hash_type.parse()?; - print_hash(hash_str, hash_type, &to_base_fn) - }) -} - -fn print_hash(hash_str: &str, hash_type: hashes::HashType, to_base_fn: F) -> Result<(), String> -where - F: Fn(&hashes::Hash) -> String, -{ - hashes::parse(hash_str, hash_type).map(|hash| println!("{}", to_base_fn(&hash))) -} diff --git a/src/cmd/mod.rs b/src/cmd/mod.rs index cf0ee13..210938b 100644 --- a/src/cmd/mod.rs +++ b/src/cmd/mod.rs @@ -1,7 +1,4 @@ -pub mod build_derivation; pub mod eval; -pub mod hash; -pub mod show_derivation; pub mod transpile; use clap::{ArgMatches, Command}; use colored::*; diff --git a/src/cmd/show_derivation.rs b/src/cmd/show_derivation.rs deleted file mode 100644 index f53bcbc..0000000 --- a/src/cmd/show_derivation.rs +++ /dev/null @@ -1,48 +0,0 @@ -use crate::cmd::{to_cmd_err, RixSubCommand}; -use crate::derivations::load_derivation; -use clap::{Arg, ArgAction, ArgMatches}; -use serde::ser::{SerializeMap, Serializer}; -use serde_json; - -pub fn cmd() -> RixSubCommand { - RixSubCommand { - name: "show-derivation", - handler: |args| to_cmd_err(handle_cmd(args)), - cmd: |subcommand| { - subcommand - .about("show the contents of a store derivation") - .arg(Arg::new("INSTALLABLES").action(ArgAction::Append).help( - "A list of derivation files. Other types of installables are not yet supported.", - )) - }, - } -} - -pub fn handle_cmd(parsed_args: &ArgMatches) -> Result<(), String> { - let installables = parsed_args - .get_many::("INSTALLABLES") - .ok_or("Please specify some derivation files.")? - .map(|string| string.as_str()); - show_derivations(installables) -} - -fn show_derivations<'a>(mut drv_paths: impl Iterator) -> Result<(), String> { - let mut json_serializer = serde_json::Serializer::new(std::io::stdout()); - let mut map_serializer = json_serializer - .serialize_map(None) - .map_err(|_| "Failed to initialize JSON serialization.")?; - - let error_maybe = - drv_paths.try_for_each(|drv_path| show_derivation(&mut map_serializer, drv_path)); - - // this makes sure we produce valid JSON even if there's a failure while dumping the derivations above - map_serializer.end().unwrap(); - - error_maybe -} - -fn show_derivation(serializer: &mut impl SerializeMap, drv_path: &str) -> Result<(), String> { - serializer - .serialize_entry(drv_path, &load_derivation(drv_path)?) 
- .map_err(|_| format!("Failed to serialize derivation '{}' to JSON.", drv_path)) -} diff --git a/src/derivations.rs b/src/derivations.rs deleted file mode 100644 index bb83dd0..0000000 --- a/src/derivations.rs +++ /dev/null @@ -1,194 +0,0 @@ -use crate::parsers::derivations::parse_derivation; -use serde::{Deserialize, Serialize}; -use std::collections::{BTreeMap, BTreeSet}; -use std::fs; -use std::io::Write; - -#[derive(Deserialize, Serialize, Debug, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct Derivation { - pub args: Vec, - pub builder: String, - pub env: BTreeMap, - pub input_drvs: BTreeMap, - pub input_srcs: BTreeSet, - pub outputs: BTreeMap, - pub system: String, -} - -#[derive(Deserialize, Serialize, Debug, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct DerivationOutput { - pub hash: Option, - pub hash_algo: Option, - pub path: String, -} - -#[derive(Deserialize, Serialize, Debug, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct InputDrv { - pub dynamic_outputs: BTreeMap, - pub outputs: BTreeSet, -} - -pub fn load_derivation(drv_path: &str) -> Result { - let content = fs::read_to_string(drv_path) - .map_err(|err| format!("Failed to read '{}': {}", drv_path, err))?; - parse_derivation(&content) - .map(|(_, derivation)| derivation) - .map_err(|err| format!("Failed to parse '{}': {}", drv_path, err)) -} - -pub fn save_derivation(writer: &mut impl Write, derivation: &Derivation) -> std::io::Result<()> { - write!(writer, "Derive(")?; - write_outputs(writer, &derivation.outputs)?; - write!(writer, ",")?; - write_input_drvs(writer, &derivation.input_drvs)?; - write!(writer, ",")?; - write_iter(writer, &mut derivation.input_srcs.iter(), write_string)?; - write!(writer, ",")?; - write_string(writer, &derivation.system)?; - write!(writer, ",")?; - write_string(writer, &derivation.builder)?; - write!(writer, ",")?; - write_iter(writer, &mut derivation.args.iter(), write_string)?; - write!(writer, ",")?; - write_iter( - writer, - &mut derivation.env.iter(), - |writer, (key, value)| { - write!(writer, "(")?; - write_string(writer, key)?; - write!(writer, ",")?; - write_string(writer, value)?; - write!(writer, ")") - }, - )?; - write!(writer, ")") -} - -fn write_outputs( - writer: &mut impl Write, - outputs: &BTreeMap, -) -> std::io::Result<()> { - write_iter(writer, &mut outputs.iter(), |writer, entry| { - write_output(writer, entry.0, entry.1) - }) -} - -fn write_input_drvs( - writer: &mut impl Write, - input_drvs: &BTreeMap, -) -> std::io::Result<()> { - write_iter(writer, &mut input_drvs.iter(), |writer, entry| { - let (drv_path, input_drv) = entry; - write!(writer, "(")?; - write_string(writer, drv_path)?; - write!(writer, ",")?; - write_iter(writer, &mut input_drv.outputs.iter(), write_string)?; - write!(writer, ")") - }) -} - -fn write_iter( - writer: &mut W, - iter: &mut impl Iterator, - write_value: F, -) -> std::io::Result<()> -where - W: Write, - F: Fn(&mut W, T) -> std::io::Result<()>, -{ - write!(writer, "[")?; - if let Some(entry) = iter.next() { - write_value(writer, entry)?; - } - for entry in iter.by_ref() { - write!(writer, ",")?; - write_value(writer, entry)?; - } - write!(writer, "]")?; - Ok(()) -} - -fn write_output( - writer: &mut impl Write, - output_name: &String, - output: &DerivationOutput, -) -> std::io::Result<()> { - write!(writer, "(")?; - write_string(writer, output_name)?; - write!(writer, ",")?; - write_string(writer, &output.path)?; - write!(writer, ",")?; - write_string(writer, 
output.hash_algo.as_ref().unwrap_or(&String::new()))?; - write!(writer, ",")?; - write_string(writer, output.hash.as_ref().unwrap_or(&String::new()))?; - write!(writer, ")") -} - -fn write_string(writer: &mut impl Write, string: &String) -> std::io::Result<()> { - let mut escaped_string = String::with_capacity(2 * string.capacity()); - for character in string.chars() { - match character { - '\t' => escaped_string.push_str("\\t"), - '\n' => escaped_string.push_str("\\n"), - '\r' => escaped_string.push_str("\\r"), - '\\' => escaped_string.push_str("\\\\"), - '"' => escaped_string.push_str("\\\""), - character => escaped_string.push(character), - } - } - write!(writer, "\"{}\"", escaped_string) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs::File; - use tempfile::tempdir; - - #[test] - fn test_save_and_load() { - let tmp_dir = tempdir().unwrap(); - let derivation = sample_derivation(); - let derivation_path = tmp_dir.path().join("foo.drv"); - let mut derivation_file = File::create(&derivation_path).unwrap(); - save_derivation(&mut derivation_file, &derivation).unwrap(); - let derivation_from_file = load_derivation(&derivation_path.to_str().unwrap()).unwrap(); - assert_eq!(derivation_from_file, derivation); - } - - #[test] - fn test_save_and_load_json() { - let derivation = sample_derivation(); - let derivation_json_str = serde_json::to_string(&derivation).unwrap(); - let derivation_from_json: Derivation = serde_json::from_str(&derivation_json_str).unwrap(); - assert_eq!(derivation, derivation_from_json); - } - - fn sample_derivation() -> Derivation { - Derivation { - args: vec!["foo".to_owned(), "bar".to_owned()], - builder: "foo.sh".to_owned(), - env: BTreeMap::from([("var1".to_owned(), "val1".to_owned())]), - input_drvs: BTreeMap::from([( - "foo.drv".to_owned(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: BTreeSet::from(["out".to_owned()]), - }, - )]), - input_srcs: BTreeSet::from(["/foo.txt".to_owned()]), - outputs: BTreeMap::from([( - "out".to_owned(), - DerivationOutput { - hash: None, - hash_algo: Some("foo".to_owned()), - path: "/foo.out".to_owned(), - }, - )]), - system: "foo-x64".to_owned(), - } - } -} diff --git a/src/hashes.rs b/src/hashes.rs deleted file mode 100644 index 32cdc2a..0000000 --- a/src/hashes.rs +++ /dev/null @@ -1,413 +0,0 @@ -#[derive(Copy, Clone, Debug, PartialEq)] -pub enum HashType { - Md5, - Sha1, - Sha256, - Sha512, -} - -#[derive(Debug, PartialEq)] -pub struct Hash { - pub hash_type: HashType, - pub bytes: Vec, -} - -impl HashType { - pub fn size(&self) -> usize { - match self { - HashType::Md5 => 16, - HashType::Sha1 => 20, - HashType::Sha256 => 32, - HashType::Sha512 => 64, - } - } -} - -impl std::str::FromStr for HashType { - type Err = String; - - fn from_str(hash_type: &str) -> Result { - match hash_type { - "md5" => Ok(HashType::Md5), - "sha1" => Ok(HashType::Sha1), - "sha256" => Ok(HashType::Sha256), - "sha512" => Ok(HashType::Sha512), - _ => Err(format!("Unknown hash type '{hash_type}'.")), - } - } -} - -impl std::fmt::Display for HashType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let hash_type = match self { - HashType::Md5 => "md5", - HashType::Sha1 => "sha1", - HashType::Sha256 => "sha256", - HashType::Sha512 => "sha512", - }; - write!(f, "{}", hash_type) - } -} - -pub fn parse(hash_str: &str, hash_type: HashType) -> Result { - let hash_str_len = hash_str.as_bytes().len(); - let hash_size = hash_type.size(); - if hash_str_len == 2 * hash_size { - from_base16(hash_str, hash_type) - } 
else if hash_str_len == to_base32_len(hash_size) { - from_base32(hash_str, hash_type) - } else if hash_str_len == to_base64_len(hash_size) { - from_base64(hash_str, hash_type) - } else { - Err(format!("hash '{}' with unexpected length.", hash_str)) - } -} - -pub fn sri_hash_components(hash_str: &str) -> Result<(&str, &str), String> { - hash_str - .split_once('-') - .or_else(|| hash_str.split_once(':')) - .ok_or(format!("Failed to parse '{}'. Not an SRI hash.", hash_str)) -} - -pub fn to_base16(hash: &Hash) -> String { - let bytes = &hash.bytes; - let mut out_string = String::with_capacity(2 * bytes.len()); - for byte in bytes { - out_string.push(nibble_to_base16(byte >> 4)); - out_string.push(nibble_to_base16(byte & 0x0f)); - } - out_string -} - -pub fn from_base16(base16_str: &str, hash_type: HashType) -> Result { - let base16_str_bytes = base16_str.as_bytes(); - let mut bytes = vec![0; hash_type.size()]; - for idx in 0..bytes.len() { - bytes[idx] = parse_base16_digit(base16_str_bytes[idx * 2])? << 4 - | parse_base16_digit(base16_str_bytes[idx * 2 + 1])?; - } - Ok(Hash { hash_type, bytes }) -} - -pub fn to_base32(hash: &Hash) -> String { - let bytes = &hash.bytes; - let bytes_len = bytes.len(); - let len = to_base32_len(bytes_len); - let mut out_string = String::with_capacity(len); - - for idx in (0..len).rev() { - let b = idx * 5; - let i = b / 8; - let j = b % 8; - let carry = if i >= bytes_len - 1 { - 0 - } else { - bytes[i + 1].checked_shl(8 - j as u32).unwrap_or(0) - }; - let c = (bytes[i] >> j) | carry; - out_string.push(nibble_to_base32(c & 0x1f)); - } - - out_string -} - -pub fn from_base32(base32_str: &str, hash_type: HashType) -> Result { - let mut bytes = vec![0; hash_type.size()]; - let base32_str_bytes = base32_str.as_bytes(); - let str_len = base32_str_bytes.len(); - for idx in 0..to_base32_len(bytes.len()) { - let digit = parse_base32_digit(base32_str_bytes[str_len - idx - 1])?; - let b = idx * 5; - let i = b / 8; - let j = b % 8; - bytes[i] |= digit << j; - - let carry = digit.checked_shr(8 - j as u32).unwrap_or(0); - if i < bytes.len() - 1 { - bytes[i + 1] |= carry; - } else if carry != 0 { - return Err(format!("Invalid base-32 string '{}'", base32_str)); - } - } - Ok(Hash { hash_type, bytes }) -} - -pub fn to_base64(hash: &Hash) -> String { - let bytes = &hash.bytes; - let mut out_string = String::with_capacity(to_base64_len(bytes.len())); - let mut data: usize = 0; - let mut nbits: usize = 0; - - for byte in bytes { - data = data << 8 | (*byte as usize); - nbits += 8; - while nbits >= 6 { - nbits -= 6; - out_string.push(BASE_64_CHARS[data >> nbits & 0x3f] as char); - } - } - - if nbits > 0 { - out_string.push(BASE_64_CHARS[data << (6 - nbits) & 0x3f] as char); - } - - while out_string.len() % 4 > 0 { - out_string.push('='); - } - - out_string -} - -pub fn from_base64(base64_str: &str, hash_type: HashType) -> Result { - let mut bytes = vec![0; hash_type.size()]; - let base64_str_bytes = base64_str.as_bytes(); - let mut d: u32 = 0; - let mut bits: u32 = 0; - let mut byte = 0; - - for chr in base64_str_bytes { - if *chr == b'=' { - break; - } - let digit = BASE_64_CHAR_VALUES[*chr as usize]; - if digit == INVALID_CHAR_VALUE { - return Err(format!( - "Character '{}' is not a valid base-64 character.", - *chr as char - )); - } - bits += 6; - d = d << 6 | digit as u32; - if bits >= 8 { - bytes[byte] = (d >> (bits - 8) & 0xff) as u8; - bits -= 8; - byte += 1; - } - } - Ok(Hash { hash_type, bytes }) -} - -pub fn to_sri(hash: &Hash) -> String { - format!("{}-{}", hash.hash_type, 
to_base64(hash)) -} - -fn nibble_to_base16(nibble: u8) -> char { - if nibble < 10 { - return (b'0' + nibble) as char; - } - (b'a' + nibble - 10) as char -} - -fn parse_base16_digit(chr: u8) -> Result { - match chr { - b'0'..=b'9' => Ok(chr - b'0'), - b'A'..=b'F' => Ok(chr - b'A' + 10), - b'a'..=b'f' => Ok(chr - b'a' + 10), - _ => Err("Not a hex numeral.".to_owned()), - } -} - -fn to_base32_len(bytes_count: usize) -> usize { - (bytes_count * 8 - 1) / 5 + 1 -} - -fn nibble_to_base32(nibble: u8) -> char { - if nibble < 10 { - return (b'0' + nibble) as char; - } else if nibble < 14 { - return (b'a' + nibble - 10) as char; - } else if nibble < 23 { - return (b'f' + nibble - 14) as char; - } else if nibble < 27 { - return (b'p' + nibble - 23) as char; - } - (b'v' + nibble - 27) as char -} - -fn parse_base32_digit(chr: u8) -> Result { - match chr { - b'0'..=b'9' => Ok(chr - b'0'), - b'a'..=b'd' => Ok(chr - b'a' + 10), - b'f'..=b'n' => Ok(chr - b'f' + 14), - b'p'..=b's' => Ok(chr - b'p' + 23), - b'v'..=b'z' => Ok(chr - b'v' + 27), - _ => Err(format!( - "Character '{}' is not a valid base-32 character.", - chr as char - )), - } -} - -fn to_base64_len(bytes_count: usize) -> usize { - ((4 * bytes_count / 3) + 3) & !3 -} - -const BASE_64_CHARS: &[u8] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".as_bytes(); -const BASE_64_CHAR_VALUES: [u8; 256] = compute_base64_char_values(); -const INVALID_CHAR_VALUE: u8 = 255; - -const fn compute_base64_char_values() -> [u8; 256] { - let mut char_values: [u8; 256] = [INVALID_CHAR_VALUE; 256]; - let mut idx = 0; - while idx < 64 { - char_values[BASE_64_CHARS[idx] as usize] = idx as u8; - idx += 1; - } - char_values -} - -#[cfg(test)] -mod tests { - use std::str::FromStr; - - use super::*; - - fn sha256_sample() -> Hash { - Hash { - hash_type: HashType::Sha256, - bytes: vec![ - 0xd5, 0x31, 0x38, 0x62, 0x85, 0x6f, 0x77, 0x70, 0xbd, 0xff, 0xed, 0x2d, 0xfe, 0x8c, - 0x41, 0x7a, 0x84, 0xf3, 0xf6, 0xd5, 0xe1, 0x1c, 0x3b, 0x5c, 0x19, 0x42, 0x0f, 0x21, - 0x30, 0x76, 0x6f, 0x81, - ], - } - } - - fn sha512_sample() -> Hash { - Hash { - hash_type: HashType::Sha512, - bytes: vec![ - 0xfb, 0x2e, 0x19, 0x9d, 0xe3, 0xe9, 0xbd, 0x6b, 0x35, 0x7d, 0xcf, 0xcb, 0x85, 0x94, - 0x53, 0x1e, 0x44, 0xde, 0xb1, 0xb5, 0xe4, 0xc8, 0x16, 0x2e, 0x38, 0x1f, 0xb9, 0x0b, - 0x2a, 0x1d, 0x66, 0xaa, 0xc4, 0xb8, 0x44, 0xd7, 0x8b, 0x7c, 0xce, 0x55, 0xfa, 0x40, - 0x40, 0x87, 0x60, 0x0b, 0x79, 0x57, 0x6c, 0x72, 0xd3, 0x0c, 0x6f, 0x5d, 0x42, 0x8b, - 0x31, 0x47, 0xd0, 0x61, 0xbc, 0xb2, 0x83, 0x2d, - ], - } - } - - #[test] - fn test_hash_type_size() { - assert_eq!(HashType::Md5.size(), 16); - assert_eq!(HashType::Sha1.size(), 20); - assert_eq!(HashType::Sha256.size(), 32); - assert_eq!(HashType::Sha512.size(), 64); - } - - #[test] - fn test_hash_type_from_str() { - assert_eq!(HashType::from_str("md5"), Ok(HashType::Md5)); - assert_eq!(HashType::from_str("sha1"), Ok(HashType::Sha1)); - assert_eq!(HashType::from_str("sha256"), Ok(HashType::Sha256)); - assert_eq!(HashType::from_str("sha512"), Ok(HashType::Sha512)); - assert_eq!( - HashType::from_str("foobar"), - Err("Unknown hash type 'foobar'.".to_owned()) - ); - } - - #[test] - fn test_parse_sha256_base16() { - assert_eq!( - parse( - "d5313862856f7770bdffed2dfe8c417a84f3f6d5e11c3b5c19420f2130766f81", - HashType::Sha256, - ), - Ok(sha256_sample()), - ); - } - - #[test] - fn test_parse_sha256_base32() { - assert_eq!( - parse( - "10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm", - HashType::Sha256, - ), - Ok(sha256_sample()), - ); - } 
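The `parse` function above dispatches purely on the textual length of the input: a digest of N bytes is 2·N characters in base-16, (8·N−1)/5+1 characters in Nix base-32, and ((4·N/3)+3) rounded up to a multiple of 4 in padded base-64. A minimal standalone check of that arithmetic against the SHA-256 vectors used in the surrounding tests (this helper is illustrative only and is not part of rix):

```rust
// Illustrative check of the length arithmetic that `parse` relies on.
// The string constants are the SHA-256 test vectors from the tests nearby.
fn base32_len(n: usize) -> usize {
    (n * 8 - 1) / 5 + 1
}

fn base64_len(n: usize) -> usize {
    ((4 * n / 3) + 3) & !3
}

fn main() {
    let sha256_bytes: usize = 32;
    // base-16: two hex digits per byte -> 64 characters
    assert_eq!(2 * sha256_bytes, 64);
    assert_eq!(
        "d5313862856f7770bdffed2dfe8c417a84f3f6d5e11c3b5c19420f2130766f81".len(),
        64
    );
    // Nix base-32: 5 bits per character, rounded up -> 52 characters
    assert_eq!(base32_len(sha256_bytes), 52);
    assert_eq!(
        "10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm".len(),
        52
    );
    // padded base-64: 4 characters per 3 bytes, padded to a multiple of 4 -> 44
    assert_eq!(base64_len(sha256_bytes), 44);
    assert_eq!("1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E=".len(), 44);
    println!("length-based dispatch checks out");
}
```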
- - #[test] - fn test_parse_sha256_base64() { - assert_eq!( - parse( - "1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E=", - HashType::Sha256, - ), - Ok(sha256_sample()), - ); - } - - #[test] - fn test_parse_sha256_invalid() { - assert_eq!( - parse("foobar", HashType::Sha256), - Err("hash 'foobar' with unexpected length.".to_owned()), - ); - } - - #[test] - fn test_parse_sha512_base64() { - assert_eq!( - parse("+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ==", HashType::Sha512), - Ok(sha512_sample()), - ); - } - - #[test] - fn test_to_base16() { - assert_eq!( - to_base16(&sha256_sample()), - "d5313862856f7770bdffed2dfe8c417a84f3f6d5e11c3b5c19420f2130766f81" - ); - } - - #[test] - fn test_to_base3() { - assert_eq!( - to_base32(&sha256_sample()), - "10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm" - ); - } - - #[test] - fn test_to_base64() { - assert_eq!( - to_base64(&sha256_sample()), - "1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E=" - ); - } - - #[test] - fn test_from_base32_invalid_char() { - assert_eq!( - from_base32(")", HashType::Sha256), - Err("Character ')' is not a valid base-32 character.".to_owned()), - ); - } - - #[test] - fn test_from_base64_invalid_char() { - assert_eq!( - from_base64(")", HashType::Sha256), - Err("Character ')' is not a valid base-64 character.".to_owned()), - ); - } - - #[test] - fn test_sri_hash_components() { - assert_eq!(sri_hash_components("md5-foobar"), Ok(("md5", "foobar"))); - assert_eq!(sri_hash_components("sha256:abc"), Ok(("sha256", "abc")),); - } - - #[test] - fn test_sri_hash_components_fail() { - assert_eq!( - sri_hash_components("md5foobar"), - Err("Failed to parse 'md5foobar'. Not an SRI hash.".to_owned()) - ); - } -} diff --git a/src/lib.rs b/src/lib.rs index bcf5db8..489c097 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,8 +1,2 @@ -pub mod building; pub mod cmd; -pub mod derivations; pub mod eval; -pub mod hashes; -pub mod parsers; -pub mod sandbox; -pub mod store; diff --git a/src/main.rs b/src/main.rs index 981901f..17e932b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,13 +7,7 @@ fn main() -> ExitCode { .version("0.0.1") .about("Rix is another nix."); - let subcommands = &[ - &cmd::build_derivation::cmd(), - &cmd::eval::cmd(), - &cmd::hash::cmd(), - &cmd::show_derivation::cmd(), - &cmd::transpile::cmd(), - ]; + let subcommands = &[&cmd::eval::cmd(), &cmd::transpile::cmd()]; for subcommand in subcommands { cmd = cmd.subcommand((subcommand.cmd)(Command::new(subcommand.name))); diff --git a/src/parsers/derivations.rs b/src/parsers/derivations.rs deleted file mode 100644 index 555c1c7..0000000 --- a/src/parsers/derivations.rs +++ /dev/null @@ -1,360 +0,0 @@ -use crate::derivations::{Derivation, DerivationOutput, InputDrv}; -use nom::branch::alt; -use nom::bytes::complete::{is_not, tag}; -use nom::character::complete::char; -use nom::combinator::{map, opt, value, verify}; -use nom::multi::{fold_many0, separated_list0}; -use nom::sequence::{delimited, pair, preceded, tuple}; -use nom::IResult; -use std::collections::{BTreeMap, BTreeSet}; - -pub fn parse_derivation(input: &str) -> IResult<&str, Derivation> { - delimited(tag("Derive("), parse_derivation_args, char(')'))(input) -} - -fn parse_derivation_args(input: &str) -> IResult<&str, Derivation> { - let (input, (outputs, _, input_drvs, _, input_srcs, _, system, _, builder, _, args, _, env)) = - tuple(( - parse_derivation_outputs, - char(','), - parse_input_derivations, - char(','), - parse_string_set, - char(','), - parse_string, - char(','), - 
parse_string, - char(','), - parse_strings, - char(','), - parse_env, - ))(input)?; - Ok(( - input, - Derivation { - args, - builder, - env, - input_drvs, - input_srcs, - outputs, - system, - }, - )) -} - -fn parse_derivation_outputs(input: &str) -> IResult<&str, BTreeMap> { - let derivation_outputs = fold_many0( - pair(parse_derivation_output, opt(char(','))), - BTreeMap::new, - |mut drv_outputs, ((name, drv_output), _)| { - drv_outputs.insert(name, drv_output); - drv_outputs - }, - ); - delimited(char('['), derivation_outputs, char(']'))(input) -} - -fn parse_derivation_output(input: &str) -> IResult<&str, (String, DerivationOutput)> { - let (input, (_, derivation_name, _, path, _, hash_algo, _, hash, _)) = tuple(( - char('('), - parse_string, - char(','), - parse_string, - char(','), - parse_string, - char(','), - parse_string, - char(')'), - ))(input)?; - Ok(( - input, - ( - derivation_name, - DerivationOutput { - hash: if hash.is_empty() { None } else { Some(hash) }, - hash_algo: if hash_algo.is_empty() { - None - } else { - Some(hash_algo) - }, - path, - }, - ), - )) -} - -fn parse_string(input: &str) -> IResult<&str, String> { - delimited(char('"'), parse_string_inside_quotes, char('"'))(input) -} - -fn parse_input_derivations(input: &str) -> IResult<&str, BTreeMap> { - let input_derivations = fold_many0( - tuple(( - char('('), - parse_string, - char(','), - parse_string_set, - char(')'), - opt(char(',')), - )), - BTreeMap::new, - |mut input_drvs, (_, drv, _, input_type, _, _)| { - input_drvs.insert( - drv, - InputDrv { - dynamic_outputs: BTreeMap::new(), // TODO: add support for dynamic outputs - outputs: input_type, - }, - ); - input_drvs - }, - ); - delimited(char('['), input_derivations, char(']'))(input) -} - -fn parse_string_set(input: &str) -> IResult<&str, BTreeSet> { - let string_set = fold_many0( - pair(parse_string, opt(char(','))), - BTreeSet::new, - |mut strings, (string, _)| { - strings.insert(string); - strings - }, - ); - delimited(char('['), string_set, char(']'))(input) -} - -fn parse_strings(input: &str) -> IResult<&str, Vec> { - delimited( - char('['), - separated_list0(char(','), parse_string), - char(']'), - )(input) -} - -fn parse_env(input: &str) -> IResult<&str, BTreeMap> { - let env_vars = fold_many0( - tuple(( - char('('), - parse_string, - char(','), - parse_string, - char(')'), - opt(char(',')), - )), - BTreeMap::new, - |mut env_vars, (_, name, _, value, _, _)| { - env_vars.insert(name, value); - env_vars - }, - ); - delimited(char('['), env_vars, char(']'))(input) -} - -enum StringFragment<'a> { - Literal(&'a str), - EscapedChar(char), -} - -fn parse_string_inside_quotes(input: &str) -> IResult<&str, String> { - fold_many0( - parse_string_fragment, - String::new, - |mut string, fragment| { - match fragment { - StringFragment::Literal(str_literal) => string.push_str(str_literal), - StringFragment::EscapedChar(escaped_char) => string.push(escaped_char), - } - string - }, - )(input) -} - -fn parse_string_fragment(input: &str) -> IResult<&str, StringFragment> { - alt((parse_literal, parse_escaped_char))(input) -} - -fn parse_literal(input: &str) -> IResult<&str, StringFragment> { - let non_empty_literal = verify(is_not("\"\\"), |matched_str: &str| !matched_str.is_empty()); - map(non_empty_literal, StringFragment::Literal)(input) -} - -fn parse_escaped_char(input: &str) -> IResult<&str, StringFragment> { - let escaped_char = preceded( - char('\\'), - alt(( - char('"'), - char('\\'), - value('\n', char('n')), - value('\r', char('r')), - value('\t', 
char('t')), - )), - ); - map(escaped_char, StringFragment::EscapedChar)(input) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_parse_derivation() { - let expected_derivation = Derivation { - args: to_string_vec(&["-e", "/builder.sh"]), - builder: "/bash".to_owned(), - env: vec![ - ("ENV1".to_owned(), "val1".to_owned()), - ("ENV2".to_owned(), "val2".to_owned()), - ] - .into_iter() - .collect(), - input_drvs: vec![ - ( - "/drv1".to_owned(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: to_string_set(&["out"]), - }, - ), - ( - "/drv2".to_owned(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: to_string_set(&["dev"]), - }, - ), - ] - .into_iter() - .collect(), - input_srcs: to_string_set(&["/builder.sh"]), - outputs: vec![("out".to_owned(), to_drv_out("sha256", "abc", "/foo"))] - .into_iter() - .collect(), - system: "x86_64-linux".to_owned(), - }; - assert_eq!( - parse_derivation( - r#"Derive([("out","/foo","sha256","abc")],[("/drv1",["out"]),("/drv2",["dev"])],["/builder.sh"],"x86_64-linux","/bash",["-e","/builder.sh"],[("ENV1","val1"),("ENV2","val2")])"# - ), - Ok(("", expected_derivation,)), - ); - } - - #[test] - fn test_parse_string() { - assert_eq!(parse_string(r#""ab""#), Ok(("", "ab".to_owned()))); - assert_eq!(parse_string(r#""\"""#), Ok(("", "\"".to_owned()))); - assert_eq!(parse_string(r#""\\""#), Ok(("", "\\".to_owned()))); - assert_eq!(parse_string(r#""\n""#), Ok(("", "\n".to_owned()))); - assert_eq!(parse_string(r#""\r""#), Ok(("", "\r".to_owned()))); - assert_eq!(parse_string(r#""\t""#), Ok(("", "\t".to_owned()))); - - assert_eq!( - parse_string(r#""Foo\tbar\n\rmoo\\zar\"""#), - Ok(("", "Foo\tbar\n\rmoo\\zar\"".to_owned())) - ); - } - - #[test] - fn test_parse_string_invalid() { - assert_eq!( - parse_string("").unwrap_err(), - nom::Err::Error(nom::error::Error::new("", nom::error::ErrorKind::Char)), - "Parsing an empty input as a string literal must fail", - ); - assert_eq!( - parse_string("a").unwrap_err(), - nom::Err::Error(nom::error::Error::new("a", nom::error::ErrorKind::Char)), - "Parsing a string literal that doesn't start with a double-quote must fail", - ); - assert_eq!( - parse_string("\"").unwrap_err(), - nom::Err::Error(nom::error::Error::new("", nom::error::ErrorKind::Char)), - "Parsing an unclosed string literal should fail", - ); - } - - #[test] - fn test_parse_derivation_output() { - assert_eq!( - parse_derivation_output(r#"("foo","store_path","sha256","hash")"#), - Ok(( - "", - ("foo".to_owned(), to_drv_out("sha256", "hash", "store_path")), - )), - ); - } - - #[test] - fn test_parse_derivation_outputs() { - let actual = parse_derivation_outputs(r#"[("a","b","c","d"),("e","f","g","h")]"#); - let expected = vec![ - ("a".to_owned(), to_drv_out("c", "d", "b")), - ("e".to_owned(), to_drv_out("g", "h", "f")), - ]; - assert_eq!(actual, Ok(("", expected.into_iter().collect()))); - } - - #[test] - fn test_parse_input_derivations() { - let actual = parse_input_derivations(r#"[("a",["b","c"]),("e",["f","g"])]"#); - let expected = vec![ - ( - "a".to_owned(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: to_string_set(&["b", "c"]), - }, - ), - ( - "e".to_owned(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: to_string_set(&["f", "g"]), - }, - ), - ]; - assert_eq!(actual, Ok(("", expected.into_iter().collect()))); - } - - #[test] - fn test_parse_string_set() { - let actual = parse_string_set(r#"["a","b","b"]"#); - let expected = to_string_set(&["a", "b"]); - assert_eq!(actual, Ok(("", expected))); - 
} - - #[test] - fn test_parse_strings() { - let actual = parse_strings(r#"["a","b","a"]"#); - let expected = to_string_vec(&["a", "b", "a"]); - assert_eq!(actual, Ok(("", expected))); - } - - #[test] - fn test_parse_env() { - let actual = parse_env(r#"[("A","a"),("B","b")]"#); - let expected = vec![ - ("A".to_owned(), "a".to_owned()), - ("B".to_owned(), "b".to_owned()), - ]; - assert_eq!(actual, Ok(("", expected.into_iter().collect()))); - } - - fn to_drv_out(hash_algo: &str, hash: &str, path: &str) -> DerivationOutput { - DerivationOutput { - hash: Some(hash.to_owned()), - hash_algo: Some(hash_algo.to_owned()), - path: path.to_owned(), - } - } - - fn to_string_vec(strings: &[&str]) -> Vec { - strings.iter().cloned().map(String::from).collect() - } - - fn to_string_set(strings: &[&str]) -> BTreeSet { - strings.iter().cloned().map(String::from).collect() - } -} diff --git a/src/parsers/mod.rs b/src/parsers/mod.rs deleted file mode 100644 index 85c8e8b..0000000 --- a/src/parsers/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod derivations; diff --git a/src/sandbox/mod.rs b/src/sandbox/mod.rs deleted file mode 100644 index 4f5f501..0000000 --- a/src/sandbox/mod.rs +++ /dev/null @@ -1,126 +0,0 @@ -use nix::env::clearenv; -use nix::sys::wait; -use nix::{mount, sched, unistd}; -use std::fs; -use std::path::{Path, PathBuf}; -use uuid::Uuid; - -pub fn run_in_sandbox( - new_root: &Path, - prepare_sandbox: impl Fn() -> Result<(), String>, - run: impl Fn() -> isize, -) -> Result { - let forked_logic = || -> isize { - if let Err(err) = unsafe { clearenv() } { - eprintln!("Could not clear environment variables: {err}"); - return 255; - } - if let Err(err) = prepare_sandbox() { - eprintln!("Error preparing the sandbox: {err}"); - return 255; - } - if let Err(err) = pivot_root(new_root) { - eprintln!("Error setting up the root filesystem in the sandbox: {err}"); - return 255; - } - run() - }; - - let pid = sched::clone( - Box::new(forked_logic), - &mut vec![0u8; 1024 * 1024], - sched::CloneFlags::CLONE_NEWNS | sched::CloneFlags::CLONE_NEWUSER, - Some(libc::SIGCHLD), - ) - .map_err(|err| format!("Failed to start the process in the sandbox. Error: {err}"))?; - - match wait::waitpid(pid, None) { - Ok(wait::WaitStatus::Exited(_, exit_code)) => Ok(exit_code), - Ok(wait::WaitStatus::Signaled(_, signal, core_dumped)) => Err(format!( - "Sandboxed process killed by signal {signal} (core dumped: {core_dumped})" - )), - Ok(state) => Err(format!( - "Unexpected state of the sanboxed process: {state:?}" - )), - Err(err) => Err(format!("Error waiting for the sandboxed process: {err}")), - } -} - -pub fn mount_paths<'a>( - paths: impl Iterator, - new_root: &Path, -) -> Result<(), String> { - for path in paths { - mount_path(path, new_root)?; - } - Ok(()) -} - -pub fn mount_path(path: &Path, new_root: &Path) -> Result<(), String> { - let target_path = prepare_mount_path(path, new_root)?; - mount::mount( - Some(path), - &target_path, - None::<&str>, - mount::MsFlags::MS_BIND | mount::MsFlags::MS_REC, - None::<&str>, - ) - .map_err(|e| format!("Failed to bind mount {path:?} to {target_path:?}. 
Error: {e}")) -} - -pub fn pivot_root(new_root: &Path) -> Result<(), String> { - mount_rootfs(new_root)?; - let old_root_name = Uuid::new_v4().to_string(); - let old_root = new_root.join(&old_root_name); - let old_root_absolute = Path::new("/").join(old_root_name); - fs::create_dir_all(&old_root).map_err(|e| format!("Error creating oldroot: {e}"))?; - unistd::chdir(new_root).map_err(|e| format!("Error cd'ing to new root: {e}"))?; - unistd::pivot_root(".", &old_root).map_err(|e| format!("Error pivoting to new root: {e}"))?; - // It looks like we have to call `chroot` after `pivot_root`: https://superuser.com/questions/1575316/usage-of-chroot-after-pivot-root - unistd::chroot(".").map_err(|e| format!("Failed to chroot. {e}"))?; - mount::umount2(&old_root_absolute, mount::MntFlags::MNT_DETACH) - .map_err(|e| format!("Error unmounting old root: {e}"))?; - std::fs::remove_dir_all(&old_root_absolute).map_err(|e| format!("Error removing old root: {e}")) -} - -fn mount_rootfs(new_root: &Path) -> Result<(), String> { - // we have to mount the old root as part of requirements of the `pivot_root` syscall. - // For more info see: https://man7.org/linux/man-pages/man2/pivot_root.2.html - mount::mount( - Some("/"), - "/", - None::<&str>, - mount::MsFlags::MS_PRIVATE | mount::MsFlags::MS_REC, - None::<&str>, - ) - .map_err(|e| format!("Error mounting old root: {e}"))?; - - mount::mount( - Some(new_root), - new_root, - None::<&str>, - mount::MsFlags::MS_BIND | mount::MsFlags::MS_REC, - None::<&str>, - ) - .map_err(|e| format!("Failed to mount new root: {e}")) -} - -fn prepare_mount_path(source_path: &Path, new_root: &Path) -> Result { - let path_without_root = source_path - .strip_prefix("/") - .map_err(|e| format!("Could not remove '/' from path {source_path:?}. Error: {e}"))?; - let target_path = new_root.join(path_without_root); - if source_path.is_dir() { - fs::create_dir_all(&target_path) - .map_err(|e| format!("Error creating directory {:?}: {}", target_path, e))?; - } else { - if let Some(parent) = target_path.parent() { - fs::create_dir_all(parent).map_err(|e| { - format!("Error creating parent directories for {source_path:?}: {e}") - })?; - } - fs::write(&target_path, "") - .map_err(|e| format!("Error creating empty target file {source_path:?}: {e}"))?; - } - Ok(target_path) -} diff --git a/src/store/api.rs b/src/store/api.rs deleted file mode 100644 index 5742180..0000000 --- a/src/store/api.rs +++ /dev/null @@ -1,7 +0,0 @@ -use std::path::{Path, PathBuf}; - -/// Provides information about dependencies between packages. -pub trait DepsInfo { - /// Returns a list of runtime dependencies of the given path. - fn get_runtime_deps(&self, path: &Path) -> Result, String>; -} diff --git a/src/store/mod.rs b/src/store/mod.rs deleted file mode 100644 index 0e2008e..0000000 --- a/src/store/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod api; -pub mod nix_delegation_store; diff --git a/src/store/nix_delegation_store.rs b/src/store/nix_delegation_store.rs deleted file mode 100644 index 1c8ff1d..0000000 --- a/src/store/nix_delegation_store.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::{ - path::{Path, PathBuf}, - process::Command, -}; - -use super::api::DepsInfo; - -/// Invokes the `nix` tool to provide dependency information. -/// This will be used until Rix doesn't implement the entirety of Nix. 
-#[derive(Default)] -pub struct NixDelegationStore {} - -impl DepsInfo for NixDelegationStore { - fn get_runtime_deps(&self, path: &Path) -> Result, String> { - let path_as_str = path - .to_str() - .ok_or_else(|| format!("Failed to convert {path:?} to string,"))?; - let nix_args = vec!["--query", "--requisites", path_as_str]; - let _show_drv_out = Command::new("nix-store") - .args(&nix_args) - .output() - .map_err(|err| { - format!("Failed to execute `nix-store` with args {nix_args:?}. Error: {err}.") - })?; - - if !_show_drv_out.status.success() { - return Err(format!( - "Failed to get runtime dependencies of '{path_as_str}'. Error: {}.", - std::str::from_utf8(&_show_drv_out.stderr).unwrap_or("") - )); - } - - Ok(std::str::from_utf8(&_show_drv_out.stdout) - .map_err(|err| { - format!("Failed to decode the output of nix-store with UTF-8. Error: {err}") - })? - .lines() - .map(PathBuf::from) - .collect()) - } -} diff --git a/tests/cmd/build_derivation.rs b/tests/cmd/build_derivation.rs deleted file mode 100644 index ddf4e04..0000000 --- a/tests/cmd/build_derivation.rs +++ /dev/null @@ -1,251 +0,0 @@ -use crate::test_utils::tmp_file; -use assert_cmd::prelude::*; -use predicates::prelude::*; -use rix::derivations::{load_derivation, save_derivation, Derivation, DerivationOutput, InputDrv}; -use std::collections::{BTreeMap, BTreeSet}; -use std::fs::{read_to_string, File}; -use std::os::unix::fs::PermissionsExt; -use std::path::Path; -use std::process::Command; -use std::{fs, str, thread}; -use tempfile::tempdir; - -#[test] -fn help() { - assert_cmd(&["--help"]) - .success() - .stderr(predicate::str::is_empty()); -} - -#[test] -fn build_derivations() { - // We have to call nix in order to get some basic dependencies for the tests. - // Unfortunately, calling nix over and over again is expensive. This is why - // we call nix here upfront just once and then call test functions in parallel. 
- let test_data = TestData::new(); - thread::scope(|scope| { - scope.spawn(|| load_and_save_derivation_stable(&test_data)); - scope.spawn(|| build_derivation_success(&test_data)); - scope.spawn(|| build_derivation_missing_deps(&test_data)); - scope.spawn(|| build_derivation_sandboxed_input_drvs(&test_data)); - }); -} - -fn load_and_save_derivation_stable(test_data: &TestData) { - let parsed_derivation = load_derivation(&test_data.coreutils_drv_path).unwrap(); - let mut derivation_bytes = Vec::new(); - save_derivation(&mut derivation_bytes, &parsed_derivation).unwrap(); - assert_eq!( - str::from_utf8(&derivation_bytes).unwrap(), - fs::read_to_string(&test_data.coreutils_drv_path).unwrap(), - ); -} - -fn build_derivation_success(test_data: &TestData) { - let tmp_dir = tempdir().unwrap(); - let build_dir = tempdir().unwrap(); - - let builder_script = "echo hello world && echo broken world 1>&2 && mkdir -p $out && echo hello file > $out/file.out"; - let derivation = simple_derivation(test_data, &tmp_dir, builder_script); - let derivation_path = tmp_dir.path().join("foo.drv"); - save_derivation(&mut File::create(&derivation_path).unwrap(), &derivation).unwrap(); - - let stdout_path = tmp_dir.path().join("stdout"); - File::create(&stdout_path).unwrap(); - let stderr_path = tmp_dir.path().join("stderr"); - File::create(&stderr_path).unwrap(); - - assert_cmd(&[ - "--stdout", - &stdout_path.to_str().unwrap(), - "--stderr", - &stderr_path.to_str().unwrap(), - "--build-dir", - &build_dir.path().to_str().unwrap(), - &derivation_path.to_str().unwrap(), - ]) - .success() - .stderr(predicate::str::is_empty()); - - assert_eq!(read_to_string(&stdout_path).unwrap(), "hello world\n"); - assert_eq!(read_to_string(&stderr_path).unwrap(), "broken world\n"); - assert_eq!( - read_to_string(&build_dir.path().join("output/file.out")).unwrap(), - "hello file\n" - ); - assert!(build_dir.path().join("dev/null").exists()); - assert!(fs::read_to_string(build_dir.path().join("dev/null")) - .unwrap() - .is_empty()); -} - -fn build_derivation_missing_deps(test_data: &TestData) { - let tmp_dir = tempdir().unwrap(); - let build_dir = tempdir().unwrap(); - let busybox_derivation = load_derivation(&test_data.busybox_drv_path).unwrap(); - let coreutils_derivation = load_derivation(&test_data.coreutils_drv_path).unwrap(); - - let derivation = test_derivation( - &tmp_dir, - Path::new("/output"), - "mkdir $out && touch $out/hello", - &format!("{}/bin/sh", busybox_derivation.outputs["out"].path), - &vec![], - BTreeMap::from([( - test_data.busybox_drv_path.clone(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: BTreeSet::from(["out".to_owned()]), - }, - )]), - BTreeMap::from([( - "PATH".to_owned(), - format!("{}/bin", coreutils_derivation.outputs["out"].path), - )]), - ); - let derivation_path = tmp_dir.path().join("foo.drv"); - let mut derivation_file = File::create(&derivation_path).unwrap(); - save_derivation(&mut derivation_file, &derivation).unwrap(); - - let stderr_path = tmp_dir.path().join("stderr"); - fs::File::create(&stderr_path).unwrap(); - - assert_cmd(&[ - "--stderr", - &stderr_path.to_str().unwrap(), - "--build-dir", - &build_dir.path().to_str().unwrap(), - &derivation_path.to_str().unwrap(), - ]) - .failure() - .stderr(predicate::str::is_empty()); - - assert!(!fs::read_to_string(&stderr_path).unwrap().is_empty()); -} - -fn build_derivation_sandboxed_input_drvs(test_data: &TestData) { - let tmp_dir = tempdir().unwrap(); - let build_dir = tempdir().unwrap(); - - let derivation = 
simple_derivation(test_data, &tmp_dir, "mkdir $out && touch $out/hello"); - let derivation_path = tmp_dir.path().join("foo.drv"); - save_derivation(&mut File::create(&derivation_path).unwrap(), &derivation).unwrap(); - - assert_cmd(&[ - "--build-dir", - &build_dir.path().to_str().unwrap(), - &derivation_path.to_str().unwrap(), - ]) - .success() - .stderr(predicate::str::is_empty()); - - assert!(build_dir.path().join("output/hello").exists()); -} - -fn assert_cmd(hash_args: &[&str]) -> assert_cmd::assert::Assert { - let mut rix_args = vec!["build-derivation"]; - rix_args.extend_from_slice(hash_args); - return Command::cargo_bin("rix").unwrap().args(rix_args).assert(); -} - -#[derive(Clone)] -struct TestData { - busybox_drv_path: String, - coreutils_drv_path: String, -} - -impl TestData { - pub fn new() -> Self { - TestData { - busybox_drv_path: get_derivation_path(".#pkgs.busybox-sandbox-shell") - .expect("Couldn't find the derivation for busybox."), - coreutils_drv_path: get_derivation_path(".#pkgs.coreutils") - .expect("Couldn't find derivation for coreutils."), - } - } -} - -fn simple_derivation( - test_data: &TestData, - tmp_dir: &tempfile::TempDir, - builder_script: &str, -) -> Derivation { - let busybox_derivation = load_derivation(&test_data.busybox_drv_path).unwrap(); - let coreutils_derivation = load_derivation(&test_data.coreutils_drv_path).unwrap(); - return test_derivation( - tmp_dir, - Path::new("/output"), - builder_script, - &format!("{}/bin/sh", busybox_derivation.outputs["out"].path), - &vec![], - BTreeMap::from([ - ( - test_data.coreutils_drv_path.clone(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: BTreeSet::from(["out".to_owned()]), - }, - ), - ( - test_data.busybox_drv_path.clone(), - InputDrv { - dynamic_outputs: BTreeMap::new(), - outputs: BTreeSet::from(["out".to_owned()]), - }, - ), - ]), - BTreeMap::from([( - "PATH".to_owned(), - format!("{}/bin", coreutils_derivation.outputs["out"].path), - )]), - ); -} - -fn test_derivation( - src_dir: &tempfile::TempDir, - out_dir: &Path, - builder_script: &str, - builder: &str, - input_srcs: &Vec, - input_drvs: BTreeMap, - mut env: BTreeMap, -) -> Derivation { - let builder_script_file = tmp_file(&src_dir, "builder.sh", builder_script); - fs::set_permissions(&builder_script_file, fs::Permissions::from_mode(0o640)).unwrap(); - env.extend([("out".to_owned(), out_dir.to_str().unwrap().to_owned())]); - - Derivation { - builder: builder.to_owned(), - args: vec![builder_script_file.clone()], - env: env, - input_drvs: input_drvs, - input_srcs: input_srcs - .iter() - .chain(&[builder_script_file.clone()]) - .cloned() - .collect(), - outputs: BTreeMap::from([( - "out".to_owned(), - DerivationOutput { - hash: Some("".to_owned()), - hash_algo: Some("".to_owned()), - path: out_dir.to_str().unwrap().to_owned(), - }, - )]), - system: "any".to_owned(), - } -} - -fn get_derivation_path(installable_arg: &str) -> Option { - let mut nix_args = vec!["show-derivation".to_owned()]; - nix_args.push(installable_arg.to_owned()); - let show_drv_out = Command::new("nix") - .args(&nix_args) - .output() - .expect("failed to show the derivation"); - - let parsed_out: BTreeMap = - serde_json::from_str(str::from_utf8(&show_drv_out.stdout).unwrap()).unwrap(); - - parsed_out.keys().next().cloned() -} diff --git a/tests/cmd/hash.rs b/tests/cmd/hash.rs deleted file mode 100644 index a2f2d4c..0000000 --- a/tests/cmd/hash.rs +++ /dev/null @@ -1,196 +0,0 @@ -use assert_cmd::prelude::*; -use predicates::prelude::*; -use std::process::Command; - 
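The tests below all revolve around one fact: a single digest has several interchangeable textual forms, and the SRI form is simply `<algo>-<padded base-64 digest>`, which `sri_hash_components` splits at the first `-` (a `:` separator is also accepted). A quick standalone illustration using the MD5 vectors copied from the tests that follow (this snippet only checks the shapes, it is not part of rix and does not re-derive the encodings):

```rust
// The same 16-byte MD5 digest in the textual forms exercised by the tests
// below (values copied from those tests).
fn main() {
    let base16 = "beeca87be45ec87d241ddd0e1bad80c1"; // 2 hex chars per byte
    let base32 = "61h2nin3nx3lj7vj2ywixsiv5y"; // Nix base-32, 5 bits per char
    let sri = "md5-vuyoe+ReyH0kHd0OG62AwQ=="; // "<algo>-<padded base-64>"

    assert_eq!(base16.len(), 32);
    assert_eq!(base32.len(), 26);

    // sri_hash_components splits an SRI hash at the first '-' (or ':').
    let (algo, b64) = sri.split_once('-').unwrap();
    assert_eq!(algo, "md5");
    assert_eq!(b64.len(), 24); // ((4 * 16 / 3) + 3) & !3
    println!("{algo}: {base16} == {base32} == {b64}");
}
```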
-#[test] -fn help() { - assert_hash_cmd(&["--help"]) - .success() - .stderr(predicate::str::is_empty()); -} - -#[test] -fn to_base16_help() { - assert_hash_cmd(&["to-base16", "--help"]) - .success() - .stderr(predicate::str::is_empty()); -} - -#[test] -fn md5_to_base16() { - assert_hash_cmd(&["to-base16", "--type=md5", "61h2nin3nx3lj7vj2ywixsiv5y"]) - .success() - .stdout(predicate::str::diff("beeca87be45ec87d241ddd0e1bad80c1\n")) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn sha1_to_base16() { - assert_hash_cmd(&["to-base16", "--type=sha1", "1upkmUx5+XtipytCb75gVqGUu5A="]) - .success() - .stdout(predicate::str::diff( - "d6ea64994c79f97b62a72b426fbe6056a194bb90\n", - )) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn sha256_to_base16() { - let hash_str = "1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E="; - assert_hash_cmd(&["to-base16", "--type=sha256", hash_str]) - .success() - .stdout(predicate::str::diff( - "d5313862856f7770bdffed2dfe8c417a84f3f6d5e11c3b5c19420f2130766f81\n", - )) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn sha512_to_base16() { - assert_hash_cmd(&["to-base16", "--type=sha512", "+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ=="]) - .success() - .stdout(predicate::str::diff("fb2e199de3e9bd6b357dcfcb8594531e44deb1b5e4c8162e381fb90b2a1d66aac4b844d78b7cce55fa404087600b79576c72d30c6f5d428b3147d061bcb2832d\n")) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn sha512_to_base32() { - assert_hash_cmd(&[ - "to-base32", - "--type=sha512", - "+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ==", - ]) - .success() - .stdout(predicate::str::diff("0nq7cmwc784fccb89fny36kf9n5fy8bc23l0h7sap77r2yp8jwc9ak63lm0pf8z70p1dj74nnqxwi0yafa8bjygglsnpgg9wffijbpv\n")) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn sha512_to_base64() { - assert_hash_cmd(&[ - "to-base64", - "--type=sha512", - "0nq7cmwc784fccb89fny36kf9n5fy8bc23l0h7sap77r2yp8jwc9ak63lm0pf8z70p1dj74nnqxwi0yafa8bjygglsnpgg9wffijbpv", - ]) - .success() - .stdout(predicate::str::diff("+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ==\n")) - .stderr(predicate::str::is_empty()); -} - -#[test] -fn to_base16_invalid_sri() { - assert_hash_cmd(&["to-base16", "61h2nin3nx3lj7vj2ywixsiv5y"]) - .failure() - .stderr(predicate::str::diff( - "error: Failed to parse '61h2nin3nx3lj7vj2ywixsiv5y'. 
Not an SRI hash.\n", - )); -} - -#[test] -fn to_base16_sri_unknown_hash_type() { - assert_hash_cmd(&["to-base16", "foobar-61h2nin3nx3lj7vj2ywixsiv5y"]) - .failure() - .stderr(predicate::str::diff("error: Unknown hash type 'foobar'.\n")); -} - -#[test] -fn to_base16_sri_md5() { - assert_hash_cmd(&["to-base16", "md5-61h2nin3nx3lj7vj2ywixsiv5y"]) - .success() - .stdout(predicate::str::diff("beeca87be45ec87d241ddd0e1bad80c1\n")); -} - -#[test] -fn to_base32_sri_sha1() { - assert_hash_cmd(&["to-base32", "sha1-1upkmUx5+XtipytCb75gVqGUu5A="]) - .success() - .stdout(predicate::str::diff("j2xr98anc2z6yhiblxi7pybr9jcn9snn\n")); -} - -#[test] -fn to_base64_sri_sha256() { - let hash_str = "sha256-10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm"; - assert_hash_cmd(&["to-base64", hash_str]) - .success() - .stdout(predicate::str::diff( - "1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E=\n", - )); -} - -#[test] -fn to_base32_sri_sha512() { - assert_hash_cmd(&["to-base32","sha512:+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ=="]) - .success() - .stdout(predicate::str::diff("0nq7cmwc784fccb89fny36kf9n5fy8bc23l0h7sap77r2yp8jwc9ak63lm0pf8z70p1dj74nnqxwi0yafa8bjygglsnpgg9wffijbpv\n")); -} - -#[test] -fn to_sri_md5() { - let sri = "md5-vuyoe+ReyH0kHd0OG62AwQ=="; - let base32 = "61h2nin3nx3lj7vj2ywixsiv5y"; - let sri_base32 = "md5-61h2nin3nx3lj7vj2ywixsiv5y"; - assert_hash_cmd(&["to-sri", sri_base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", "--type", "md5", base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", sri_base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); -} - -#[test] -fn to_sri_sha1() { - let sri = "sha1-1upkmUx5+XtipytCb75gVqGUu5A="; - let base16 = "d6ea64994c79f97b62a72b426fbe6056a194bb90"; - let sri_base16 = "sha1-d6ea64994c79f97b62a72b426fbe6056a194bb90"; - assert_hash_cmd(&["to-sri", sri]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", "--type", "sha1", base16]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", sri_base16]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); -} - -#[test] -fn to_sri_sha256() { - let sri = "sha256-1TE4YoVvd3C9/+0t/oxBeoTz9tXhHDtcGUIPITB2b4E="; - let base32 = "10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm"; - let sri_base32 = "sha256-10bgfqq223s235f3n771spvg713s866gwbgdzyyp0xvghmi3hcfm"; - assert_hash_cmd(&["to-sri", sri_base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", "--type", "sha256", base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", sri_base32]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); -} - -#[test] -fn to_sri_sha512() { - let sri = "sha512-+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ=="; - let base64 = - "+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ=="; - let sri_base64 = "sha512-+y4ZnePpvWs1fc/LhZRTHkTesbXkyBYuOB+5CyodZqrEuETXi3zOVfpAQIdgC3lXbHLTDG9dQosxR9BhvLKDLQ=="; - assert_hash_cmd(&["to-sri", sri_base64]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", "--type", "sha512", base64]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); - assert_hash_cmd(&["to-sri", 
sri_base64]) - .success() - .stdout(predicate::str::diff(format!("{}\n", sri))); -} - -fn assert_hash_cmd(hash_args: &[&str]) -> assert_cmd::assert::Assert { - let mut rix_args = vec!["hash"]; - rix_args.extend_from_slice(hash_args); - return Command::cargo_bin("rix").unwrap().args(rix_args).assert(); -} diff --git a/tests/cmd/mod.rs b/tests/cmd/mod.rs index 8294912..802fd17 100644 --- a/tests/cmd/mod.rs +++ b/tests/cmd/mod.rs @@ -1,5 +1,2 @@ -pub mod build_derivation; pub mod eval; -pub mod hash; -pub mod show_derivation; pub mod transpile; diff --git a/tests/cmd/show_derivation.rs b/tests/cmd/show_derivation.rs deleted file mode 100644 index 46de284..0000000 --- a/tests/cmd/show_derivation.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::test_utils::tmp_file; -use assert_cmd::prelude::*; -use predicates::prelude::*; -use serde_json::Value; -use std::process::Command; -use std::str; -use tempfile::tempdir; - -#[test] -fn show_derivation_help() { - Command::cargo_bin("rix") - .unwrap() - .args(["show-derivation", "--help"]) - .assert() - .success() - .stdout(predicate::str::contains("Usage:")); -} - -#[test] -fn show_derivation() { - let tmpdir = tempdir().unwrap(); - let derivation_contents = r#"Derive([("out","/foo","sha256","abc")],[("/drv1",["out"]),("/drv2",["dev"])],["/builder.sh"],"x86_64-linux","/bash",["-e","/builder.sh"],[("ENV1","val1"),("ENV2","val2")])"#; - let derivation_path = tmp_file(&tmpdir, "foo.drv", derivation_contents); - - let expected_output: Value = serde_json::from_str(format!( - "{{\"{}\":{}}}", - &derivation_path, - r#"{"args":["-e","/builder.sh"],"builder":"/bash","env":{"ENV1":"val1","ENV2":"val2"},"inputDrvs":{"/drv2":{"dynamicOutputs":{},"outputs":["dev"]},"/drv1":{"dynamicOutputs":{},"outputs":["out"]}},"inputSrcs":["/builder.sh"],"outputs":{"out":{"hash":"abc","hashAlgo":"sha256","path":"/foo"}},"system":"x86_64-linux"}"#, - ).as_str()).unwrap(); - - let cmd_result = Command::cargo_bin("rix") - .unwrap() - .args(["show-derivation", &derivation_path]) - .assert() - .success(); - - let output: Value = - serde_json::from_str(str::from_utf8(&cmd_result.get_output().stdout).unwrap()).unwrap(); - - assert_eq!(output, expected_output); -}