From 195d2d4c1bc8cab7fe7c67490674246a16091bbd Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:26:14 +0200 Subject: [PATCH 1/8] feat: pixi global manifest (#1802) This adds functionality and tests to parse the `pixi-global.toml` as described in the [proposal](https://pixi.sh/dev/design_proposals/pixi_global_manifest/) --- Cargo.lock | 17 ++ Cargo.toml | 1 + crates/pixi_manifest/src/lib.rs | 2 +- src/global/mod.rs | 4 + src/global/project/document.rs | 11 ++ src/global/project/environment.rs | 66 +++++++ src/global/project/errors.rs | 63 +++++++ src/global/project/manifest.rs | 85 +++++++++ src/global/project/mod.rs | 171 ++++++++++++++++++ src/global/project/parsed_manifest.rs | 102 +++++++++++ ...__parsed_manifest__tests__invalid_key.snap | 27 +++ src/lib.rs | 1 + 12 files changed, 549 insertions(+), 1 deletion(-) create mode 100644 src/global/mod.rs create mode 100644 src/global/project/document.rs create mode 100644 src/global/project/environment.rs create mode 100644 src/global/project/errors.rs create mode 100644 src/global/project/manifest.rs create mode 100644 src/global/project/mod.rs create mode 100644 src/global/project/parsed_manifest.rs create mode 100644 src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap diff --git a/Cargo.lock b/Cargo.lock index 734974033..c463c1109 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1116,6 +1116,12 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "deunicode" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00" + [[package]] name = "dialoguer" version = "0.11.0" @@ -1376,6 +1382,16 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "fake" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1c25829bde82205da46e1823b2259db6273379f626fc211f126f65654a2669be" +dependencies = [ + "deunicode", + "rand", +] + [[package]] name = "fancy_display" version = "0.1.0" @@ -3409,6 +3425,7 @@ dependencies = [ "distribution-filename", "distribution-types", "dunce", + "fake", "fancy_display", "fd-lock", "flate2", diff --git a/Cargo.toml b/Cargo.toml index 9234f1542..b9315c646 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -314,6 +314,7 @@ strip = false [dev-dependencies] +fake = "2.9.2" insta = { workspace = true, features = ["yaml", "glob"] } rstest = { workspace = true } serde_json = { workspace = true } diff --git a/crates/pixi_manifest/src/lib.rs b/crates/pixi_manifest/src/lib.rs index a537e0482..f87dc0ab6 100644 --- a/crates/pixi_manifest/src/lib.rs +++ b/crates/pixi_manifest/src/lib.rs @@ -27,7 +27,7 @@ pub use dependencies::{CondaDependencies, Dependencies, PyPiDependencies}; pub use manifest::{Manifest, ManifestKind}; pub use crate::environments::Environments; -pub use crate::parsed_manifest::ParsedManifest; +pub use crate::parsed_manifest::{deserialize_package_map, ParsedManifest}; pub use crate::solve_group::{SolveGroup, SolveGroups}; pub use activation::Activation; pub use channel::PrioritizedChannel; diff --git a/src/global/mod.rs b/src/global/mod.rs new file mode 100644 index 000000000..e9916f7d1 --- /dev/null +++ b/src/global/mod.rs @@ -0,0 +1,4 @@ +// TODO: remove this before merging to main +#![allow(unused)] + +mod project; diff --git a/src/global/project/document.rs b/src/global/project/document.rs new file mode 100644 index 000000000..55b824194 --- /dev/null +++ b/src/global/project/document.rs @@ -0,0 +1,11 @@ +use std::fmt; + +#[derive(Debug, Clone)] +/// Represents a mutable pixi global TOML. 
+pub(crate) struct ManifestSource(pub(crate) toml_edit::DocumentMut); + +impl fmt::Display for ManifestSource { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/src/global/project/environment.rs b/src/global/project/environment.rs new file mode 100644 index 000000000..63bef84c0 --- /dev/null +++ b/src/global/project/environment.rs @@ -0,0 +1,66 @@ +use std::{fmt, str::FromStr}; + +use miette::Diagnostic; +use regex::Regex; +use serde::{self, Deserialize, Deserializer}; +use thiserror::Error; + +/// Represents the name of an environment. +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub(crate) struct EnvironmentName(String); + +impl EnvironmentName { + /// Returns the name of the environment. + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for EnvironmentName { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl PartialEq for EnvironmentName { + fn eq(&self, other: &str) -> bool { + self.as_str() == other + } +} + +impl<'de> Deserialize<'de> for EnvironmentName { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = String::deserialize(deserializer)?; + name.parse().map_err(serde::de::Error::custom) + } +} + +/// Represents an error that occurs when parsing an environment name. +/// +/// This error is returned when a string fails to be parsed as an environment name. +#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] +#[error("Failed to parse environment name '{attempted_parse}', please use only lowercase letters, numbers and dashes")] +pub struct ParseEnvironmentNameError { + /// The string that was attempted to be parsed. 
+ pub attempted_parse: String, +} + +impl FromStr for EnvironmentName { + type Err = ParseEnvironmentNameError; + fn from_str(s: &str) -> Result { + static REGEX: std::sync::OnceLock = std::sync::OnceLock::new(); + let regex = REGEX + .get_or_init(|| Regex::new(r"^[a-z0-9-]+$").expect("Regex should be able to compile")); + + if !regex.is_match(s) { + // Return an error if the string does not match the regex + return Err(ParseEnvironmentNameError { + attempted_parse: s.to_string(), + }); + } + Ok(EnvironmentName(s.to_string())) + } +} diff --git a/src/global/project/errors.rs b/src/global/project/errors.rs new file mode 100644 index 000000000..1b28d1108 --- /dev/null +++ b/src/global/project/errors.rs @@ -0,0 +1,63 @@ +use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, NamedSource, Report}; + +use thiserror::Error; + +/// Represents errors that can occur when working with a pixi global manifest +#[derive(Error, Debug, Clone, Diagnostic)] +pub enum ManifestError { + #[error(transparent)] + Error(#[from] toml_edit::TomlError), + #[error("Could not find or access the part '{part}' in the path '[{table_name}]'")] + TableError { part: String, table_name: String }, + #[error("Could not find or access array '{array_name}' in '[{table_name}]'")] + ArrayError { + array_name: String, + table_name: String, + }, +} + +impl ManifestError { + pub fn to_fancy(&self, file_name: &str, contents: impl Into) -> Result { + if let Some(span) = self.span() { + Err(miette::miette!( + labels = vec![LabeledSpan::at(span, self.message())], + "failed to parse project manifest" + ) + .with_source_code(NamedSource::new(file_name, contents.into()))) + } else { + Err(self.clone()).into_diagnostic() + } + } + + fn span(&self) -> Option> { + match self { + ManifestError::Error(e) => e.span(), + _ => None, + } + } + fn message(&self) -> String { + match self { + ManifestError::Error(e) => e.message().to_owned(), + _ => self.to_string(), + } + } + + pub fn table_error(part: &str, table_name: &str) 
-> Self { + Self::TableError { + part: part.into(), + table_name: table_name.into(), + } + } + + pub fn array_error(array_name: &str, table_name: &str) -> Self { + Self::ArrayError { + array_name: array_name.into(), + table_name: table_name.into(), + } + } +} +impl From for ManifestError { + fn from(e: toml_edit::de::Error) -> Self { + ManifestError::Error(e.into()) + } +} diff --git a/src/global/project/manifest.rs b/src/global/project/manifest.rs new file mode 100644 index 000000000..d9fe7d80f --- /dev/null +++ b/src/global/project/manifest.rs @@ -0,0 +1,85 @@ +use std::path::{Path, PathBuf}; + +use miette::IntoDiagnostic; +use rattler_conda_types::{MatchSpec, PackageName}; +use toml_edit::DocumentMut; + +use super::errors::ManifestError; + +use super::MANIFEST_DEFAULT_NAME; +use super::{document::ManifestSource, parsed_manifest::ParsedManifest}; + +/// Handles the global project's manifest file. +/// This struct is responsible for reading, parsing, editing, and saving the +/// manifest. It encapsulates all logic related to the manifest's TOML format +/// and structure. The manifest data is represented as a [`ParsedManifest`] +/// struct for easy manipulation. 
+#[derive(Debug, Clone)] +pub struct Manifest { + /// The path to the manifest file + pub path: PathBuf, + + /// The raw contents of the manifest file + pub contents: String, + + /// Editable toml document + pub document: ManifestSource, + + /// The parsed manifest + pub parsed: ParsedManifest, +} + +impl Manifest { + /// Create a new manifest from a path + pub fn from_path(path: impl AsRef) -> miette::Result { + let manifest_path = dunce::canonicalize(path.as_ref()).into_diagnostic()?; + let contents = std::fs::read_to_string(path.as_ref()).into_diagnostic()?; + Self::from_str(manifest_path.as_ref(), contents) + } + + /// Create a new manifest from a string + pub fn from_str(manifest_path: &Path, contents: impl Into) -> miette::Result { + let contents = contents.into(); + let parsed = ParsedManifest::from_toml_str(&contents); + + let (manifest, document) = match parsed.and_then(|manifest| { + contents + .parse::() + .map(|doc| (manifest, doc)) + .map_err(ManifestError::from) + }) { + Ok(result) => result, + Err(e) => e.to_fancy(MANIFEST_DEFAULT_NAME, &contents)?, + }; + + let source = ManifestSource(document); + let manifest = Self { + path: manifest_path.to_path_buf(), + contents, + document: source, + parsed: manifest, + }; + + Ok(manifest) + } + + /// Adds an environment to the project. + pub fn add_environment(&mut self, _name: String) -> miette::Result<()> { + todo!() + } + + /// Removes an environment from the project. + pub fn remove_environment(&mut self, _name: &str) -> miette::Result { + todo!() + } + + /// Add a matchspec to the manifest + pub fn add_dependency(&mut self, _spec: &MatchSpec) -> miette::Result { + todo!() + } + + /// Removes a dependency based on `SpecType`. 
+ pub fn remove_dependency(&mut self, _dep: &PackageName) -> miette::Result<()> { + todo!() + } +} diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs new file mode 100644 index 000000000..fadde0c37 --- /dev/null +++ b/src/global/project/mod.rs @@ -0,0 +1,171 @@ +use std::{ + env, + fmt::Formatter, + fs, + path::{Path, PathBuf}, + sync::OnceLock, +}; + +use manifest::Manifest; +use miette::IntoDiagnostic; +use rattler_repodata_gateway::Gateway; +use reqwest_middleware::ClientWithMiddleware; +use std::fmt::Debug; + +mod document; +mod environment; +mod errors; +mod manifest; +mod parsed_manifest; + +const MANIFEST_DEFAULT_NAME: &str = "pixi-global.toml"; + +/// The pixi global project, this main struct to interact with the pixi global project. +/// This struct holds the `Manifest` and has functions to modify +/// or request information from it. This allows in the future to have multiple manifests +/// linked to a pixi global project. +#[derive(Clone)] +pub struct Project { + /// Root folder of the project + root: PathBuf, + /// Reqwest client shared for this project. + /// This is wrapped in a `OnceLock` to allow for lazy initialization. + client: OnceLock<(reqwest::Client, ClientWithMiddleware)>, + /// The repodata gateway to use for answering queries about repodata. + /// This is wrapped in a `OnceLock` to allow for lazy initialization. 
+ repodata_gateway: OnceLock, + /// The manifest for the project + pub(crate) manifest: Manifest, +} + +impl Debug for Project { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Global Project") + .field("root", &self.root) + .field("manifest", &self.manifest) + .finish() + } +} + +impl Project { + /// Constructs a new instance from an internal manifest representation + fn from_manifest(manifest: Manifest) -> Self { + let root = manifest + .path + .parent() + .expect("manifest path should always have a parent") + .to_owned(); + + Self { + root, + client: Default::default(), + repodata_gateway: Default::default(), + manifest, + } + } + + /// Constructs a project from a manifest. + pub fn from_str(manifest_path: &Path, content: &str) -> miette::Result { + let manifest = Manifest::from_str(manifest_path, content)?; + Ok(Self::from_manifest(manifest)) + } + + /// Discovers the project manifest file in path set by `PIXI_GLOBAL_MANIFESTS` + /// or alternatively at `~/.pixi/manifests/pixi-global.toml`. + /// If the manifest doesn't exist yet, and empty one will be created. + pub fn discover() -> miette::Result { + let manifest_dir = env::var("PIXI_GLOBAL_MANIFESTS") + .map(PathBuf::from) + .or_else(|_| Self::default_dir())?; + + fs::create_dir_all(&manifest_dir).into_diagnostic()?; + + let manifest_path = manifest_dir.join(MANIFEST_DEFAULT_NAME); + + if !manifest_path.exists() { + fs::File::create(&manifest_path).into_diagnostic()?; + } + Self::from_path(&manifest_path) + } + + /// Get default dir for the pixi global manifest + fn default_dir() -> miette::Result { + // If environment variable is not set, use default directory + let default_dir = dirs::home_dir() + .ok_or_else(|| miette::miette!("Could not get home directory"))? + .join(".pixi/manifests"); + Ok(default_dir) + } + + /// Loads a project from manifest file. 
+ pub fn from_path(manifest_path: &Path) -> miette::Result { + let manifest = Manifest::from_path(manifest_path)?; + Ok(Project::from_manifest(manifest)) + } +} + +#[cfg(test)] +mod tests { + use std::io::Write; + + use super::*; + use fake::{faker::filesystem::zh_tw::FilePath, Fake}; + + const SIMPLE_MANIFEST: &str = r#" + [envs.python.dependencies] + python = "3.11.*" + [envs.python.exposed] + python = "python" + "#; + + #[test] + fn test_project_from_str() { + let manifest_path: PathBuf = FilePath().fake(); + + let project = Project::from_str(&manifest_path, SIMPLE_MANIFEST).unwrap(); + assert_eq!(project.root, manifest_path.parent().unwrap()); + } + + #[test] + fn test_project_from_path() { + let tempdir = tempfile::tempdir().unwrap(); + let manifest_path = tempdir.path().join(MANIFEST_DEFAULT_NAME); + + // Create and write global manifest + let mut file = fs::File::create(&manifest_path).unwrap(); + file.write_all(SIMPLE_MANIFEST.as_bytes()).unwrap(); + let project = Project::from_path(&manifest_path).unwrap(); + + // Canonicalize both paths + let canonical_root = project.root.canonicalize().unwrap(); + let canonical_manifest_parent = manifest_path.parent().unwrap().canonicalize().unwrap(); + + assert_eq!(canonical_root, canonical_manifest_parent); + } + + #[test] + fn test_project_discover() { + let tempdir = tempfile::tempdir().unwrap(); + let manifest_dir = tempdir.path(); + env::set_var("PIXI_GLOBAL_MANIFESTS", manifest_dir); + let project = Project::discover().unwrap(); + assert!(project.manifest.path.exists()); + let expected_manifest_path = + dunce::canonicalize(manifest_dir.join(MANIFEST_DEFAULT_NAME)).unwrap(); + assert_eq!(project.manifest.path, expected_manifest_path) + } + + #[test] + fn test_project_from_manifest() { + let manifest_path: PathBuf = FilePath().fake(); + + let manifest = Manifest::from_str(&manifest_path, SIMPLE_MANIFEST).unwrap(); + let project = Project::from_manifest(manifest); + assert_eq!(project.root, 
manifest_path.parent().unwrap()); + } + + #[test] + fn test_project_default_dir() { + Project::default_dir().unwrap(); + } +} diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs new file mode 100644 index 000000000..4545fc236 --- /dev/null +++ b/src/global/project/parsed_manifest.rs @@ -0,0 +1,102 @@ +use indexmap::IndexMap; +use pixi_manifest::deserialize_package_map; +use rattler_conda_types::PackageName; +use serde_with::{serde_as, serde_derive::Deserialize}; + +use super::environment::EnvironmentName; + +use super::errors::ManifestError; +use pixi_spec::PixiSpec; + +/// Describes the contents of a parsed global project manifest. +#[serde_as] +#[derive(Deserialize, Debug, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct ParsedManifest { + /// The environments the project can create. + #[serde(default)] + envs: IndexMap, +} + +impl ParsedManifest { + /// Parses a toml string into a project manifest. + pub fn from_toml_str(source: &str) -> Result { + toml_edit::de::from_str(source).map_err(ManifestError::from) + } +} + +#[serde_as] +#[derive(Deserialize, Debug, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +struct ParsedEnvironment { + #[serde(default, deserialize_with = "deserialize_package_map")] + dependencies: IndexMap, + exposed: IndexMap, +} + +#[cfg(test)] +mod tests { + use insta::assert_snapshot; + + use super::ParsedManifest; + + #[test] + fn test_invalid_key() { + let examples = [ + "[invalid]", + "[envs.ipython.invalid]", + "[envs.INVALID.dependencies]", + "[envs.python_3.dependencies]", + ]; + assert_snapshot!(examples + .into_iter() + .map(|example| ParsedManifest::from_toml_str(example) + .unwrap_err() + .to_string()) + .collect::>() + .join("\n")) + } + + #[test] + fn test_duplicate_dependency() { + let contents = r#" + [envs.python.dependencies] + python = "*" + PYTHON = "*" + [envs.python.exposed] + python = "python" + "#; + let manifest = 
ParsedManifest::from_toml_str(contents); + + assert!(manifest.is_err()); + assert!(manifest + .unwrap_err() + .to_string() + .contains("duplicate dependency")); + } + + #[test] + fn test_tool_deserialization() { + let contents = r#" + # The name of the environment is `python` + # It will expose python, python3 and python3.11, but not pip + [envs.python.dependencies] + python = "3.11.*" + pip = "*" + + [envs.python.exposed] + python = "python" + python3 = "python3" + "python3.11" = "python3.11" + + # The name of the environment is `python3-10` + # It will expose python3.10 + [envs.python3-10.dependencies] + python = "3.10.*" + + [envs.python3-10.exposed] + "python3.10" = "python" + "#; + let _manifest = ParsedManifest::from_toml_str(contents).unwrap(); + } +} diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap new file mode 100644 index 000000000..93e574135 --- /dev/null +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap @@ -0,0 +1,27 @@ +--- +source: src/global/project/parsed_manifest.rs +expression: "examples.into_iter().map(|example|\n ParsedManifest::from_toml_str(example).unwrap_err().to_string()).collect::>().join(\"\\n\")" +--- +TOML parse error at line 1, column 2 + | +1 | [invalid] + | ^^^^^^^ +unknown field `invalid`, expected `envs` + +TOML parse error at line 1, column 15 + | +1 | [envs.ipython.invalid] + | ^^^^^^^ +unknown field `invalid`, expected `dependencies` or `exposed` + +TOML parse error at line 1, column 7 + | +1 | [envs.INVALID.dependencies] + | ^^^^^^^ +Failed to parse environment name 'INVALID', please use only lowercase letters, numbers and dashes + +TOML parse error at line 1, column 7 + | +1 | [envs.python_3.dependencies] + | ^^^^^^^^ +Failed to parse environment name 'python_3', please use only lowercase letters, numbers and dashes diff --git 
a/src/lib.rs b/src/lib.rs index 6f135279e..ce59bcfb1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -2,6 +2,7 @@ pub mod activation; pub mod cli; pub(crate) mod conda_pypi_clobber; mod environment; +mod global; mod install_pypi; mod install_wheel; mod lock_file; From c021fbba053b2a7f528cebe3ff6c4c62f0573e0c Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Tue, 3 Sep 2024 10:22:42 +0200 Subject: [PATCH 2/8] feat: add `global sync` (#1835) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Main features: - minimal implementation of `pixi global sync` - it creates environments if they are on the manifest but not on the system - it deletes environments if they are not in the manifest - It adds binaries that are exposed in the manifest - It removes binaries that are not exposed in the manifest - it doesn't check yet whether environments already exist to avoid double work - it doesn't import from existing environments on the system yet Side features: - Add `sort_channels_by_priority` function - Heavily refactor structs in `common.rs` - Add many tests How to test: Create `~/.pixi/manifests/pixi-global.toml` with: ```toml # The name of the environment is `python` [envs.python] channels = ["conda-forge"] # optional, defaults to your current OS # platform = "osx-64" # It will expose python, python3 and python3.11, but not pip [envs.python.dependencies] python = "3.11.*" pip = "*" [envs.python.exposed] python = "python" python3 = "python3" "python3.11" = "python3.11" # The name of the environment is `python3-10` [envs.python3-10] channels = ["https://fast.prefix.dev/conda-forge"] # It will expose python3.10 [envs.python3-10.dependencies] python = "3.10.*" [envs.python3-10.exposed] "python3.10" = "python" ``` --------- Co-authored-by: nichmor --- Cargo.lock | 44 +- crates/pixi_manifest/src/channel.rs | 23 +- crates/pixi_manifest/src/features_ext.rs | 24 +- crates/pixi_manifest/src/lib.rs | 2 
+- src/cli/global/common.rs | 145 ----- src/cli/global/install.rs | 478 +---------------- src/cli/global/list.rs | 152 +----- src/cli/global/mod.rs | 16 +- src/cli/global/remove.rs | 99 +--- src/cli/global/sync.rs | 53 ++ src/cli/global/upgrade.rs | 215 -------- src/cli/global/upgrade_all.rs | 42 -- src/global/common.rs | 296 ++++++++++ src/global/install.rs | 505 ++++++++++++++++++ src/global/mod.rs | 8 + src/global/project/environment.rs | 23 +- src/global/project/{errors.rs => error.rs} | 0 src/global/project/manifest.rs | 2 +- src/global/project/mod.rs | 58 +- src/global/project/parsed_manifest.rs | 192 ++++++- ...manifest__tests__duplicate_dependency.snap | 9 + ...ed_manifest__tests__duplicate_exposed.snap | 5 + ...__parsed_manifest__tests__expose_pixi.snap | 9 + ...__parsed_manifest__tests__invalid_key.snap | 2 +- 24 files changed, 1180 insertions(+), 1222 deletions(-) delete mode 100644 src/cli/global/common.rs create mode 100644 src/cli/global/sync.rs delete mode 100644 src/cli/global/upgrade.rs delete mode 100644 src/cli/global/upgrade_all.rs create mode 100644 src/global/common.rs create mode 100644 src/global/install.rs rename src/global/project/{errors.rs => error.rs} (100%) create mode 100644 src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_dependency.snap create mode 100644 src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap create mode 100644 src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__expose_pixi.snap diff --git a/Cargo.lock b/Cargo.lock index b36be52b8..be29fed80 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2295,7 +2295,7 @@ dependencies = [ "pypi-types", "reflink-copy", "regex", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "serde_json", "sha2", @@ -2380,7 +2380,7 @@ checksum = "db2b7379a75544c94b3da32821b0bf41f9062e9970e23b78cc577d0d89676d16" dependencies = [ "jiff-tzdb-platform", "serde", - "windows-sys 0.52.0", + "windows-sys 
0.59.0", ] [[package]] @@ -3245,7 +3245,7 @@ dependencies = [ "pep440_rs", "pubgrub", "regex", - "rustc-hash 2.0.0", + "rustc-hash", "schemars", "serde", "smallvec", @@ -3663,7 +3663,7 @@ name = "platform-tags" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.4.0#d9bd3bc7a536037ea8645fb70f1d35c0bb62b68e" dependencies = [ - "rustc-hash 2.0.0", + "rustc-hash", "serde", "thiserror", ] @@ -3787,7 +3787,7 @@ dependencies = [ "indexmap 2.3.0", "log", "priority-queue", - "rustc-hash 1.1.0", + "rustc-hash", "thiserror", ] @@ -3898,7 +3898,7 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.0.0", + "rustc-hash", "rustls 0.23.12", "socket2", "thiserror", @@ -3915,7 +3915,7 @@ dependencies = [ "bytes", "rand", "ring", - "rustc-hash 2.0.0", + "rustc-hash", "rustls 0.23.12", "slab", "thiserror", @@ -4799,12 +4799,6 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - [[package]] name = "rustc-hash" version = "2.0.0" @@ -6167,7 +6161,7 @@ dependencies = [ "reqwest 0.12.5", "reqwest-middleware", "rust-netrc", - "rustc-hash 2.0.0", + "rustc-hash", "tokio", "tracing", "url", @@ -6188,7 +6182,7 @@ dependencies = [ "pep508_rs", "pypi-types", "regex", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "serde_json", "tempfile", @@ -6216,7 +6210,7 @@ dependencies = [ "nanoid", "pypi-types", "rmp-serde", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "tempfile", "tracing", @@ -6283,7 +6277,7 @@ dependencies = [ "pep508_rs", "platform-tags", "pypi-types", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "serde_json", "thiserror", @@ -6305,7 +6299,7 @@ dependencies = [ "install-wheel-rs", "itertools 0.13.0", "pypi-types", - "rustc-hash 
2.0.0", + "rustc-hash", "tracing", "uv-build", "uv-cache", @@ -6338,7 +6332,7 @@ dependencies = [ "reqwest 0.12.5", "reqwest-middleware", "rmp-serde", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "tempfile", "thiserror", @@ -6373,7 +6367,7 @@ dependencies = [ "pypi-types", "rayon", "reqwest 0.12.5", - "rustc-hash 2.0.0", + "rustc-hash", "sha2", "thiserror", "tokio", @@ -6444,7 +6438,7 @@ dependencies = [ "platform-tags", "pypi-types", "rayon", - "rustc-hash 2.0.0", + "rustc-hash", "same-file", "tempfile", "thiserror", @@ -6575,7 +6569,7 @@ dependencies = [ "pypi-types", "requirements-txt", "rkyv", - "rustc-hash 2.0.0", + "rustc-hash", "same-file", "serde", "textwrap", @@ -6621,7 +6615,7 @@ dependencies = [ "pep440_rs", "pep508_rs", "pypi-types", - "rustc-hash 2.0.0", + "rustc-hash", "thiserror", "url", "uv-cache", @@ -6660,7 +6654,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.4.0#d9bd3bc7a536037ea8645fb7 dependencies = [ "anstream", "owo-colors", - "rustc-hash 2.0.0", + "rustc-hash", ] [[package]] @@ -6674,7 +6668,7 @@ dependencies = [ "pep440_rs", "pep508_rs", "pypi-types", - "rustc-hash 2.0.0", + "rustc-hash", "serde", "thiserror", "tokio", diff --git a/crates/pixi_manifest/src/channel.rs b/crates/pixi_manifest/src/channel.rs index 6b13f15cb..15b3ce641 100644 --- a/crates/pixi_manifest/src/channel.rs +++ b/crates/pixi_manifest/src/channel.rs @@ -1,5 +1,6 @@ use std::str::FromStr; +use itertools::Itertools; use rattler_conda_types::NamedChannelOrUrl; use serde::{de::Error, Deserialize, Deserializer}; use serde_with::serde_as; @@ -14,6 +15,26 @@ pub struct PrioritizedChannel { pub priority: Option, } +impl PrioritizedChannel { + /// The prioritized channels contain a priority, sort on this priority. + /// Higher priority comes first. 
[-10, 1, 0 ,2] -> [2, 1, 0, -10] + pub fn sort_channels_by_priority<'a, I>( + channels: I, + ) -> impl Iterator + where + I: IntoIterator, + { + channels + .into_iter() + .sorted_by(|a, b| { + let a = a.priority.unwrap_or(0); + let b = b.priority.unwrap_or(0); + b.cmp(&a) + }) + .map(|prioritized_channel| &prioritized_channel.channel) + } +} + impl From for PrioritizedChannel { fn from(value: NamedChannelOrUrl) -> Self { Self { @@ -58,7 +79,7 @@ impl<'de> Deserialize<'de> for TomlPrioritizedChannelStrOrMap { } /// Helper so that we can deserialize -/// [`crate::project::manifest::serde::PrioritizedChannel`] from a string or a +/// [`crate::channel::PrioritizedChannel`] from a string or a /// map. impl<'de> serde_with::DeserializeAs<'de, PrioritizedChannel> for TomlPrioritizedChannelStrOrMap { fn deserialize_as(deserializer: D) -> Result diff --git a/crates/pixi_manifest/src/features_ext.rs b/crates/pixi_manifest/src/features_ext.rs index 0b47f4f7c..a0723d80c 100644 --- a/crates/pixi_manifest/src/features_ext.rs +++ b/crates/pixi_manifest/src/features_ext.rs @@ -4,7 +4,7 @@ use indexmap::IndexSet; use rattler_conda_types::{NamedChannelOrUrl, Platform}; use rattler_solve::ChannelPriority; -use crate::{HasManifestRef, SpecType}; +use crate::{HasManifestRef, PrioritizedChannel, SpecType}; use crate::has_features_iter::HasFeaturesIter; use crate::{pypi::pypi_options::PypiOptions, SystemRequirements}; @@ -35,24 +35,12 @@ pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'sourc fn channels(&self) -> IndexSet<&'source NamedChannelOrUrl> { // Collect all the channels from the features in one set, // deduplicate them and sort them on feature index, default feature comes last. 
- let channels: IndexSet<_> = self - .features() - .flat_map(|feature| match &feature.channels { - Some(channels) => channels, - None => &self.manifest().parsed.project.channels, - }) - .collect(); + let channels = self.features().flat_map(|feature| match &feature.channels { + Some(channels) => channels, + None => &self.manifest().parsed.project.channels, + }); - // The prioritized channels contain a priority, sort on this priority. - // Higher priority comes first. [-10, 1, 0 ,2] -> [2, 1, 0, -10] - channels - .sorted_by(|a, b| { - let a = a.priority.unwrap_or(0); - let b = b.priority.unwrap_or(0); - b.cmp(&a) - }) - .map(|prioritized_channel| &prioritized_channel.channel) - .collect() + PrioritizedChannel::sort_channels_by_priority(channels).collect() } /// Returns the channel priority, error on multiple values, return None if no value is set. diff --git a/crates/pixi_manifest/src/lib.rs b/crates/pixi_manifest/src/lib.rs index f87dc0ab6..a972e7497 100644 --- a/crates/pixi_manifest/src/lib.rs +++ b/crates/pixi_manifest/src/lib.rs @@ -30,7 +30,7 @@ pub use crate::environments::Environments; pub use crate::parsed_manifest::{deserialize_package_map, ParsedManifest}; pub use crate::solve_group::{SolveGroup, SolveGroups}; pub use activation::Activation; -pub use channel::PrioritizedChannel; +pub use channel::{PrioritizedChannel, TomlPrioritizedChannelStrOrMap}; pub use environment::{Environment, EnvironmentName}; pub use feature::{Feature, FeatureName}; use itertools::Itertools; diff --git a/src/cli/global/common.rs b/src/cli/global/common.rs deleted file mode 100644 index 616cfa18c..000000000 --- a/src/cli/global/common.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::path::PathBuf; - -use miette::IntoDiagnostic; -use rattler_conda_types::{Channel, ChannelConfig, PackageName, PrefixRecord}; - -use crate::{prefix::Prefix, repodata}; -use pixi_config::home_path; - -/// Global binaries directory, default to `$HOME/.pixi/bin` -pub struct BinDir(pub PathBuf); - -impl BinDir { 
- /// Create the Binary Executable directory - pub async fn create() -> miette::Result { - let bin_dir = bin_dir().ok_or(miette::miette!( - "could not determine global binary executable directory" - ))?; - tokio::fs::create_dir_all(&bin_dir) - .await - .into_diagnostic()?; - Ok(Self(bin_dir)) - } - - /// Get the Binary Executable directory, erroring if it doesn't already - /// exist. - pub async fn from_existing() -> miette::Result { - let bin_dir = bin_dir().ok_or(miette::miette!( - "could not find global binary executable directory" - ))?; - if tokio::fs::try_exists(&bin_dir).await.into_diagnostic()? { - Ok(Self(bin_dir)) - } else { - Err(miette::miette!( - "binary executable directory does not exist" - )) - } - } -} - -/// Global binary environments directory, default to `$HOME/.pixi/envs` -pub struct BinEnvDir(pub PathBuf); - -impl BinEnvDir { - /// Construct the path to the env directory for the binary package - /// `package_name`. - fn package_bin_env_dir(package_name: &PackageName) -> miette::Result { - Ok(bin_env_dir() - .ok_or(miette::miette!( - "could not find global binary environment directory" - ))? - .join(package_name.as_normalized())) - } - - /// Get the Binary Environment directory, erroring if it doesn't already - /// exist. - pub async fn from_existing(package_name: &PackageName) -> miette::Result { - let bin_env_dir = Self::package_bin_env_dir(package_name)?; - if tokio::fs::try_exists(&bin_env_dir) - .await - .into_diagnostic()? 
- { - Ok(Self(bin_env_dir)) - } else { - Err(miette::miette!( - "could not find environment for package {}", - package_name.as_source() - )) - } - } - - /// Create the Binary Environment directory - pub async fn create(package_name: &PackageName) -> miette::Result { - let bin_env_dir = Self::package_bin_env_dir(package_name)?; - tokio::fs::create_dir_all(&bin_env_dir) - .await - .into_diagnostic()?; - Ok(Self(bin_env_dir)) - } -} - -/// Global binaries directory, default to `$HOME/.pixi/bin` -/// -/// # Returns -/// -/// The global binaries directory -pub(crate) fn bin_dir() -> Option { - home_path().map(|path| path.join("bin")) -} - -/// Global binary environments directory, default to `$HOME/.pixi/envs` -/// -/// # Returns -/// -/// The global binary environments directory -pub(crate) fn bin_env_dir() -> Option { - home_path().map(|path| path.join("envs")) -} - -/// Get the friendly channel name of a [`PrefixRecord`] -/// -/// # Returns -/// -/// The friendly channel name of the given prefix record -pub(super) fn channel_name_from_prefix( - prefix_package: &PrefixRecord, - channel_config: &ChannelConfig, -) -> String { - Channel::from_str(&prefix_package.repodata_record.channel, channel_config) - .map(|ch| repodata::friendly_channel_name(&ch)) - .unwrap_or_else(|_| prefix_package.repodata_record.channel.clone()) -} - -/// Find the globally installed package with the given [`PackageName`] -/// -/// # Returns -/// -/// The PrefixRecord of the installed package -pub(super) async fn find_installed_package( - package_name: &PackageName, -) -> miette::Result { - let BinEnvDir(bin_prefix) = BinEnvDir::from_existing(package_name).await.or_else(|_| { - miette::bail!( - "Package {} is not globally installed", - package_name.as_source() - ) - })?; - let prefix = Prefix::new(bin_prefix); - find_designated_package(&prefix, package_name).await -} - -/// Find the designated package in the given [`Prefix`] -/// -/// # Returns -/// -/// The PrefixRecord of the designated package 
-pub async fn find_designated_package( - prefix: &Prefix, - package_name: &PackageName, -) -> miette::Result { - let prefix_records = prefix.find_installed_packages(None).await?; - prefix_records - .into_iter() - .find(|r| r.repodata_record.package_record.name == *package_name) - .ok_or_else(|| miette::miette!("could not find {} in prefix", package_name.as_source())) -} diff --git a/src/cli/global/install.rs b/src/cli/global/install.rs index d87e23224..2cbe4fd1a 100644 --- a/src/cli/global/install.rs +++ b/src/cli/global/install.rs @@ -1,36 +1,8 @@ -use std::{ - collections::HashMap, - ffi::OsStr, - path::{Path, PathBuf}, -}; - use clap::Parser; -use indexmap::IndexMap; -use itertools::Itertools; -use miette::{Context, IntoDiagnostic}; -use pixi_utils::reqwest::build_reqwest_clients; -use rattler::{ - install::{DefaultProgressFormatter, IndicatifReporter, Installer}, - package_cache::PackageCache, -}; -use rattler_conda_types::{ - GenericVirtualPackage, MatchSpec, PackageName, Platform, PrefixRecord, RepoDataRecord, -}; -use rattler_shell::{ - activation::{ActivationVariables, Activator, PathModificationBehavior}, - shell::{Shell, ShellEnum}, -}; -use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; -use rattler_virtual_packages::VirtualPackage; -use reqwest_middleware::ClientWithMiddleware; +use rattler_conda_types::Platform; -use super::common::{channel_name_from_prefix, find_designated_package, BinDir, BinEnvDir}; -use crate::{ - cli::cli_config::ChannelsConfig, cli::has_specs::HasSpecs, prefix::Prefix, - rlimit::try_increase_rlimit_to_sensible, -}; -use pixi_config::{self, Config, ConfigCli}; -use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; +use crate::{cli::cli_config::ChannelsConfig, cli::has_specs::HasSpecs}; +use pixi_config::{self, ConfigCli}; /// Installs the defined package in a global accessible location. 
#[derive(Parser, Debug)] @@ -56,447 +28,7 @@ impl HasSpecs for Args { } } -/// Create the environment activation script -fn create_activation_script(prefix: &Prefix, shell: ShellEnum) -> miette::Result { - let activator = - Activator::from_path(prefix.root(), shell, Platform::current()).into_diagnostic()?; - let result = activator - .activation(ActivationVariables { - conda_prefix: None, - path: None, - path_modification_behavior: PathModificationBehavior::Prepend, - }) - .into_diagnostic()?; - - // Add a shebang on unix based platforms - let script = if cfg!(unix) { - format!("#!/bin/sh\n{}", result.script.contents().into_diagnostic()?) - } else { - result.script.contents().into_diagnostic()? - }; - - Ok(script) -} - -fn is_executable(prefix: &Prefix, relative_path: &Path) -> bool { - // Check if the file is in a known executable directory. - let binary_folders = if cfg!(windows) { - &([ - "", - "Library/mingw-w64/bin/", - "Library/usr/bin/", - "Library/bin/", - "Scripts/", - "bin/", - ][..]) - } else { - &(["bin"][..]) - }; - - let parent_folder = match relative_path.parent() { - Some(dir) => dir, - None => return false, - }; - - if !binary_folders - .iter() - .any(|bin_path| Path::new(bin_path) == parent_folder) - { - return false; - } - - // Check if the file is executable - let absolute_path = prefix.root().join(relative_path); - is_executable::is_executable(absolute_path) -} - -/// Find the executable scripts within the specified package installed in this -/// conda prefix. -fn find_executables<'a>(prefix: &Prefix, prefix_package: &'a PrefixRecord) -> Vec<&'a Path> { - prefix_package - .files - .iter() - .filter(|relative_path| is_executable(prefix, relative_path)) - .map(|buf| buf.as_ref()) - .collect() -} - -/// Mapping from an executable in a package environment to its global binary -/// script location. 
-#[derive(Debug)] -pub struct BinScriptMapping<'a> { - pub original_executable: &'a Path, - pub global_binary_path: PathBuf, -} - -/// For each executable provided, map it to the installation path for its global -/// binary script. -async fn map_executables_to_global_bin_scripts<'a>( - package_executables: &[&'a Path], - bin_dir: &BinDir, -) -> miette::Result>> { - #[cfg(target_family = "windows")] - let extensions_list: Vec = if let Ok(pathext) = std::env::var("PATHEXT") { - pathext.split(';').map(|s| s.to_lowercase()).collect() - } else { - tracing::debug!("Could not find 'PATHEXT' variable, using a default list"); - [ - ".COM", ".EXE", ".BAT", ".CMD", ".VBS", ".VBE", ".JS", ".JSE", ".WSF", ".WSH", ".MSC", - ".CPL", - ] - .iter() - .map(|&s| s.to_lowercase()) - .collect() - }; - - #[cfg(target_family = "unix")] - // TODO: Find if there are more relevant cases, these cases are generated by our big friend - // GPT-4 - let extensions_list: Vec = vec![ - ".sh", ".bash", ".zsh", ".csh", ".tcsh", ".ksh", ".fish", ".py", ".pl", ".rb", ".lua", - ".php", ".tcl", ".awk", ".sed", - ] - .iter() - .map(|&s| s.to_owned()) - .collect(); - - let BinDir(bin_dir) = bin_dir; - let mut mappings = vec![]; - - for exec in package_executables.iter() { - // Remove the extension of a file if it is in the list of known extensions. - let Some(file_name) = exec - .file_name() - .and_then(OsStr::to_str) - .map(str::to_lowercase) - else { - continue; - }; - let file_name = extensions_list - .iter() - .find_map(|ext| file_name.strip_suffix(ext)) - .unwrap_or(file_name.as_str()); - - let mut executable_script_path = bin_dir.join(file_name); - - if cfg!(windows) { - executable_script_path.set_extension("bat"); - }; - mappings.push(BinScriptMapping { - original_executable: exec, - global_binary_path: executable_script_path, - }); - } - Ok(mappings) -} - -/// Find all executable scripts in a package and map them to their global -/// install paths. 
-/// -/// (Convenience wrapper around `find_executables` and -/// `map_executables_to_global_bin_scripts` which are generally used together.) -pub(super) async fn find_and_map_executable_scripts<'a>( - prefix: &Prefix, - prefix_package: &'a PrefixRecord, - bin_dir: &BinDir, -) -> miette::Result>> { - let executables = find_executables(prefix, prefix_package); - map_executables_to_global_bin_scripts(&executables, bin_dir).await -} - -/// Create the executable scripts by modifying the activation script -/// to activate the environment and run the executable. -pub(super) async fn create_executable_scripts( - mapped_executables: &[BinScriptMapping<'_>], - prefix: &Prefix, - shell: &ShellEnum, - activation_script: String, -) -> miette::Result<()> { - for BinScriptMapping { - original_executable: exec, - global_binary_path: executable_script_path, - } in mapped_executables - { - let mut script = activation_script.clone(); - shell - .run_command( - &mut script, - [ - format!("\"{}\"", prefix.root().join(exec).to_string_lossy()).as_str(), - get_catch_all_arg(shell), - ], - ) - .expect("should never fail"); - - if matches!(shell, ShellEnum::CmdExe(_)) { - // wrap the script contents in `@echo off` and `setlocal` to prevent echoing the - // script and to prevent leaking environment variables into the - // parent shell (e.g. 
PATH would grow longer and longer) - script = format!("@echo off\nsetlocal\n{}\nendlocal", script); - } - - tokio::fs::write(&executable_script_path, script) - .await - .into_diagnostic()?; - - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - std::fs::set_permissions( - executable_script_path, - std::fs::Permissions::from_mode(0o755), - ) - .into_diagnostic()?; - } - } - Ok(()) -} - -/// Warn user on dangerous package installations, interactive yes no prompt -pub(crate) fn prompt_user_to_continue( - packages: &IndexMap, -) -> miette::Result { - let dangerous_packages = HashMap::from([ - ("pixi", "Installing `pixi` globally doesn't work as expected.\nUse `pixi self-update` to update pixi and `pixi self-update --version x.y.z` for a specific version."), - ("pip", "Installing `pip` with `pixi global` won't make pip-installed packages globally available.\nInstead, use a pixi project and add PyPI packages with `pixi add --pypi`, which is recommended. Alternatively, `pixi add pip` and use it within the project.") - ]); - - // Check if any of the packages are dangerous, and prompt the user to ask if - // they want to continue, including the advice. - for (name, _spec) in packages { - if let Some(advice) = dangerous_packages.get(&name.as_normalized()) { - let prompt = format!( - "{}\nDo you want to continue?", - console::style(advice).yellow() - ); - if !dialoguer::Confirm::new() - .with_prompt(prompt) - .default(false) - .show_default(true) - .interact() - .into_diagnostic()? - { - return Ok(false); - } - } - } - - Ok(true) -} - /// Install a global command -pub async fn execute(args: Args) -> miette::Result<()> { - // Figure out what channels we are using - let config = Config::with_cli_config(&args.config); - let channels = args.channels.resolve_from_config(&config); - - let specs = args.specs()?; - - // Warn user on dangerous package installations, interactive yes no prompt - if !prompt_user_to_continue(&specs)? 
{ - return Ok(()); - } - - // Fetch the repodata - let (_, auth_client) = build_reqwest_clients(Some(&config)); - - let gateway = config.gateway(auth_client.clone()); - - let repodata = gateway - .query( - channels, - [args.platform, Platform::NoArch], - specs.values().cloned().collect_vec(), - ) - .recursive(true) - .await - .into_diagnostic()?; - - // Determine virtual packages of the current platform - let virtual_packages = VirtualPackage::current() - .into_diagnostic() - .context("failed to determine virtual packages")? - .iter() - .cloned() - .map(GenericVirtualPackage::from) - .collect(); - - // Solve the environment - let solver_specs = specs.clone(); - let solved_records = wrap_in_progress("solving environment", move || { - Solver.solve(SolverTask { - specs: solver_specs.values().cloned().collect_vec(), - virtual_packages, - ..SolverTask::from_iter(&repodata) - }) - }) - .into_diagnostic() - .context("failed to solve environment")?; - - // Install the package(s) - let mut executables = vec![]; - for (package_name, _) in specs { - let (prefix_package, scripts, _) = globally_install_package( - &package_name, - solved_records.clone(), - auth_client.clone(), - args.platform, - ) - .await?; - let channel_name = - channel_name_from_prefix(&prefix_package, config.global_channel_config()); - let record = &prefix_package.repodata_record.package_record; - - // Warn if no executables were created for the package - if scripts.is_empty() { - eprintln!( - "{}No executable entrypoint found in package {}, are you sure it exists?", - console::style(console::Emoji("⚠️", "")).yellow().bold(), - console::style(record.name.as_source()).bold() - ); - } - - eprintln!( - "{}Installed package {} {} {} from {}", - console::style(console::Emoji("✔ ", "")).green(), - console::style(record.name.as_source()).bold(), - console::style(record.version.version()).bold(), - console::style(record.build.as_str()).bold(), - channel_name, - ); - - executables.extend(scripts); - } - - if 
!executables.is_empty() { - print_executables_available(executables).await?; - } - - Ok(()) -} - -async fn print_executables_available(executables: Vec) -> miette::Result<()> { - let BinDir(bin_dir) = BinDir::from_existing().await?; - let whitespace = console::Emoji(" ", "").to_string(); - let executable = executables - .into_iter() - .map(|path| { - path.strip_prefix(&bin_dir) - .expect("script paths were constructed by joining onto BinDir") - .to_string_lossy() - .to_string() - }) - .join(&format!("\n{whitespace} - ")); - - if is_bin_folder_on_path().await { - eprintln!( - "{whitespace}These executables are now globally available:\n{whitespace} - {executable}", - ) - } else { - eprintln!("{whitespace}These executables have been added to {}\n{whitespace} - {executable}\n\n{} To use them, make sure to add {} to your PATH", - console::style(&bin_dir.display()).bold(), - console::style("!").yellow().bold(), - console::style(&bin_dir.display()).bold() - ) - } - - Ok(()) -} - -/// Install given package globally, with all its dependencies -pub(super) async fn globally_install_package( - package_name: &PackageName, - records: Vec, - authenticated_client: ClientWithMiddleware, - platform: Platform, -) -> miette::Result<(PrefixRecord, Vec, bool)> { - try_increase_rlimit_to_sensible(); - - // Create the binary environment prefix where we install or update the package - let BinEnvDir(bin_prefix) = BinEnvDir::create(package_name).await?; - let prefix = Prefix::new(bin_prefix); - - // Install the environment - let package_cache = PackageCache::new( - pixi_config::get_cache_dir()?.join(pixi_consts::consts::CONDA_PACKAGE_CACHE_DIR), - ); - - let result = await_in_progress("creating virtual environment", |pb| { - Installer::new() - .with_download_client(authenticated_client) - .with_io_concurrency_limit(100) - .with_execute_link_scripts(false) - .with_package_cache(package_cache) - .with_target_platform(platform) - .with_reporter( - IndicatifReporter::builder() - 
.with_multi_progress(global_multi_progress()) - .with_placement(rattler::install::Placement::After(pb)) - .with_formatter(DefaultProgressFormatter::default().with_prefix(" ")) - .clear_when_done(true) - .finish(), - ) - .install(prefix.root(), records) - }) - .await - .into_diagnostic()?; - - // Find the installed package in the environment - let prefix_package = find_designated_package(&prefix, package_name).await?; - - // Determine the shell to use for the invocation script - let shell: ShellEnum = if cfg!(windows) { - rattler_shell::shell::CmdExe.into() - } else { - rattler_shell::shell::Bash.into() - }; - - // Construct the reusable activation script for the shell and generate an - // invocation script for each executable added by the package to the - // environment. - let activation_script = create_activation_script(&prefix, shell.clone())?; - - let bin_dir = BinDir::create().await?; - let script_mapping = - find_and_map_executable_scripts(&prefix, &prefix_package, &bin_dir).await?; - create_executable_scripts(&script_mapping, &prefix, &shell, activation_script).await?; - - let scripts: Vec<_> = script_mapping - .into_iter() - .map( - |BinScriptMapping { - global_binary_path: path, - .. - }| path, - ) - .collect(); - - Ok(( - prefix_package, - scripts, - result.transaction.operations.is_empty(), - )) -} - -/// Returns the string to add for all arguments passed to the script -fn get_catch_all_arg(shell: &ShellEnum) -> &str { - match shell { - ShellEnum::CmdExe(_) => "%*", - ShellEnum::PowerShell(_) => "@args", - _ => "\"$@\"", - } -} - -/// Returns true if the bin folder is available on the PATH. 
-async fn is_bin_folder_on_path() -> bool { - let bin_path = match BinDir::from_existing().await.ok() { - Some(BinDir(bin_dir)) => bin_dir, - None => return false, - }; - - std::env::var_os("PATH") - .map(|path| std::env::split_paths(&path).collect_vec()) - .unwrap_or_default() - .into_iter() - .contains(&bin_path) +pub async fn execute(_args: Args) -> miette::Result<()> { + todo!() } diff --git a/src/cli/global/list.rs b/src/cli/global/list.rs index ea6212a63..a801f2a68 100644 --- a/src/cli/global/list.rs +++ b/src/cli/global/list.rs @@ -1,159 +1,9 @@ -use std::collections::HashSet; -use std::str::FromStr; - use clap::Parser; -use itertools::Itertools; -use miette::IntoDiagnostic; -use rattler_conda_types::PackageName; - -use crate::prefix::Prefix; -use pixi_config::home_path; - -use super::common::{bin_env_dir, find_designated_package, BinDir, BinEnvDir}; -use super::install::{find_and_map_executable_scripts, BinScriptMapping}; /// Lists all packages previously installed into a globally accessible location via `pixi global install`. 
#[derive(Parser, Debug)] pub struct Args {} -#[derive(Debug)] -struct InstalledPackageInfo { - /// The name of the installed package - name: PackageName, - - /// The binaries installed by this package - binaries: Vec, - - /// The version of the installed package - version: String, -} - -fn print_no_packages_found_message() { - eprintln!( - "{} No globally installed binaries found", - console::style("!").yellow().bold() - ) -} - pub async fn execute(_args: Args) -> miette::Result<()> { - let packages = list_global_packages().await?; - - let mut package_info = vec![]; - - for package_name in packages { - let Ok(BinEnvDir(bin_env_prefix)) = BinEnvDir::from_existing(&package_name).await else { - print_no_packages_found_message(); - return Ok(()); - }; - let prefix = Prefix::new(bin_env_prefix); - - let Ok(bin_prefix) = BinDir::from_existing().await else { - print_no_packages_found_message(); - return Ok(()); - }; - - // Find the installed package in the environment - let prefix_package = find_designated_package(&prefix, &package_name).await?; - - let binaries: Vec<_> = - find_and_map_executable_scripts(&prefix, &prefix_package, &bin_prefix) - .await? - .into_iter() - .map( - |BinScriptMapping { - global_binary_path: path, - .. - }| { - path.strip_prefix(&bin_prefix.0) - .expect("script paths were constructed by joining onto BinDir") - .to_string_lossy() - .to_string() - }, - ) - // Collecting to a HashSet first is a workaround for issue #317 and can be removed - // once that is fixed. 
- .collect::>() - .into_iter() - .collect(); - - let version = prefix_package - .repodata_record - .package_record - .version - .to_string(); - package_info.push(InstalledPackageInfo { - name: package_name, - binaries, - version, - }); - } - - if package_info.is_empty() { - print_no_packages_found_message(); - } else { - let path = home_path().ok_or(miette::miette!("Could not determine home directory"))?; - let len = package_info.len(); - let mut message = String::new(); - for (idx, pkgi) in package_info.into_iter().enumerate() { - let last = (idx + 1) == len; - let no_binary = pkgi.binaries.is_empty(); - - if last { - message.push_str("└──"); - } else { - message.push_str("├──"); - } - - message.push_str(&format!( - " {} {}", - console::style(&pkgi.name.as_source()).bold().magenta(), - console::style(&pkgi.version).bright().black() - )); - - if !no_binary { - let p = if last { " " } else { "|" }; - message.push_str(&format!( - "\n{} └─ exec: {}", - p, - pkgi.binaries - .iter() - .map(|x| console::style(x).green()) - .join(", ") - )); - } - - if !last { - message.push('\n'); - } - } - - eprintln!("Global install location: {}\n{}", path.display(), message); - } - - Ok(()) -} - -/// List all globally installed packages -/// -/// # Returns -/// -/// A list of all globally installed packages represented as [`PackageName`]s -pub(super) async fn list_global_packages() -> miette::Result> { - let mut packages = vec![]; - let bin_env_dir = - bin_env_dir().ok_or(miette::miette!("Could not determine global envs directory"))?; - let Ok(mut dir_contents) = tokio::fs::read_dir(bin_env_dir).await else { - return Ok(vec![]); - }; - - while let Some(entry) = dir_contents.next_entry().await.into_diagnostic()? 
{ - if entry.file_type().await.into_diagnostic()?.is_dir() { - if let Ok(name) = PackageName::from_str(entry.file_name().to_string_lossy().as_ref()) { - packages.push(name); - } - } - } - - packages.sort(); - Ok(packages) + todo!() } diff --git a/src/cli/global/mod.rs b/src/cli/global/mod.rs index 0bc94513f..fcc901b43 100644 --- a/src/cli/global/mod.rs +++ b/src/cli/global/mod.rs @@ -1,24 +1,23 @@ use clap::Parser; -mod common; mod install; mod list; mod remove; -mod upgrade; -mod upgrade_all; +mod sync; #[derive(Debug, Parser)] pub enum Command { + // TODO: Needs to adapted #[clap(visible_alias = "i")] Install(install::Args), + // TODO: Needs to adapted #[clap(visible_alias = "rm")] Remove(remove::Args), + // TODO: Needs to adapted #[clap(visible_alias = "ls")] List(list::Args), - #[clap(visible_alias = "u")] - Upgrade(upgrade::Args), - #[clap(visible_alias = "ua")] - UpgradeAll(upgrade_all::Args), + #[clap(visible_alias = "s")] + Sync(sync::Args), } /// Subcommand for global package management actions @@ -38,8 +37,7 @@ pub async fn execute(cmd: Args) -> miette::Result<()> { Command::Install(args) => install::execute(args).await?, Command::Remove(args) => remove::execute(args).await?, Command::List(args) => list::execute(args).await?, - Command::Upgrade(args) => upgrade::execute(args).await?, - Command::UpgradeAll(args) => upgrade_all::execute(args).await?, + Command::Sync(args) => sync::execute(args).await?, }; Ok(()) } diff --git a/src/cli/global/remove.rs b/src/cli/global/remove.rs index 020574d41..d44ba1bf7 100644 --- a/src/cli/global/remove.rs +++ b/src/cli/global/remove.rs @@ -1,16 +1,7 @@ -use std::collections::HashSet; - use clap::Parser; -use clap_verbosity_flag::{Level, Verbosity}; -use itertools::Itertools; -use miette::IntoDiagnostic; -use rattler_conda_types::PackageName; +use clap_verbosity_flag::Verbosity; use crate::cli::has_specs::HasSpecs; -use crate::prefix::Prefix; - -use super::common::{find_designated_package, BinDir, BinEnvDir}; -use 
super::install::{find_and_map_executable_scripts, BinScriptMapping}; /// Removes a package previously installed into a globally accessible location via `pixi global install`. #[derive(Parser, Debug)] @@ -30,90 +21,6 @@ impl HasSpecs for Args { } } -pub async fn execute(args: Args) -> miette::Result<()> { - for (package_name, _) in args.specs()? { - remove_global_package(package_name, &args.verbose).await?; - } - - Ok(()) -} - -async fn remove_global_package( - package_name: PackageName, - verbose: &Verbosity, -) -> miette::Result<()> { - let BinEnvDir(bin_prefix) = BinEnvDir::from_existing(&package_name).await?; - let prefix = Prefix::new(bin_prefix.clone()); - - // Find the installed package in the environment - let prefix_package = find_designated_package(&prefix, &package_name).await?; - - // Construct the paths to all the installed package executables, which are what we need to remove. - let paths_to_remove: Vec<_> = - find_and_map_executable_scripts(&prefix, &prefix_package, &BinDir::from_existing().await?) - .await? - .into_iter() - .map( - |BinScriptMapping { - global_binary_path: path, - .. - }| path, - ) - // Collecting to a HashSet first is a workaround for issue #317 and can be removed - // once that is fixed. 
- .collect::>() - .into_iter() - .collect(); - - let dirs_to_remove: Vec<_> = vec![bin_prefix]; - - if verbose.log_level().unwrap_or(Level::Error) >= Level::Warn { - let whitespace = console::Emoji(" ", "").to_string(); - let names_to_remove = dirs_to_remove - .iter() - .map(|dir| dir.to_string_lossy()) - .chain(paths_to_remove.iter().map(|path| path.to_string_lossy())) - .join(&format!("\n{whitespace} - ")); - - eprintln!( - "{} Removing the following files and directories:\n{whitespace} - {names_to_remove}", - console::style("!").yellow().bold(), - ) - } - - let mut errors = vec![]; - - for file in paths_to_remove { - if let Err(e) = tokio::fs::remove_file(&file).await.into_diagnostic() { - errors.push((file, e)) - } - } - - for dir in dirs_to_remove { - if let Err(e) = tokio::fs::remove_dir_all(&dir).await.into_diagnostic() { - errors.push((dir, e)) - } - } - - if errors.is_empty() { - eprintln!( - "{}Successfully removed global package {}", - console::style(console::Emoji("✔ ", "")).green(), - console::style(package_name.as_source()).bold(), - ); - } else { - let whitespace = console::Emoji(" ", "").to_string(); - let error_string = errors - .into_iter() - .map(|(file, e)| format!("{} (on {})", e, file.to_string_lossy())) - .join(&format!("\n{whitespace} - ")); - miette::bail!( - "got multiple errors trying to remove global package {}:\n{} - {}", - package_name.as_source(), - whitespace, - error_string, - ); - } - - Ok(()) +pub async fn execute(_args: Args) -> miette::Result<()> { + todo!() } diff --git a/src/cli/global/sync.rs b/src/cli/global/sync.rs new file mode 100644 index 000000000..94bd1584e --- /dev/null +++ b/src/cli/global/sync.rs @@ -0,0 +1,53 @@ +use crate::global::{self, BinDir, EnvRoot}; +use clap::Parser; +use pixi_config::{Config, ConfigCli}; +use pixi_utils::reqwest::build_reqwest_clients; + +/// Sync global manifest with installed environments +#[derive(Parser, Debug)] +pub struct Args { + #[clap(flatten)] + config: ConfigCli, +} + +/// Sync 
global manifest with installed environments +pub async fn execute(args: Args) -> miette::Result<()> { + let config = Config::with_cli_config(&args.config); + + // Check if the certain file is present + let certain_file_path = global::Project::manifest_dir()?.join(global::MANIFEST_DEFAULT_NAME); + if !certain_file_path.exists() { + eprintln!( + "This will remove your existing global installation. Do you want to continue? (y/N): " + ); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let input = input.trim().to_lowercase(); + + if input != "y" { + eprintln!("Operation aborted."); + return Ok(()); + } + } + + let project = global::Project::discover()?.with_cli_config(config.clone()); + + // Fetch the repodata + let (_, auth_client) = build_reqwest_clients(Some(&config)); + + let gateway = config.gateway(auth_client.clone()); + + let env_root = EnvRoot::from_env().await?; + let bin_dir = BinDir::from_env().await?; + + global::sync( + &env_root, + &project, + &bin_dir, + &config, + &gateway, + &auth_client, + ) + .await +} diff --git a/src/cli/global/upgrade.rs b/src/cli/global/upgrade.rs deleted file mode 100644 index dfbd2c85b..000000000 --- a/src/cli/global/upgrade.rs +++ /dev/null @@ -1,215 +0,0 @@ -use std::{collections::HashMap, sync::Arc, time::Duration}; - -use clap::Parser; -use indexmap::IndexMap; -use indicatif::ProgressBar; -use itertools::Itertools; -use miette::{Context, IntoDiagnostic, Report}; -use pixi_utils::reqwest::build_reqwest_clients; -use rattler_conda_types::{Channel, GenericVirtualPackage, MatchSpec, PackageName, Platform}; -use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; -use rattler_virtual_packages::VirtualPackage; -use tokio::task::JoinSet; - -use super::{common::find_installed_package, install::globally_install_package}; -use crate::cli::{cli_config::ChannelsConfig, has_specs::HasSpecs}; -use pixi_config::Config; -use pixi_progress::{global_multi_progress, long_running_progress_style, 
wrap_in_progress}; - -/// Upgrade specific package which is installed globally. -#[derive(Parser, Debug)] -#[clap(arg_required_else_help = true)] -pub struct Args { - /// Specifies the packages to upgrade. - #[arg(required = true)] - pub specs: Vec, - - #[clap(flatten)] - channels: ChannelsConfig, - - /// The platform to install the package for. - #[clap(long, default_value_t = Platform::current())] - platform: Platform, -} - -impl HasSpecs for Args { - fn packages(&self) -> Vec<&str> { - self.specs.iter().map(AsRef::as_ref).collect() - } -} - -pub async fn execute(args: Args) -> miette::Result<()> { - let config = Config::load_global(); - let specs = args.specs()?; - upgrade_packages(specs, config, &args.channels, args.platform).await -} - -pub(super) async fn upgrade_packages( - specs: IndexMap, - config: Config, - cli_channels: &ChannelsConfig, - platform: Platform, -) -> miette::Result<()> { - let channel_cli = cli_channels.resolve_from_config(&config); - - // Get channels and version of globally installed packages in parallel - let mut channels = HashMap::with_capacity(specs.len()); - let mut versions = HashMap::with_capacity(specs.len()); - let mut set: JoinSet> = JoinSet::new(); - for package_name in specs.keys().cloned() { - let channel_config = config.global_channel_config().clone(); - set.spawn(async move { - let p = find_installed_package(&package_name).await?; - let channel = - Channel::from_str(p.repodata_record.channel, &channel_config).into_diagnostic()?; - let version = p.repodata_record.package_record.version.into_version(); - Ok((package_name, channel, version)) - }); - } - while let Some(data) = set.join_next().await { - let (package_name, channel, version) = data.into_diagnostic()??; - channels.insert(package_name.clone(), channel); - versions.insert(package_name, version); - } - - // Fetch repodata across all channels - - // Start by aggregating all channels that we need to iterate - let all_channels: Vec = channels - .values() - .cloned() - 
.chain(channel_cli.iter().cloned()) - .unique() - .collect(); - - // Now ask gateway to query repodata for these channels - let (_, authenticated_client) = build_reqwest_clients(Some(&config)); - let gateway = config.gateway(authenticated_client.clone()); - let repodata = gateway - .query( - all_channels, - [platform, Platform::NoArch], - specs.values().cloned().collect_vec(), - ) - .recursive(true) - .await - .into_diagnostic()?; - - // Resolve environments in parallel - let mut set: JoinSet> = JoinSet::new(); - - // Create arcs for these structs - // as they later will be captured by closure - let repodata = Arc::new(repodata); - let config = Arc::new(config); - let channel_cli = Arc::new(channel_cli); - let channels = Arc::new(channels); - - for (package_name, package_matchspec) in specs { - let repodata = repodata.clone(); - let config = config.clone(); - let channel_cli = channel_cli.clone(); - let channels = channels.clone(); - - set.spawn_blocking(move || { - // Filter repodata based on channels specific to the package (and from the CLI) - let specific_repodata: Vec<_> = repodata - .iter() - .filter_map(|repodata| { - let filtered: Vec<_> = repodata - .iter() - .filter(|item| { - let item_channel = - Channel::from_str(&item.channel, config.global_channel_config()) - .expect("should be parseable"); - channel_cli.contains(&item_channel) - || channels - .get(&package_name) - .map_or(false, |c| c == &item_channel) - }) - .collect(); - - (!filtered.is_empty()).then_some(filtered) - }) - .collect(); - - // Determine virtual packages of the current platform - let virtual_packages = VirtualPackage::current() - .into_diagnostic() - .context("failed to determine virtual packages")? 
- .iter() - .cloned() - .map(GenericVirtualPackage::from) - .collect(); - - // Solve the environment - let solver_matchspec = package_matchspec.clone(); - let solved_records = wrap_in_progress("solving environment", move || { - Solver.solve(SolverTask { - specs: vec![solver_matchspec], - virtual_packages, - ..SolverTask::from_iter(specific_repodata) - }) - }) - .into_diagnostic() - .context("failed to solve environment")?; - - Ok((package_name, package_matchspec.clone(), solved_records)) - }); - } - - // Upgrade each package when relevant - let mut upgraded = false; - while let Some(data) = set.join_next().await { - let (package_name, package_matchspec, records) = data.into_diagnostic()??; - let toinstall_version = records - .iter() - .find(|r| r.package_record.name == package_name) - .map(|p| p.package_record.version.version().to_owned()) - .ok_or_else(|| { - miette::miette!( - "Package {} not found in the specified channels", - package_name.as_normalized() - ) - })?; - let installed_version = versions - .get(&package_name) - .expect("should have the installed version") - .to_owned(); - - // Perform upgrade if a specific version was requested - // OR if a more recent version is available - if package_matchspec.version.is_some() || toinstall_version > installed_version { - let message = format!( - "{} v{} -> v{}", - package_name.as_normalized(), - installed_version, - toinstall_version - ); - - let pb = global_multi_progress().add(ProgressBar::new_spinner()); - pb.enable_steady_tick(Duration::from_millis(100)); - pb.set_style(long_running_progress_style()); - pb.set_message(format!( - "{} {}", - console::style("Updating").green(), - message - )); - globally_install_package( - &package_name, - records, - authenticated_client.clone(), - platform, - ) - .await?; - pb.finish_with_message(format!("{} {}", console::style("Updated").green(), message)); - upgraded = true; - } - } - - if !upgraded { - eprintln!("Nothing to upgrade"); - } - - Ok(()) -} diff --git 
a/src/cli/global/upgrade_all.rs b/src/cli/global/upgrade_all.rs deleted file mode 100644 index fa7f3efcb..000000000 --- a/src/cli/global/upgrade_all.rs +++ /dev/null @@ -1,42 +0,0 @@ -use clap::Parser; -use indexmap::IndexMap; - -use rattler_conda_types::{MatchSpec, Platform}; - -use pixi_config::{Config, ConfigCli}; - -use crate::cli::cli_config::ChannelsConfig; - -use super::{list::list_global_packages, upgrade::upgrade_packages}; - -/// Upgrade all globally installed packages -#[derive(Parser, Debug)] -pub struct Args { - #[clap(flatten)] - channels: ChannelsConfig, - - #[clap(flatten)] - config: ConfigCli, - - /// The platform to install the package for. - #[clap(long, default_value_t = Platform::current())] - platform: Platform, -} - -pub async fn execute(args: Args) -> miette::Result<()> { - let config = Config::with_cli_config(&args.config); - - let names = list_global_packages().await?; - let mut specs = IndexMap::with_capacity(names.len()); - for name in names { - specs.insert( - name.clone(), - MatchSpec { - name: Some(name), - ..Default::default() - }, - ); - } - - upgrade_packages(specs, config, &args.channels, args.platform).await -} diff --git a/src/global/common.rs b/src/global/common.rs new file mode 100644 index 000000000..6a23a13dc --- /dev/null +++ b/src/global/common.rs @@ -0,0 +1,296 @@ +use std::path::{Path, PathBuf}; + +use itertools::Itertools; +use miette::{Context, IntoDiagnostic}; +use pixi_progress::{await_in_progress, global_multi_progress}; +use rattler::{ + install::{DefaultProgressFormatter, IndicatifReporter, Installer}, + package_cache::PackageCache, +}; +use rattler_conda_types::{ + Channel, ChannelConfig, PackageName, Platform, PrefixRecord, RepoDataRecord, +}; +use rattler_shell::{ + activation::{ActivationVariables, Activator, PathModificationBehavior}, + shell::ShellEnum, +}; +use reqwest_middleware::ClientWithMiddleware; + +use crate::{ + cli::project::environment, prefix::Prefix, repodata, 
rlimit::try_increase_rlimit_to_sensible, +}; +use pixi_config::home_path; + +use super::{EnvironmentName, ExposedKey}; + +/// Global binaries directory, default to `$HOME/.pixi/bin` +pub struct BinDir(PathBuf); + +impl BinDir { + /// Create the Binary Executable directory + pub async fn from_env() -> miette::Result { + let bin_dir = home_path() + .map(|path| path.join("bin")) + .ok_or(miette::miette!( + "could not determine global binary executable directory" + ))?; + tokio::fs::create_dir_all(&bin_dir) + .await + .into_diagnostic()?; + Ok(Self(bin_dir)) + } + + /// Asynchronously retrieves all files in the Binary Executable directory. + /// + /// This function reads the directory specified by `self.0` and collects all + /// file paths into a vector. It returns a `miette::Result` containing the + /// vector of file paths or an error if the directory cannot be read. + pub(crate) async fn files(&self) -> miette::Result> { + let mut files = Vec::new(); + let mut entries = tokio::fs::read_dir(&self.0) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Could not read {}", &self.0.display()))?; + + while let Some(entry) = entries.next_entry().await.into_diagnostic()? { + let path = entry.path(); + if path.is_file() { + files.push(path); + } + } + + Ok(files) + } + + /// Returns the path to the binary directory + pub fn path(&self) -> &Path { + &self.0 + } + + /// Returns the path to the executable script for the given exposed name. + /// + /// This function constructs the path to the executable script by joining the + /// `bin_dir` with the provided `exposed_name`. If the target platform is + /// Windows, it sets the file extension to `.bat`. 
+ pub(crate) fn executable_script_path(&self, exposed_name: &ExposedKey) -> PathBuf { + let mut executable_script_path = self.0.join(exposed_name.to_string()); + if cfg!(windows) { + executable_script_path.set_extension("bat"); + } + executable_script_path + } + + pub(crate) async fn print_executables_available( + &self, + executables: Vec, + ) -> miette::Result<()> { + let whitespace = console::Emoji(" ", "").to_string(); + let executable = executables + .into_iter() + .map(|path| { + path.strip_prefix(self.path()) + .expect("script paths were constructed by joining onto BinDir") + .to_string_lossy() + .to_string() + }) + .join(&format!("\n{whitespace} - ")); + + if self.is_on_path() { + eprintln!( + "{whitespace}These executables are now globally available:\n{whitespace} - {executable}", + ) + } else { + eprintln!("{whitespace}These executables have been added to {}\n{whitespace} - {executable}\n\n{} To use them, make sure to add {} to your PATH", + console::style(&self.path().display()).bold(), + console::style("!").yellow().bold(), + console::style(&self.path().display()).bold() + ) + } + + Ok(()) + } + + /// Returns true if the bin folder is available on the PATH. 
+ fn is_on_path(&self) -> bool { + let Some(path_content) = std::env::var_os("PATH") else { + return false; + }; + std::env::split_paths(&path_content).contains(&self.path().to_owned()) + } +} + +#[derive(Debug, Clone)] +pub struct EnvRoot(PathBuf); + +impl EnvRoot { + pub async fn new(path: PathBuf) -> miette::Result { + tokio::fs::create_dir_all(&path).await.into_diagnostic()?; + Ok(Self(path)) + } + + pub async fn from_env() -> miette::Result { + let path = home_path() + .map(|path| path.join("envs")) + .ok_or_else(|| miette::miette!("Could not get home path"))?; + tokio::fs::create_dir_all(&path).await.into_diagnostic()?; + Ok(Self(path)) + } + + pub fn path(&self) -> &Path { + &self.0 + } + + /// Delete environments that are not listed + pub(crate) async fn prune( + &self, + environments: impl IntoIterator, + ) -> miette::Result<()> { + let env_set: ahash::HashSet = environments.into_iter().collect(); + let mut entries = tokio::fs::read_dir(&self.path()) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Could not read directory {}", self.path().display()))?; + + while let Some(entry) = entries.next_entry().await.into_diagnostic()? 
{ + let path = entry.path(); + if path.is_dir() { + let Some(Ok(env_name)) = path + .file_name() + .and_then(|name| name.to_str()) + .map(|name| name.parse()) + else { + continue; + }; + if !env_set.contains(&env_name) { + tokio::fs::remove_dir_all(&path) + .await + .into_diagnostic() + .wrap_err_with(|| { + format!("Could not remove directory {}", path.display()) + })?; + eprintln!( + "{} Remove environment '{env_name}'", + console::style(console::Emoji("✔", " ")).green() + ); + } + } + } + + Ok(()) + } +} + +/// Global binary environments directory +pub(crate) struct EnvDir { + root: EnvRoot, + path: PathBuf, +} + +impl EnvDir { + /// Create the Binary Environment directory + pub(crate) async fn new( + root: EnvRoot, + environment_name: EnvironmentName, + ) -> miette::Result { + let path = root.path().join(environment_name.as_str()); + tokio::fs::create_dir_all(&path).await.into_diagnostic()?; + + Ok(Self { root, path }) + } + + /// Construct the path to the env directory for the environment + /// `environment_name`. 
+ pub(crate) fn path(&self) -> &Path { + &self.path + } +} + +/// Get the friendly channel name of a [`PrefixRecord`] +/// +/// # Returns +/// +/// The friendly channel name of the given prefix record +pub(crate) fn channel_name_from_prefix( + prefix_package: &PrefixRecord, + channel_config: &ChannelConfig, +) -> String { + Channel::from_str(&prefix_package.repodata_record.channel, channel_config) + .map(|ch| repodata::friendly_channel_name(&ch)) + .unwrap_or_else(|_| prefix_package.repodata_record.channel.clone()) +} + +/// Find the designated package in the given [`Prefix`] +/// +/// # Returns +/// +/// The PrefixRecord of the designated package +pub(crate) async fn find_designated_package( + prefix: &Prefix, + package_name: &PackageName, +) -> miette::Result { + let prefix_records = prefix.find_installed_packages(None).await?; + prefix_records + .into_iter() + .find(|r| r.repodata_record.package_record.name == *package_name) + .ok_or_else(|| miette::miette!("could not find {} in prefix", package_name.as_source())) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::Path; + use tempfile::tempdir; + + #[tokio::test] + async fn test_create() { + // Create a temporary directory + let temp_dir = tempdir().unwrap(); + + // Set the env root to the temporary directory + let env_root = EnvRoot::new(temp_dir.path().to_owned()).await.unwrap(); + + // Define a test environment name + let environment_name = "test-env".parse().unwrap(); + + // Create a new binary env dir + let bin_env_dir = EnvDir::new(env_root, environment_name).await.unwrap(); + + // Verify that the directory was created + assert!(bin_env_dir.path().exists()); + assert!(bin_env_dir.path().is_dir()); + } + + #[tokio::test] + async fn test_prune() { + // Create a temporary directory + let temp_dir = tempdir().unwrap(); + + // Set the env root to the temporary directory + let env_root = EnvRoot::new(temp_dir.path().to_owned()).await.unwrap(); + + // Create some directories in the temporary 
directory + let envs = ["env1", "env2", "env3"]; + for env in &envs { + EnvDir::new(env_root.clone(), env.parse().unwrap()) + .await + .unwrap(); + } + + // Call the prune method with a list of environments to keep + env_root + .prune(["env1".parse().unwrap(), "env3".parse().unwrap()]) + .await + .unwrap(); + + // Verify that only the specified directories remain + let remaining_dirs = std::fs::read_dir(env_root.path()) + .unwrap() + .filter_map(|entry| entry.ok()) + .filter(|entry| entry.path().is_dir()) + .map(|entry| entry.file_name().into_string().unwrap()) + .sorted() + .collect_vec(); + + assert_eq!(remaining_dirs, vec!["env1", "env3"]); + } +} diff --git a/src/global/install.rs b/src/global/install.rs new file mode 100644 index 000000000..4b179d9ee --- /dev/null +++ b/src/global/install.rs @@ -0,0 +1,505 @@ +use crate::global; +use std::{ + collections::HashMap, + ffi::OsStr, + iter, + path::{Path, PathBuf}, + time, +}; + +use clap::Parser; +use distribution_types::Diagnostic; +use indexmap::IndexMap; +use itertools::Itertools; +use miette::{bail, Context, IntoDiagnostic}; +use pixi_utils::reqwest::build_reqwest_clients; +use rattler::{ + install::{DefaultProgressFormatter, IndicatifReporter, Installer}, + package_cache::PackageCache, +}; +use rattler_conda_types::{ + GenericVirtualPackage, MatchSpec, PackageName, Platform, PrefixRecord, RepoDataRecord, +}; +use rattler_shell::{ + activation::{ActivationVariables, Activator, PathModificationBehavior}, + shell::{Shell, ShellEnum}, +}; +use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; +use rattler_virtual_packages::VirtualPackage; +use reqwest_middleware::ClientWithMiddleware; + +use crate::{ + cli::cli_config::ChannelsConfig, cli::has_specs::HasSpecs, prefix::Prefix, + rlimit::try_increase_rlimit_to_sensible, +}; +use crate::{ + global::{channel_name_from_prefix, find_designated_package, BinDir, EnvDir}, + task::ExecutableTask, +}; +use pixi_config::{self, default_channel_config, Config, 
ConfigCli}; +use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; + +use super::{common::EnvRoot, EnvironmentName, ExposedKey}; + +/// Installs global environment records +pub(crate) async fn install_environment( + environment_name: &EnvironmentName, + exposed: &IndexMap, + packages: Vec, + records: Vec, + authenticated_client: ClientWithMiddleware, + platform: Platform, + bin_dir: &BinDir, +) -> miette::Result<()> { + try_increase_rlimit_to_sensible(); + + // Create the binary environment prefix where we install or update the package + let env_root = EnvRoot::from_env().await?; + let bin_env_dir = EnvDir::new(env_root, environment_name.clone()).await?; + let prefix = Prefix::new(bin_env_dir.path()); + + // Install the environment + let package_cache = PackageCache::new(pixi_config::get_cache_dir()?.join("pkgs")); + + let result = await_in_progress("creating virtual environment", |pb| { + Installer::new() + .with_download_client(authenticated_client) + .with_io_concurrency_limit(100) + .with_execute_link_scripts(false) + .with_package_cache(package_cache) + .with_target_platform(platform) + .with_reporter( + IndicatifReporter::builder() + .with_multi_progress(global_multi_progress()) + .with_placement(rattler::install::Placement::After(pb)) + .with_formatter(DefaultProgressFormatter::default().with_prefix(" ")) + .clear_when_done(true) + .finish(), + ) + .install(prefix.root(), records) + }) + .await + .into_diagnostic()?; + + // Determine the shell to use for the invocation script + let shell: ShellEnum = if cfg!(windows) { + rattler_shell::shell::CmdExe.into() + } else { + rattler_shell::shell::Bash.into() + }; + + // Construct the reusable activation script for the shell and generate an + // invocation script for each executable added by the package to the + // environment. 
+ let activation_script = create_activation_script(&prefix, shell.clone())?; + + let prefix_records = prefix.find_installed_packages(None).await?; + + /// Processes prefix records to filter and collect executable files. + /// It performs the following steps: + /// 1. Filters records to only include direct dependencies + /// 2. Finds executables for each filtered record. + /// 3. Maps executables to a tuple of file name (as a string) and file path. + /// 4. Filters tuples to include only those whose names are in the `exposed` values. + /// 5. Collects the resulting tuples into a vector of executables. + let executables: Vec<(String, PathBuf)> = prefix_records + .into_iter() + .filter(|record| packages.contains(&record.repodata_record.package_record.name)) + .flat_map(|record| find_executables(&prefix, record)) + .filter_map(|path| { + path.file_name() + .and_then(|name| name.to_str()) + .map(|name| (name.to_string(), path.clone())) + }) + .filter(|(name, path)| exposed.values().contains(&name)) + .collect(); + + let script_mapping = exposed + .into_iter() + .map(|(exposed_name, entry_point)| { + script_exec_mapping( + exposed_name, + entry_point, + executables.clone(), + bin_dir, + environment_name, + ) + }) + .collect::>>()?; + + create_executable_scripts(&script_mapping, &prefix, &shell, activation_script).await?; + + Ok(()) +} + +/// Maps an entry point in the environment to a concrete `ScriptExecMapping`. +/// +/// This function takes an entry point and a list of executable names and paths, +/// and returns a `ScriptExecMapping` that contains the path to the script and +/// the original executable. +/// # Returns +/// +/// A `miette::Result` containing the `ScriptExecMapping` if the entry point is found, +/// or an error if it is not. +/// +/// # Errors +/// +/// Returns an error if the entry point is not found in the list of executable names. 
+fn script_exec_mapping( + exposed_name: &ExposedKey, + entry_point: &str, + executables: impl IntoIterator, + bin_dir: &BinDir, + environment_name: &EnvironmentName, +) -> miette::Result { + executables + .into_iter() + .find(|(executable_name, _)| *executable_name == entry_point) + .map(|(_, executable_path)| ScriptExecMapping { + global_script_path: bin_dir.executable_script_path(exposed_name), + original_executable: executable_path, + }) + .ok_or_else(|| miette::miette!("Could not find {entry_point} in {environment_name}")) +} + +/// Create the environment activation script +fn create_activation_script(prefix: &Prefix, shell: ShellEnum) -> miette::Result { + let activator = + Activator::from_path(prefix.root(), shell, Platform::current()).into_diagnostic()?; + let result = activator + .activation(ActivationVariables { + conda_prefix: None, + path: None, + path_modification_behavior: PathModificationBehavior::Prepend, + }) + .into_diagnostic()?; + + // Add a shebang on unix based platforms + let script = if cfg!(unix) { + format!("#!/bin/sh\n{}", result.script.contents().into_diagnostic()?) + } else { + result.script.contents().into_diagnostic()? + }; + + Ok(script) +} + +/// Mapping from the global script location to an executable in a package environment . +#[derive(Debug)] +pub struct ScriptExecMapping { + pub global_script_path: PathBuf, + pub original_executable: PathBuf, +} + +/// Find the executable scripts within the specified package installed in this +/// conda prefix. +fn find_executables(prefix: &Prefix, prefix_package: PrefixRecord) -> Vec { + prefix_package + .files + .into_iter() + .filter(|relative_path| is_executable(prefix, relative_path)) + .collect() +} + +fn is_executable(prefix: &Prefix, relative_path: &Path) -> bool { + // Check if the file is in a known executable directory. 
+ let binary_folders = if cfg!(windows) { + &([ + "", + "Library/mingw-w64/bin/", + "Library/usr/bin/", + "Library/bin/", + "Scripts/", + "bin/", + ][..]) + } else { + &(["bin"][..]) + }; + + let parent_folder = match relative_path.parent() { + Some(dir) => dir, + None => return false, + }; + + if !binary_folders + .iter() + .any(|bin_path| Path::new(bin_path) == parent_folder) + { + return false; + } + + // Check if the file is executable + let absolute_path = prefix.root().join(relative_path); + is_executable::is_executable(absolute_path) +} + +/// Returns the string to add for all arguments passed to the script +fn get_catch_all_arg(shell: &ShellEnum) -> &str { + match shell { + ShellEnum::CmdExe(_) => "%*", + ShellEnum::PowerShell(_) => "@args", + _ => "\"$@\"", + } +} + +/// For each executable provided, map it to the installation path for its global +/// binary script. +async fn map_executables_to_global_bin_scripts( + package_executables: impl IntoIterator, + bin_dir: &BinDir, +) -> miette::Result> { + #[cfg(target_family = "windows")] + let extensions_list: Vec = if let Ok(pathext) = std::env::var("PATHEXT") { + pathext.split(';').map(|s| s.to_lowercase()).collect() + } else { + tracing::debug!("Could not find 'PATHEXT' variable, using a default list"); + [ + ".COM", ".EXE", ".BAT", ".CMD", ".VBS", ".VBE", ".JS", ".JSE", ".WSF", ".WSH", ".MSC", + ".CPL", + ] + .iter() + .map(|&s| s.to_lowercase()) + .collect() + }; + + #[cfg(target_family = "unix")] + // TODO: Find if there are more relevant cases, these cases are generated by our big friend + // GPT-4 + let extensions_list: Vec = vec![ + ".sh", ".bash", ".zsh", ".csh", ".tcsh", ".ksh", ".fish", ".py", ".pl", ".rb", ".lua", + ".php", ".tcl", ".awk", ".sed", + ] + .iter() + .map(|&s| s.to_owned()) + .collect(); + + let mut mappings = vec![]; + + for exec in package_executables { + // Remove the extension of a file if it is in the list of known extensions. 
+ let Some(file_name) = exec + .file_name() + .and_then(OsStr::to_str) + .map(str::to_lowercase) + else { + continue; + }; + let file_name = extensions_list + .iter() + .find_map(|ext| file_name.strip_suffix(ext)) + .unwrap_or(file_name.as_str()); + + let mut executable_script_path = bin_dir.path().join(file_name); + + if cfg!(windows) { + executable_script_path.set_extension("bat"); + }; + mappings.push(ScriptExecMapping { + original_executable: exec, + global_script_path: executable_script_path, + }); + } + Ok(mappings) +} + +/// Create the executable scripts by modifying the activation script +/// to activate the environment and run the executable. +async fn create_executable_scripts( + mapped_executables: &[ScriptExecMapping], + prefix: &Prefix, + shell: &ShellEnum, + activation_script: String, +) -> miette::Result<()> { + for ScriptExecMapping { + global_script_path, + original_executable, + } in mapped_executables + { + let mut script = activation_script.clone(); + shell + .run_command( + &mut script, + [ + format!( + "\"{}\"", + prefix.root().join(original_executable).to_string_lossy() + ) + .as_str(), + get_catch_all_arg(shell), + ], + ) + .expect("should never fail"); + + if matches!(shell, ShellEnum::CmdExe(_)) { + // wrap the script contents in `@echo off` and `setlocal` to prevent echoing the + // script and to prevent leaking environment variables into the + // parent shell (e.g. 
PATH would grow longer and longer) + script = format!("@echo off\nsetlocal\n{}\nendlocal", script); + } + + tokio::fs::write(&global_script_path, script) + .await + .into_diagnostic()?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + std::fs::set_permissions(global_script_path, std::fs::Permissions::from_mode(0o755)) + .into_diagnostic()?; + } + } + Ok(()) +} + +/// Warn user on dangerous package installations, interactive yes no prompt +pub(crate) fn prompt_user_to_continue( + packages: &IndexMap, +) -> miette::Result { + let dangerous_packages = HashMap::from([ + ("pixi", "Installing `pixi` globally doesn't work as expected.\nUse `pixi self-update` to update pixi and `pixi self-update --version x.y.z` for a specific version."), + ("pip", "Installing `pip` with `pixi global` won't make pip-installed packages globally available.\nInstead, use a pixi project and add PyPI packages with `pixi add --pypi`, which is recommended. Alternatively, `pixi add pip` and use it within the project.") + ]); + + // Check if any of the packages are dangerous, and prompt the user to ask if + // they want to continue, including the advice. + for (name, _spec) in packages { + if let Some(advice) = dangerous_packages.get(&name.as_normalized()) { + let prompt = format!( + "{}\nDo you want to continue?", + console::style(advice).yellow() + ); + if !dialoguer::Confirm::new() + .with_prompt(prompt) + .default(false) + .show_default(true) + .interact() + .into_diagnostic()? 
+ { + return Ok(false); + } + } + } + + Ok(true) +} + +pub(crate) async fn sync( + env_root: &EnvRoot, + project: &global::Project, + bin_dir: &BinDir, + config: &Config, + gateway: &rattler_repodata_gateway::Gateway, + auth_client: &reqwest_middleware::ClientWithMiddleware, +) -> Result<(), miette::Error> { + // Prune environments that are not listed + env_root + .prune(project.environments().keys().cloned()) + .await?; + + // Remove binaries that are not listed as exposed + let exposed_paths = project + .environments() + .values() + .flat_map(|environment| { + environment + .exposed + .keys() + .map(|e| bin_dir.executable_script_path(e)) + }) + .collect_vec(); + for file in bin_dir.files().await? { + let file_name = file + .file_stem() + .and_then(OsStr::to_str) + .ok_or_else(|| miette::miette!("Could not get file stem of {}", file.display()))?; + if !exposed_paths.contains(&file) && file_name != "pixi" { + tokio::fs::remove_file(&file) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Could not remove {}", &file.display()))?; + eprintln!( + "{} Remove binary '{file_name}'.", + console::style(console::Emoji("✔", " ")).green() + ); + } + } + + for (environment_name, environment) in project.environments() { + let specs = environment + .dependencies + .clone() + .into_iter() + .map(|(name, spec)| { + let match_spec = MatchSpec::from_nameless( + spec.clone() + .try_into_nameless_match_spec(&default_channel_config()) + .into_diagnostic()? 
+ .ok_or_else(|| { + miette::miette!("Could not convert {spec:?} to nameless match spec.") + })?, + Some(name.clone()), + ); + Ok((name, match_spec)) + }) + .collect::, miette::Report>>()?; + + let channels = environment + .channels() + .into_iter() + .map(|channel| channel.clone().into_channel(config.global_channel_config())) + .collect_vec(); + + let repodata = await_in_progress("querying repodata ", |_| async { + gateway + .query( + channels, + [environment.platform(), Platform::NoArch], + specs.values().cloned().collect_vec(), + ) + .recursive(true) + .await + .into_diagnostic() + }) + .await?; + + // Determine virtual packages of the current platform + let virtual_packages = VirtualPackage::current() + .into_diagnostic() + .context("failed to determine virtual packages")? + .iter() + .cloned() + .map(GenericVirtualPackage::from) + .collect(); + + // Solve the environment + let solver_specs = specs.clone(); + let solved_records = tokio::task::spawn_blocking(move || { + wrap_in_progress("solving environment", move || { + Solver.solve(SolverTask { + specs: solver_specs.values().cloned().collect_vec(), + virtual_packages, + ..SolverTask::from_iter(&repodata) + }) + }) + .into_diagnostic() + .context("failed to solve environment") + }) + .await + .into_diagnostic()??; + + let packages = specs.keys().cloned().collect(); + + install_environment( + &environment_name, + &environment.exposed, + packages, + solved_records.clone(), + auth_client.clone(), + environment.platform(), + bin_dir, + ) + .await?; + } + + Ok(()) +} diff --git a/src/global/mod.rs b/src/global/mod.rs index e9916f7d1..34bfb8a47 100644 --- a/src/global/mod.rs +++ b/src/global/mod.rs @@ -1,4 +1,12 @@ // TODO: remove this before merging to main #![allow(unused)] +mod common; +mod install; mod project; + +pub(crate) use common::{ + channel_name_from_prefix, find_designated_package, BinDir, EnvDir, EnvRoot, +}; +pub(crate) use install::sync; +pub(crate) use project::{EnvironmentName, ExposedKey, Project, 
MANIFEST_DEFAULT_NAME}; diff --git a/src/global/project/environment.rs b/src/global/project/environment.rs index 63bef84c0..826a9e1e5 100644 --- a/src/global/project/environment.rs +++ b/src/global/project/environment.rs @@ -1,6 +1,9 @@ use std::{fmt, str::FromStr}; +use indexmap::IndexMap; use miette::Diagnostic; +use pixi_spec::PixiSpec; +use rattler_conda_types::PackageName; use regex::Regex; use serde::{self, Deserialize, Deserializer}; use thiserror::Error; @@ -38,16 +41,6 @@ impl<'de> Deserialize<'de> for EnvironmentName { } } -/// Represents an error that occurs when parsing an environment name. -/// -/// This error is returned when a string fails to be parsed as an environment name. -#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] -#[error("Failed to parse environment name '{attempted_parse}', please use only lowercase letters, numbers and dashes")] -pub struct ParseEnvironmentNameError { - /// The string that was attempted to be parsed. - pub attempted_parse: String, -} - impl FromStr for EnvironmentName { type Err = ParseEnvironmentNameError; fn from_str(s: &str) -> Result { @@ -64,3 +57,13 @@ impl FromStr for EnvironmentName { Ok(EnvironmentName(s.to_string())) } } + +/// Represents an error that occurs when parsing an environment name. +/// +/// This error is returned when a string fails to be parsed as an environment name. +#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] +#[error("Failed to parse environment name '{attempted_parse}', please use only lowercase letters, numbers and dashes")] +pub struct ParseEnvironmentNameError { + /// The string that was attempted to be parsed. 
+ pub attempted_parse: String, +} diff --git a/src/global/project/errors.rs b/src/global/project/error.rs similarity index 100% rename from src/global/project/errors.rs rename to src/global/project/error.rs diff --git a/src/global/project/manifest.rs b/src/global/project/manifest.rs index d9fe7d80f..b8328dcba 100644 --- a/src/global/project/manifest.rs +++ b/src/global/project/manifest.rs @@ -4,7 +4,7 @@ use miette::IntoDiagnostic; use rattler_conda_types::{MatchSpec, PackageName}; use toml_edit::DocumentMut; -use super::errors::ManifestError; +use super::error::ManifestError; use super::MANIFEST_DEFAULT_NAME; use super::{document::ManifestSource, parsed_manifest::ParsedManifest}; diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index fadde0c37..5a503bbcd 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -6,19 +6,24 @@ use std::{ sync::OnceLock, }; +pub(crate) use environment::EnvironmentName; +use indexmap::IndexMap; use manifest::Manifest; use miette::IntoDiagnostic; +pub(crate) use parsed_manifest::ExposedKey; +use parsed_manifest::ParsedEnvironment; +use pixi_config::Config; use rattler_repodata_gateway::Gateway; use reqwest_middleware::ClientWithMiddleware; use std::fmt::Debug; mod document; mod environment; -mod errors; +mod error; mod manifest; mod parsed_manifest; -const MANIFEST_DEFAULT_NAME: &str = "pixi-global.toml"; +pub(crate) const MANIFEST_DEFAULT_NAME: &str = "pixi-global.toml"; /// The pixi global project, this main struct to interact with the pixi global project. 
/// This struct holds the `Manifest` and has functions to modify @@ -36,6 +41,8 @@ pub struct Project { repodata_gateway: OnceLock, /// The manifest for the project pub(crate) manifest: Manifest, + /// The global configuration as loaded from the config file(s) + config: Config, } impl Debug for Project { @@ -56,16 +63,19 @@ impl Project { .expect("manifest path should always have a parent") .to_owned(); + let config = Config::load(&root); + Self { root, client: Default::default(), repodata_gateway: Default::default(), manifest, + config, } } /// Constructs a project from a manifest. - pub fn from_str(manifest_path: &Path, content: &str) -> miette::Result { + pub(crate) fn from_str(manifest_path: &Path, content: &str) -> miette::Result { let manifest = Manifest::from_str(manifest_path, content)?; Ok(Self::from_manifest(manifest)) } @@ -73,10 +83,8 @@ impl Project { /// Discovers the project manifest file in path set by `PIXI_GLOBAL_MANIFESTS` /// or alternatively at `~/.pixi/manifests/pixi-global.toml`. /// If the manifest doesn't exist yet, and empty one will be created. - pub fn discover() -> miette::Result { - let manifest_dir = env::var("PIXI_GLOBAL_MANIFESTS") - .map(PathBuf::from) - .or_else(|_| Self::default_dir())?; + pub(crate) fn discover() -> miette::Result { + let manifest_dir = Self::manifest_dir()?; fs::create_dir_all(&manifest_dir).into_diagnostic()?; @@ -89,19 +97,35 @@ impl Project { } /// Get default dir for the pixi global manifest - fn default_dir() -> miette::Result { - // If environment variable is not set, use default directory - let default_dir = dirs::home_dir() - .ok_or_else(|| miette::miette!("Could not get home directory"))? 
- .join(".pixi/manifests"); - Ok(default_dir) + pub(crate) fn manifest_dir() -> miette::Result { + env::var("PIXI_GLOBAL_MANIFESTS") + .map(PathBuf::from) + .or_else(|_| { + dirs::home_dir() + .map(|dir| dir.join(".pixi/manifests")) + .ok_or_else(|| miette::miette!("Could not get home directory")) + }) } /// Loads a project from manifest file. - pub fn from_path(manifest_path: &Path) -> miette::Result { + pub(crate) fn from_path(manifest_path: &Path) -> miette::Result { let manifest = Manifest::from_path(manifest_path)?; Ok(Project::from_manifest(manifest)) } + + /// Merge config with existing config project + pub(crate) fn with_cli_config(mut self, config: C) -> Self + where + C: Into, + { + self.config = self.config.merge_config(config.into()); + self + } + + /// Returns the environments in this project. + pub(crate) fn environments(&self) -> IndexMap { + self.manifest.parsed.environments() + } } #[cfg(test)] @@ -112,6 +136,8 @@ mod tests { use fake::{faker::filesystem::zh_tw::FilePath, Fake}; const SIMPLE_MANIFEST: &str = r#" + [envs.python] + channels = ["conda-forge"] [envs.python.dependencies] python = "3.11.*" [envs.python.exposed] @@ -165,7 +191,7 @@ mod tests { } #[test] - fn test_project_default_dir() { - Project::default_dir().unwrap(); + fn test_project_manifest_dir() { + Project::manifest_dir().unwrap(); } } diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs index 4545fc236..22b532df6 100644 --- a/src/global/project/parsed_manifest.rs +++ b/src/global/project/parsed_manifest.rs @@ -1,37 +1,148 @@ -use indexmap::IndexMap; -use pixi_manifest::deserialize_package_map; -use rattler_conda_types::PackageName; +use std::fmt; +use std::str::FromStr; + +use indexmap::{IndexMap, IndexSet}; +use itertools::Itertools; +use pixi_manifest::PrioritizedChannel; +use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform}; +use serde::de::{Deserialize, DeserializeSeed, Deserializer, MapAccess, Visitor}; use 
serde_with::{serde_as, serde_derive::Deserialize}; use super::environment::EnvironmentName; -use super::errors::ManifestError; +use super::error::ManifestError; use pixi_spec::PixiSpec; /// Describes the contents of a parsed global project manifest. -#[serde_as] -#[derive(Deserialize, Debug, Clone)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] +#[derive(Debug, Clone)] pub struct ParsedManifest { /// The environments the project can create. - #[serde(default)] - envs: IndexMap, + environments: IndexMap, } impl ParsedManifest { /// Parses a toml string into a project manifest. - pub fn from_toml_str(source: &str) -> Result { + pub(crate) fn from_toml_str(source: &str) -> Result { toml_edit::de::from_str(source).map_err(ManifestError::from) } + + pub(crate) fn environments(&self) -> IndexMap { + self.environments.clone() + } +} + +impl<'de> serde::Deserialize<'de> for ParsedManifest { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[serde_as] + #[derive(Deserialize, Debug, Clone)] + #[serde(deny_unknown_fields, rename_all = "kebab-case")] + pub struct TomlManifest { + /// The environments the project can create. 
+ #[serde(default)] + envs: IndexMap, + } + + let mut manifest = TomlManifest::deserialize(deserializer)?; + + // Check for duplicate keys in the exposed fields + let mut exposed_keys = IndexSet::new(); + let mut duplicates = IndexMap::new(); + for key in manifest.envs.values().flat_map(|env| env.exposed.keys()) { + if !exposed_keys.insert(key) { + duplicates.entry(key).or_insert_with(Vec::new).push(key); + } + } + if !duplicates.is_empty() { + let duplicate_keys = duplicates.keys().map(|k| k.to_string()).collect_vec(); + return Err(serde::de::Error::custom(format!( + "Duplicate exposed keys found: '{}'", + duplicate_keys.join(", ") + ))); + } + + Ok(Self { + environments: manifest.envs, + }) + } } #[serde_as] #[derive(Deserialize, Debug, Clone)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct ParsedEnvironment { - #[serde(default, deserialize_with = "deserialize_package_map")] - dependencies: IndexMap, - exposed: IndexMap, +pub(crate) struct ParsedEnvironment { + #[serde_as(as = "IndexSet")] + channels: IndexSet, + // Platform used by the environment. + platform: Option, + #[serde(default, deserialize_with = "pixi_manifest::deserialize_package_map")] + pub(crate) dependencies: IndexMap, + pub(crate) exposed: IndexMap, +} + +impl ParsedEnvironment { + // If `self.platform` is `None` is not given, the current platform is used + pub(crate) fn platform(&self) -> Platform { + if let Some(platform) = self.platform { + platform + } else { + Platform::current() + } + } + + /// Returns the channels associated with this collection. 
+ pub(crate) fn channels(&self) -> IndexSet<&NamedChannelOrUrl> { + PrioritizedChannel::sort_channels_by_priority(&self.channels).collect() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct ExposedKey(String); + +impl fmt::Display for ExposedKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for ExposedKey { + type Err = String; + + fn from_str(value: &str) -> Result { + if value == "pixi" { + Err("The key 'pixi' is not allowed in the exposed map".to_string()) + } else { + Ok(ExposedKey(value.to_string())) + } + } +} + +impl<'de> Deserialize<'de> for ExposedKey { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ExposedKeyVisitor; + + impl<'de> Visitor<'de> for ExposedKeyVisitor { + type Value = ExposedKey; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string that is not 'pixi'") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + ExposedKey::from_str(value).map_err(serde::de::Error::custom) + } + } + + deserializer.deserialize_str(ExposedKeyVisitor) + } } #[cfg(test)] @@ -57,9 +168,35 @@ mod tests { .join("\n")) } + #[test] + fn test_duplicate_exposed() { + let contents = r#" + [envs.python-3-10] + channels = ["conda-forge"] + [envs.python-3-10.dependencies] + python = "3.10" + [envs.python-3-10.exposed] + python = "python" + python3 = "python" + [envs.python-3-11] + channels = ["conda-forge"] + [envs.python-3-11.dependencies] + python = "3.11" + [envs.python-3-11.exposed] + "python" = "python" + "python3" = "python" + "#; + let manifest = ParsedManifest::from_toml_str(contents); + + assert!(manifest.is_err()); + assert_snapshot!(manifest.unwrap_err()); + } + #[test] fn test_duplicate_dependency() { let contents = r#" + [envs.python] + channels = ["conda-forge"] [envs.python.dependencies] python = "*" PYTHON = "*" @@ -69,16 +206,33 @@ mod tests { let 
manifest = ParsedManifest::from_toml_str(contents); assert!(manifest.is_err()); - assert!(manifest - .unwrap_err() - .to_string() - .contains("duplicate dependency")); + assert_snapshot!(manifest.unwrap_err()); + } + + #[test] + fn test_expose_pixi() { + let contents = r#" + [envs.test] + channels = ["conda-forge"] + [envs.test.dependencies] + python = "*" + [envs.test.exposed] + pixi = "python" + "#; + let manifest = ParsedManifest::from_toml_str(contents); + + assert!(manifest.is_err()); + assert_snapshot!(manifest.unwrap_err()); } #[test] fn test_tool_deserialization() { let contents = r#" # The name of the environment is `python` + [envs.python] + channels = ["conda-forge"] + # optional, defaults to your current OS + platform = "osx-64" # It will expose python, python3 and python3.11, but not pip [envs.python.dependencies] python = "3.11.*" @@ -90,6 +244,8 @@ mod tests { "python3.11" = "python3.11" # The name of the environment is `python3-10` + [envs.python3-10] + channels = ["https://fast.prefix.dev/conda-forge"] # It will expose python3.10 [envs.python3-10.dependencies] python = "3.10.*" diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_dependency.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_dependency.snap new file mode 100644 index 000000000..9899838e7 --- /dev/null +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_dependency.snap @@ -0,0 +1,9 @@ +--- +source: src/global/project/parsed_manifest.rs +expression: manifest.unwrap_err() +--- +TOML parse error at line 6, column 9 + | +6 | PYTHON = "*" + | ^^^^^^ +duplicate dependency: python (please avoid using capitalized names for the dependencies) diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap new file mode 
100644 index 000000000..d2ebc752f --- /dev/null +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap @@ -0,0 +1,5 @@ +--- +source: src/global/project/parsed_manifest.rs +expression: manifest.unwrap_err() +--- +Duplicate exposed keys found: 'python, python3' diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__expose_pixi.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__expose_pixi.snap new file mode 100644 index 000000000..afca59397 --- /dev/null +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__expose_pixi.snap @@ -0,0 +1,9 @@ +--- +source: src/global/project/parsed_manifest.rs +expression: manifest.unwrap_err() +--- +TOML parse error at line 7, column 9 + | +7 | pixi = "python" + | ^^^^ +The key 'pixi' is not allowed in the exposed map diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap index 93e574135..8635fb385 100644 --- a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__invalid_key.snap @@ -12,7 +12,7 @@ TOML parse error at line 1, column 15 | 1 | [envs.ipython.invalid] | ^^^^^^^ -unknown field `invalid`, expected `dependencies` or `exposed` +unknown field `invalid`, expected one of `channels`, `platform`, `dependencies`, `exposed` TOML parse error at line 1, column 7 | From 98a74c3b69b2403111d41454cb7f0509438e2110 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:44:09 +0200 Subject: [PATCH 3/8] fix: integration tests for pixi global (#1972) As soon as we implement it, this marker should throw an error --- tests/integration/test_main_cli.py | 25 +------------------------ 
1 file changed, 1 insertion(+), 24 deletions(-) diff --git a/tests/integration/test_main_cli.py b/tests/integration/test_main_cli.py index 63b5d48f3..4ae73a5e3 100644 --- a/tests/integration/test_main_cli.py +++ b/tests/integration/test_main_cli.py @@ -219,37 +219,14 @@ def test_project_commands(tmp_path: Path, pixi: Path) -> None: ) +@pytest.mark.xfail(reason="`pixi global install` is not yet implemented", strict=True) def test_global_install(pixi: Path) -> None: # Install verify_cli_command( [pixi, "global", "install", "rattler-build"], ExitCode.SUCCESS, - stdout_excludes="rattler-build", ) - verify_cli_command( - [ - pixi, - "global", - "install", - "rattler-build", - "-c", - "https://fast.prefix.dev/conda-forge", - ], - ExitCode.SUCCESS, - stdout_excludes="rattler-build", - ) - - # Upgrade - verify_cli_command([pixi, "global", "upgrade", "rattler-build"], ExitCode.SUCCESS) - - # List - verify_cli_command([pixi, "global", "list"], ExitCode.SUCCESS, stderr_contains="rattler-build") - - # Remove - verify_cli_command([pixi, "global", "remove", "rattler-build"], ExitCode.SUCCESS) - verify_cli_command([pixi, "global", "remove", "rattler-build"], ExitCode.FAILURE) - def test_search(pixi: Path) -> None: verify_cli_command( From e04e20e93aa1a06c0e348bf3b89e0c6f508dcd49 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 20 Sep 2024 07:32:37 +0200 Subject: [PATCH 4/8] feat: add migration path for new pixi global (#1975) Co-authored-by: Bas Zalmstra Co-authored-by: Bas Zalmstra --- Cargo.lock | 5 +- Cargo.toml | 2 + crates/pixi_manifest/src/channel.rs | 42 ++- pixi.toml | 5 +- src/cli/global/sync.rs | 43 +-- src/global/common.rs | 126 ++++++-- src/global/install.rs | 439 ++++++++++++++++---------- src/global/mod.rs | 46 +++ src/global/project/environment.rs | 4 +- src/global/project/mod.rs | 314 +++++++++++++++--- src/global/project/parsed_manifest.rs | 56 +++- tests/integration/common.py | 6 +- 
tests/integration/conftest.py | 5 + tests/integration/test_global.py | 190 +++++++++++ tests/integration/test_main_cli.py | 16 +- 15 files changed, 991 insertions(+), 308 deletions(-) create mode 100644 tests/integration/test_global.py diff --git a/Cargo.lock b/Cargo.lock index 4b84e967a..785a8ea3d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3479,6 +3479,7 @@ dependencies = [ "tempfile", "thiserror", "tokio", + "tokio-stream", "tokio-util", "toml_edit 0.22.20", "tracing", @@ -5832,9 +5833,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", diff --git a/Cargo.toml b/Cargo.toml index 392d8a3e1..f911af46b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -81,6 +81,7 @@ tar = "0.4.40" tempfile = "3.10.1" thiserror = "1.0.58" tokio = "1.37.0" +tokio-stream = "0.1.16" tokio-util = "0.7.10" toml_edit = "0.22.11" tracing = "0.1.40" @@ -265,6 +266,7 @@ tar = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["macros", "rt-multi-thread", "signal"] } +tokio-stream = { workspace = true, features = ["fs"] } tokio-util = { workspace = true } toml_edit = { workspace = true, features = ["serde"] } tracing = { workspace = true } diff --git a/crates/pixi_manifest/src/channel.rs b/crates/pixi_manifest/src/channel.rs index 15b3ce641..1f8db4bd1 100644 --- a/crates/pixi_manifest/src/channel.rs +++ b/crates/pixi_manifest/src/channel.rs @@ -2,14 +2,14 @@ use std::str::FromStr; use itertools::Itertools; use rattler_conda_types::NamedChannelOrUrl; -use serde::{de::Error, Deserialize, Deserializer}; +use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; use serde_with::serde_as; /// A 
channel with an optional priority. /// If the priority is not specified, it is assumed to be 0. /// The higher the priority, the more important the channel is. #[serde_as] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] pub struct PrioritizedChannel { pub channel: NamedChannelOrUrl, pub priority: Option, @@ -61,6 +61,19 @@ impl TomlPrioritizedChannelStrOrMap { } } +impl From for TomlPrioritizedChannelStrOrMap { + fn from(channel: PrioritizedChannel) -> Self { + if let Some(priority) = channel.priority { + TomlPrioritizedChannelStrOrMap::Map(PrioritizedChannel { + channel: channel.channel, + priority: Some(priority), + }) + } else { + TomlPrioritizedChannelStrOrMap::Str(channel.channel) + } + } +} + impl<'de> Deserialize<'de> for TomlPrioritizedChannelStrOrMap { fn deserialize(deserializer: D) -> Result where @@ -78,6 +91,18 @@ impl<'de> Deserialize<'de> for TomlPrioritizedChannelStrOrMap { } } +impl Serialize for TomlPrioritizedChannelStrOrMap { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + TomlPrioritizedChannelStrOrMap::Map(map) => map.serialize(serializer), + TomlPrioritizedChannelStrOrMap::Str(str) => str.serialize(serializer), + } + } +} + /// Helper so that we can deserialize /// [`crate::channel::PrioritizedChannel`] from a string or a /// map. @@ -90,3 +115,16 @@ impl<'de> serde_with::DeserializeAs<'de, PrioritizedChannel> for TomlPrioritized Ok(prioritized_channel.into_prioritized_channel()) } } + +/// Helper so that we can serialize +/// [`crate::channel::PrioritizedChannel`] to a string or a +/// map. 
+impl serde_with::SerializeAs for TomlPrioritizedChannelStrOrMap { + fn serialize_as(source: &PrioritizedChannel, serializer: S) -> Result + where + S: Serializer, + { + let toml_prioritized_channel: TomlPrioritizedChannelStrOrMap = source.clone().into(); + toml_prioritized_channel.serialize(serializer) + } +} diff --git a/pixi.toml b/pixi.toml index 8c10711f2..5d2d8e3ac 100644 --- a/pixi.toml +++ b/pixi.toml @@ -27,11 +27,8 @@ test = "cargo test" test-all = "cargo test --all-features" [feature.pytest.dependencies] -mypy = ">=1.11,<1.12" -# For detecting cpu cores with pytest-xdist -psutil = ">=6.0.0,<7" -# For running tests in parallel, use this instead of regular pytest filelock = ">=3.16.0,<4" +mypy = ">=1.11,<1.12" pytest = "*" pytest-rerunfailures = ">=14.0,<15" pytest-xdist = ">=3.6.1,<4" diff --git a/src/cli/global/sync.rs b/src/cli/global/sync.rs index 94bd1584e..85b0edf50 100644 --- a/src/cli/global/sync.rs +++ b/src/cli/global/sync.rs @@ -1,11 +1,13 @@ -use crate::global::{self, BinDir, EnvRoot}; +use crate::global::{self}; use clap::Parser; use pixi_config::{Config, ConfigCli}; -use pixi_utils::reqwest::build_reqwest_clients; /// Sync global manifest with installed environments #[derive(Parser, Debug)] pub struct Args { + /// Answer yes to all questions. + #[clap(short = 'y', long = "yes", long = "assume-yes")] + assume_yes: bool, #[clap(flatten)] config: ConfigCli, } @@ -14,40 +16,5 @@ pub struct Args { pub async fn execute(args: Args) -> miette::Result<()> { let config = Config::with_cli_config(&args.config); - // Check if the certain file is present - let certain_file_path = global::Project::manifest_dir()?.join(global::MANIFEST_DEFAULT_NAME); - if !certain_file_path.exists() { - eprintln!( - "This will remove your existing global installation. Do you want to continue? 
(y/N): " - ); - - let mut input = String::new(); - std::io::stdin().read_line(&mut input).unwrap(); - let input = input.trim().to_lowercase(); - - if input != "y" { - eprintln!("Operation aborted."); - return Ok(()); - } - } - - let project = global::Project::discover()?.with_cli_config(config.clone()); - - // Fetch the repodata - let (_, auth_client) = build_reqwest_clients(Some(&config)); - - let gateway = config.gateway(auth_client.clone()); - - let env_root = EnvRoot::from_env().await?; - let bin_dir = BinDir::from_env().await?; - - global::sync( - &env_root, - &project, - &bin_dir, - &config, - &gateway, - &auth_client, - ) - .await + global::sync(&config, args.assume_yes).await } diff --git a/src/global/common.rs b/src/global/common.rs index 6a23a13dc..6d326ee59 100644 --- a/src/global/common.rs +++ b/src/global/common.rs @@ -1,4 +1,7 @@ -use std::path::{Path, PathBuf}; +use std::{ + io::Read, + path::{Path, PathBuf}, +}; use itertools::Itertools; use miette::{Context, IntoDiagnostic}; @@ -15,6 +18,7 @@ use rattler_shell::{ shell::ShellEnum, }; use reqwest_middleware::ClientWithMiddleware; +use tokio::io::AsyncReadExt; use crate::{ cli::project::environment, prefix::Prefix, repodata, rlimit::try_increase_rlimit_to_sensible, @@ -27,7 +31,7 @@ use super::{EnvironmentName, ExposedKey}; pub struct BinDir(PathBuf); impl BinDir { - /// Create the Binary Executable directory + /// Create the binary executable directory from environment variables pub async fn from_env() -> miette::Result { let bin_dir = home_path() .map(|path| path.join("bin")) @@ -40,7 +44,7 @@ impl BinDir { Ok(Self(bin_dir)) } - /// Asynchronously retrieves all files in the Binary Executable directory. + /// Asynchronously retrieves all files in the binary executable directory. /// /// This function reads the directory specified by `self.0` and collects all /// file paths into a vector. 
It returns a `miette::Result` containing the @@ -123,16 +127,25 @@ impl BinDir { pub struct EnvRoot(PathBuf); impl EnvRoot { + /// Create the environment root directory + #[cfg(test)] pub async fn new(path: PathBuf) -> miette::Result { - tokio::fs::create_dir_all(&path).await.into_diagnostic()?; + tokio::fs::create_dir_all(&path) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Couldn't create directory {}", path.display()))?; Ok(Self(path)) } - pub async fn from_env() -> miette::Result { + /// Create the environment root directory from environment variables + pub(crate) async fn from_env() -> miette::Result { let path = home_path() .map(|path| path.join("envs")) .ok_or_else(|| miette::miette!("Could not get home path"))?; - tokio::fs::create_dir_all(&path).await.into_diagnostic()?; + tokio::fs::create_dir_all(&path) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Couldn't create directory {}", path.display()))?; Ok(Self(path)) } @@ -140,12 +153,9 @@ impl EnvRoot { &self.0 } - /// Delete environments that are not listed - pub(crate) async fn prune( - &self, - environments: impl IntoIterator, - ) -> miette::Result<()> { - let env_set: ahash::HashSet = environments.into_iter().collect(); + /// Get all directories in the env root + pub(crate) async fn directories(&self) -> miette::Result> { + let mut directories = Vec::new(); let mut entries = tokio::fs::read_dir(&self.path()) .await .into_diagnostic() @@ -154,25 +164,39 @@ impl EnvRoot { while let Some(entry) = entries.next_entry().await.into_diagnostic()? 
{ let path = entry.path(); if path.is_dir() { - let Some(Ok(env_name)) = path - .file_name() - .and_then(|name| name.to_str()) - .map(|name| name.parse()) - else { - continue; - }; - if !env_set.contains(&env_name) { - tokio::fs::remove_dir_all(&path) - .await - .into_diagnostic() - .wrap_err_with(|| { - format!("Could not remove directory {}", path.display()) - })?; - eprintln!( - "{} Remove environment '{env_name}'", - console::style(console::Emoji("✔", " ")).green() - ); - } + directories.push(path); + } + } + + Ok(directories) + } + + /// Delete environments that are not listed + pub(crate) async fn prune( + &self, + environments_to_keep: impl IntoIterator, + ) -> miette::Result<()> { + let env_set: ahash::HashSet = environments_to_keep.into_iter().collect(); + + for env_path in self.directories().await? { + let Some(Ok(env_name)) = env_path + .file_name() + .and_then(|name| name.to_str()) + .map(|name| name.parse()) + else { + continue; + }; + if !env_set.contains(&env_name) { + tokio::fs::remove_dir_all(&env_path) + .await + .into_diagnostic() + .wrap_err_with(|| { + format!("Could not remove directory {}", env_path.display()) + })?; + eprintln!( + "{} Remove environment '{env_name}'", + console::style(console::Emoji("✔", " ")).green() + ); } } @@ -180,14 +204,14 @@ impl EnvRoot { } } -/// Global binary environments directory +/// A global environment directory pub(crate) struct EnvDir { root: EnvRoot, path: PathBuf, } impl EnvDir { - /// Create the Binary Environment directory + /// Create a global environment directory pub(crate) async fn new( root: EnvRoot, environment_name: EnvironmentName, @@ -198,6 +222,22 @@ impl EnvDir { Ok(Self { root, path }) } + /// Initialize a global environment directory from an existing path + pub(crate) fn try_from_existing( + root: EnvRoot, + environment_name: EnvironmentName, + ) -> miette::Result { + let path = root.path().join(environment_name.as_str()); + if !path.is_dir() { + return Err(miette::miette!( + "Directory does 
not exist: {}", + path.display() + )); + } + + Ok(Self { root, path }) + } + /// Construct the path to the env directory for the environment /// `environment_name`. pub(crate) fn path(&self) -> &Path { @@ -235,6 +275,26 @@ pub(crate) async fn find_designated_package( .ok_or_else(|| miette::miette!("could not find {} in prefix", package_name.as_source())) } +/// Checks if a file is binary by reading the first 1024 bytes and checking for null bytes. +pub(crate) fn is_binary(file_path: impl AsRef) -> miette::Result { + let mut file = std::fs::File::open(&file_path) + .into_diagnostic() + .wrap_err_with(|| format!("Could not open {}", &file_path.as_ref().display()))?; + let mut buffer = [0; 1024]; + let bytes_read = file + .read(&mut buffer) + .into_diagnostic() + .wrap_err_with(|| format!("Could not read {}", &file_path.as_ref().display()))?; + + Ok(buffer[..bytes_read].contains(&0)) +} + +/// Checks if given path points to a text file by calling `is_binary`. +/// If that returns `false`, then it is a text file and vice-versa. +pub(crate) fn is_text(file_path: impl AsRef) -> miette::Result { + Ok(!is_binary(file_path)?) 
+} + #[cfg(test)] mod tests { use super::*; diff --git a/src/global/install.rs b/src/global/install.rs index 3a8144edc..7c5587e76 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -1,9 +1,10 @@ -use crate::global; use std::{ + borrow::Borrow, collections::HashMap, ffi::OsStr, iter, path::{Path, PathBuf}, + str::FromStr, time, }; @@ -12,57 +13,99 @@ use distribution_types::Diagnostic; use indexmap::IndexMap; use itertools::Itertools; use miette::{bail, Context, IntoDiagnostic}; +use pixi_config::{self, default_channel_config, Config, ConfigCli}; +use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; use pixi_utils::reqwest::build_reqwest_clients; use rattler::{ install::{DefaultProgressFormatter, IndicatifReporter, Installer}, package_cache::PackageCache, }; use rattler_conda_types::{ - GenericVirtualPackage, MatchSpec, PackageName, Platform, PrefixRecord, RepoDataRecord, + GenericVirtualPackage, MatchSpec, Matches, PackageName, ParseStrictness, Platform, + PrefixRecord, RepoDataRecord, }; +use rattler_repodata_gateway::Gateway; use rattler_shell::{ activation::{ActivationVariables, Activator, PathModificationBehavior}, shell::{Shell, ShellEnum}, }; use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; -use rattler_virtual_packages::VirtualPackage; -use rattler_virtual_packages::VirtualPackageOverrides; +use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; use reqwest_middleware::ClientWithMiddleware; +use super::{common::EnvRoot, project::ParsedEnvironment, EnvironmentName, ExposedKey}; use crate::{ - cli::cli_config::ChannelsConfig, cli::has_specs::HasSpecs, prefix::Prefix, + cli::{cli_config::ChannelsConfig, has_specs::HasSpecs, project::platform}, + global::{self, channel_name_from_prefix, find_designated_package, BinDir, EnvDir}, + prefix::Prefix, rlimit::try_increase_rlimit_to_sensible, -}; -use crate::{ - global::{channel_name_from_prefix, find_designated_package, BinDir, EnvDir}, 
task::ExecutableTask, }; -use pixi_config::{self, default_channel_config, Config, ConfigCli}; -use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; - -use super::{common::EnvRoot, EnvironmentName, ExposedKey}; /// Installs global environment records pub(crate) async fn install_environment( - environment_name: &EnvironmentName, - exposed: &IndexMap, - packages: Vec, - records: Vec, + specs: &IndexMap, + env_name: &EnvironmentName, + parsed_environment: &ParsedEnvironment, authenticated_client: ClientWithMiddleware, - platform: Platform, - bin_dir: &BinDir, + prefix: &Prefix, + config: &Config, + gateway: &Gateway, ) -> miette::Result<()> { - try_increase_rlimit_to_sensible(); + let channels = parsed_environment + .channels() + .into_iter() + .map(|channel| channel.clone().into_channel(config.global_channel_config())) + .collect_vec(); - // Create the binary environment prefix where we install or update the package - let env_root = EnvRoot::from_env().await?; - let bin_env_dir = EnvDir::new(env_root, environment_name.clone()).await?; - let prefix = Prefix::new(bin_env_dir.path()); + let platform = parsed_environment + .platform() + .unwrap_or_else(Platform::current); + + let repodata = await_in_progress("querying repodata ", |_| async { + gateway + .query( + channels, + [platform, Platform::NoArch], + specs.values().cloned().collect_vec(), + ) + .recursive(true) + .await + .into_diagnostic() + }) + .await?; + + // Determine virtual packages of the current platform + let virtual_packages = VirtualPackage::detect(&VirtualPackageOverrides::default()) + .into_diagnostic() + .context("failed to determine virtual packages")? 
+ .iter() + .cloned() + .map(GenericVirtualPackage::from) + .collect(); + + // Solve the environment + let solver_specs = specs.clone(); + let solved_records = tokio::task::spawn_blocking(move || { + wrap_in_progress("solving environment", move || { + Solver.solve(SolverTask { + specs: solver_specs.values().cloned().collect_vec(), + virtual_packages, + ..SolverTask::from_iter(&repodata) + }) + }) + .into_diagnostic() + .context("failed to solve environment") + }) + .await + .into_diagnostic()??; + + try_increase_rlimit_to_sensible(); // Install the environment let package_cache = PackageCache::new(pixi_config::get_cache_dir()?.join("pkgs")); - let result = await_in_progress("creating virtual environment", |pb| { + await_in_progress("creating virtual environment", |pb| { Installer::new() .with_download_client(authenticated_client) .with_io_concurrency_limit(100) @@ -77,11 +120,21 @@ pub(crate) async fn install_environment( .clear_when_done(true) .finish(), ) - .install(prefix.root(), records) + .install(prefix.root(), solved_records) }) .await .into_diagnostic()?; + Ok(()) +} + +pub(crate) async fn expose_executables( + env_name: &EnvironmentName, + parsed_environment: &ParsedEnvironment, + packages: Vec, + prefix: &Prefix, + bin_dir: &BinDir, +) -> miette::Result { // Determine the shell to use for the invocation script let shell: ShellEnum = if cfg!(windows) { rattler_shell::shell::CmdExe.into() @@ -92,45 +145,42 @@ pub(crate) async fn install_environment( // Construct the reusable activation script for the shell and generate an // invocation script for each executable added by the package to the // environment. - let activation_script = create_activation_script(&prefix, shell.clone())?; + let activation_script = create_activation_script(prefix, shell.clone())?; let prefix_records = prefix.find_installed_packages(None).await?; - /// Processes prefix records to filter and collect executable files. - /// It performs the following steps: - /// 1. 
Filters records to only include direct dependencies - /// 2. Finds executables for each filtered record. - /// 3. Maps executables to a tuple of file name (as a string) and file path. - /// 4. Filters tuples to include only those whose names are in the `exposed` values. - /// 5. Collects the resulting tuples into a vector of executables. + // Processes prefix records to filter and collect executable files. let executables: Vec<(String, PathBuf)> = prefix_records .into_iter() + // Filters records to only include direct dependencies .filter(|record| packages.contains(&record.repodata_record.package_record.name)) - .flat_map(|record| find_executables(&prefix, record)) + // Finds executables for each filtered record. + .flat_map(|record| global::find_executables(prefix, &record)) + // Maps executables to a tuple of file name (as a string) and file path. .filter_map(|path| { - path.file_name() - .and_then(|name| name.to_str()) + path.file_stem() + .and_then(OsStr::to_str) .map(|name| (name.to_string(), path.clone())) }) - .filter(|(name, path)| exposed.values().contains(&name)) + // Filters tuples to include only those whose names are in the `exposed` values + .filter(|(name, path)| parsed_environment.exposed.values().contains(&name)) .collect(); - let script_mapping = exposed - .into_iter() + let script_mapping = parsed_environment + .exposed + .iter() .map(|(exposed_name, entry_point)| { script_exec_mapping( exposed_name, entry_point, executables.clone(), bin_dir, - environment_name, + env_name, ) }) .collect::>>()?; - create_executable_scripts(&script_mapping, &prefix, &shell, activation_script).await?; - - Ok(()) + create_executable_scripts(&script_mapping, prefix, &shell, activation_script).await } /// Maps an entry point in the environment to a concrete `ScriptExecMapping`. @@ -140,12 +190,13 @@ pub(crate) async fn install_environment( /// the original executable. 
/// # Returns /// -/// A `miette::Result` containing the `ScriptExecMapping` if the entry point is found, -/// or an error if it is not. +/// A `miette::Result` containing the `ScriptExecMapping` if the entry point is +/// found, or an error if it is not. /// /// # Errors /// -/// Returns an error if the entry point is not found in the list of executable names. +/// Returns an error if the entry point is not found in the list of executable +/// names. fn script_exec_mapping( exposed_name: &ExposedKey, entry_point: &str, @@ -185,55 +236,14 @@ fn create_activation_script(prefix: &Prefix, shell: ShellEnum) -> miette::Result Ok(script) } -/// Mapping from the global script location to an executable in a package environment . +/// Mapping from the global script location to an executable in a package +/// environment . #[derive(Debug)] pub struct ScriptExecMapping { pub global_script_path: PathBuf, pub original_executable: PathBuf, } -/// Find the executable scripts within the specified package installed in this -/// conda prefix. -fn find_executables(prefix: &Prefix, prefix_package: PrefixRecord) -> Vec { - prefix_package - .files - .into_iter() - .filter(|relative_path| is_executable(prefix, relative_path)) - .collect() -} - -fn is_executable(prefix: &Prefix, relative_path: &Path) -> bool { - // Check if the file is in a known executable directory. 
- let binary_folders = if cfg!(windows) { - &([ - "", - "Library/mingw-w64/bin/", - "Library/usr/bin/", - "Library/bin/", - "Scripts/", - "bin/", - ][..]) - } else { - &(["bin"][..]) - }; - - let parent_folder = match relative_path.parent() { - Some(dir) => dir, - None => return false, - }; - - if !binary_folders - .iter() - .any(|bin_path| Path::new(bin_path) == parent_folder) - { - return false; - } - - // Check if the file is executable - let absolute_path = prefix.root().join(relative_path); - is_executable::is_executable(absolute_path) -} - /// Returns the string to add for all arguments passed to the script fn get_catch_all_arg(shell: &ShellEnum) -> &str { match shell { @@ -244,7 +254,7 @@ fn get_catch_all_arg(shell: &ShellEnum) -> &str { } /// For each executable provided, map it to the installation path for its global -/// binary script. +/// executable script. async fn map_executables_to_global_bin_scripts( package_executables: impl IntoIterator, bin_dir: &BinDir, @@ -305,12 +315,21 @@ async fn map_executables_to_global_bin_scripts( /// Create the executable scripts by modifying the activation script /// to activate the environment and run the executable. +/// +/// Returns true if a change was made. async fn create_executable_scripts( mapped_executables: &[ScriptExecMapping], prefix: &Prefix, shell: &ShellEnum, activation_script: String, -) -> miette::Result<()> { +) -> miette::Result { + let mut changed = false; + enum AddedOrChanged { + Unchanged, + Added, + Changed, + } + for ScriptExecMapping { global_script_path, original_executable, @@ -335,12 +354,31 @@ async fn create_executable_scripts( // wrap the script contents in `@echo off` and `setlocal` to prevent echoing the // script and to prevent leaking environment variables into the // parent shell (e.g. 
PATH would grow longer and longer) - script = format!("@echo off\nsetlocal\n{}\nendlocal", script); + script = format!( + "@echo off\nsetlocal\n{}\nset exitcode=%ERRORLEVEL%\nendlocal\nexit %exitcode%", + script.trim() + ); } - tokio::fs::write(&global_script_path, script) - .await - .into_diagnostic()?; + let added_or_changed = if global_script_path.exists() { + match tokio::fs::read_to_string(global_script_path).await { + Ok(previous_script) if previous_script != script => AddedOrChanged::Changed, + Ok(_) => AddedOrChanged::Unchanged, + Err(_) => AddedOrChanged::Changed, + } + } else { + AddedOrChanged::Added + }; + + if matches!( + added_or_changed, + AddedOrChanged::Changed | AddedOrChanged::Added + ) { + tokio::fs::write(&global_script_path, script) + .await + .into_diagnostic()?; + changed = true; + } #[cfg(unix)] { @@ -348,8 +386,26 @@ async fn create_executable_scripts( std::fs::set_permissions(global_script_path, std::fs::Permissions::from_mode(0o755)) .into_diagnostic()?; } + + let executable_name = global_script_path + .file_stem() + .and_then(OsStr::to_str) + .expect("must always have at least a name"); + match added_or_changed { + AddedOrChanged::Unchanged => {} + AddedOrChanged::Added => eprintln!( + "{}Added executable '{}'.", + console::style(console::Emoji("✔ ", "")).green(), + executable_name + ), + AddedOrChanged::Changed => eprintln!( + "{}Updated executable '{}'.", + console::style(console::Emoji("~ ", "")).yellow(), + executable_name + ), + } } - Ok(()) + Ok(changed) } /// Warn user on dangerous package installations, interactive yes no prompt @@ -384,14 +440,20 @@ pub(crate) fn prompt_user_to_continue( Ok(true) } -pub(crate) async fn sync( - env_root: &EnvRoot, - project: &global::Project, - bin_dir: &BinDir, - config: &Config, - gateway: &rattler_repodata_gateway::Gateway, - auth_client: &reqwest_middleware::ClientWithMiddleware, -) -> Result<(), miette::Error> { +pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), 
miette::Error> { + // Create directories + let bin_dir = BinDir::from_env().await?; + let env_root = EnvRoot::from_env().await?; + + let project = global::Project::discover_or_create(&bin_dir, &env_root, assume_yes) + .await? + .with_cli_config(config.clone()); + + // Fetch the repodata + let (_, auth_client) = build_reqwest_clients(Some(config)); + + let gateway = config.gateway(auth_client.clone()); + // Prune environments that are not listed env_root .prune(project.environments().keys().cloned()) @@ -419,14 +481,14 @@ pub(crate) async fn sync( .into_diagnostic() .wrap_err_with(|| format!("Could not remove {}", &file.display()))?; eprintln!( - "{} Remove binary '{file_name}'.", - console::style(console::Emoji("✔", " ")).green() + "{}Remove executable '{file_name}'.", + console::style(console::Emoji("✔ ", "")).green() ); } } - for (environment_name, environment) in project.environments() { - let specs = environment + for (env_name, parsed_environment) in project.environments() { + let specs = parsed_environment .dependencies .clone() .into_iter() @@ -444,63 +506,118 @@ pub(crate) async fn sync( }) .collect::, miette::Report>>()?; - let channels = environment - .channels() - .into_iter() - .map(|channel| channel.clone().into_channel(config.global_channel_config())) - .collect_vec(); - - let repodata = await_in_progress("querying repodata ", |_| async { - gateway - .query( - channels, - [environment.platform(), Platform::NoArch], - specs.values().cloned().collect_vec(), - ) - .recursive(true) - .await - .into_diagnostic() - }) - .await?; + let env_dir = EnvDir::new(env_root.clone(), env_name.clone()).await?; + let prefix = Prefix::new(env_dir.path()); - // Determine virtual packages of the current platform - let virtual_packages = VirtualPackage::detect(&VirtualPackageOverrides::default()) - .into_diagnostic() - .context("failed to determine virtual packages")? 
- .iter() - .cloned() - .map(GenericVirtualPackage::from) - .collect(); - - // Solve the environment - let solver_specs = specs.clone(); - let solved_records = tokio::task::spawn_blocking(move || { - wrap_in_progress("solving environment", move || { - Solver.solve(SolverTask { - specs: solver_specs.values().cloned().collect_vec(), - virtual_packages, - ..SolverTask::from_iter(&repodata) - }) - }) - .into_diagnostic() - .context("failed to solve environment") - }) - .await - .into_diagnostic()??; - - let packages = specs.keys().cloned().collect(); - - install_environment( - &environment_name, - &environment.exposed, - packages, - solved_records.clone(), - auth_client.clone(), - environment.platform(), - bin_dir, + let prefix_records = prefix.find_installed_packages(Some(50)).await?; + + if !specs_match_local_environment(&specs, prefix_records, parsed_environment.platform()) { + install_environment( + &specs, + &env_name, + &parsed_environment, + auth_client.clone(), + &prefix, + config, + &gateway, + ) + .await?; + } + + expose_executables( + &env_name, + &parsed_environment, + specs.keys().cloned().collect(), + &prefix, + &bin_dir, ) .await?; } Ok(()) } + +/// Checks if the local environment matches the given specifications. +/// +/// This function verifies that all the given specifications are present in the +/// local environment's prefix records and that there are no extra entries in +/// the prefix records that do not match any of the specifications. 
+fn specs_match_local_environment>( + specs: &IndexMap, + prefix_records: Vec, + platform: Option, +) -> bool { + // Check whether all specs in the manifest are present in the installed + // environment + let specs_in_manifest_are_present = specs.iter().all(|(name, spec)| { + prefix_records + .iter() + .any(|record| spec.matches(record.as_ref())) + }); + + if !specs_in_manifest_are_present { + return false; + } + + // Check whether all packages in the installed environment have the correct + // platform + let platform_specs_match_env = prefix_records.iter().all(|record| { + let Ok(package_platform) = Platform::from_str(&record.as_ref().package_record.subdir) + else { + return true; + }; + + match package_platform { + Platform::NoArch => true, + p if Some(p) == platform => true, + _ => false, + } + }); + + if !platform_specs_match_env { + return false; + } + + fn prune_dependencies>( + mut remaining_prefix_records: Vec, + matched_record: &T, + ) -> Vec { + let mut work_queue = Vec::from([matched_record.as_ref().clone()]); + + while let Some(current_record) = work_queue.pop() { + let dependencies = ¤t_record.as_ref().depends; + for dependency in dependencies { + let Ok(match_spec) = MatchSpec::from_str(dependency, ParseStrictness::Lenient) + else { + continue; + }; + let Some(index) = remaining_prefix_records + .iter() + .position(|record| match_spec.matches(&record.as_ref().package_record)) + else { + continue; + }; + + let matched_record = remaining_prefix_records.remove(index).as_ref().clone(); + work_queue.push(matched_record); + } + } + + remaining_prefix_records + } + + // Process each spec and remove matched entries and their dependencies + let remaining_prefix_records = specs.iter().fold(prefix_records, |mut acc, (name, spec)| { + let Some(index) = acc.iter().position(|record| { + record.as_ref().package_record.name == *name && spec.matches(record.as_ref()) + }) else { + return acc; + }; + let matched_record = acc.swap_remove(index); + prune_dependencies(acc, 
&matched_record) + }); + + // If there are no remaining prefix records, then this means that + // the environment doesn't contain records that don't match the manifest + remaining_prefix_records.is_empty() +} diff --git a/src/global/mod.rs b/src/global/mod.rs index 34bfb8a47..764fe402b 100644 --- a/src/global/mod.rs +++ b/src/global/mod.rs @@ -5,8 +5,54 @@ mod common; mod install; mod project; +use crate::prefix::Prefix; pub(crate) use common::{ channel_name_from_prefix, find_designated_package, BinDir, EnvDir, EnvRoot, }; pub(crate) use install::sync; pub(crate) use project::{EnvironmentName, ExposedKey, Project, MANIFEST_DEFAULT_NAME}; +use rattler_conda_types::PrefixRecord; +use std::path::{Path, PathBuf}; + +/// Find the executable scripts within the specified package installed in this +/// conda prefix. +fn find_executables(prefix: &Prefix, prefix_package: &PrefixRecord) -> Vec { + prefix_package + .files + .iter() + .filter(|&relative_path| is_executable(prefix, relative_path)) + .cloned() + .collect() +} + +fn is_executable(prefix: &Prefix, relative_path: &Path) -> bool { + // Check if the file is in a known executable directory. 
+ let binary_folders = if cfg!(windows) { + &([ + "", + "Library/mingw-w64/bin/", + "Library/usr/bin/", + "Library/bin/", + "Scripts/", + "bin/", + ][..]) + } else { + &(["bin"][..]) + }; + + let parent_folder = match relative_path.parent() { + Some(dir) => dir, + None => return false, + }; + + if !binary_folders + .iter() + .any(|bin_path| Path::new(bin_path) == parent_folder) + { + return false; + } + + // Check if the file is executable + let absolute_path = prefix.root().join(relative_path); + is_executable::is_executable(absolute_path) +} diff --git a/src/global/project/environment.rs b/src/global/project/environment.rs index 826a9e1e5..4dc887292 100644 --- a/src/global/project/environment.rs +++ b/src/global/project/environment.rs @@ -5,11 +5,11 @@ use miette::Diagnostic; use pixi_spec::PixiSpec; use rattler_conda_types::PackageName; use regex::Regex; -use serde::{self, Deserialize, Deserializer}; +use serde::{self, Deserialize, Deserializer, Serialize}; use thiserror::Error; /// Represents the name of an environment. 
-#[derive(Debug, Clone, Eq, PartialEq, Hash)] +#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)] pub(crate) struct EnvironmentName(String); impl EnvironmentName { diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index 5a503bbcd..35789a7f6 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -1,21 +1,35 @@ use std::{ env, - fmt::Formatter, - fs, + ffi::OsStr, + fmt::{Debug, Formatter}, path::{Path, PathBuf}, + str::FromStr, sync::OnceLock, }; pub(crate) use environment::EnvironmentName; use indexmap::IndexMap; +use itertools::Itertools; use manifest::Manifest; -use miette::IntoDiagnostic; -pub(crate) use parsed_manifest::ExposedKey; -use parsed_manifest::ParsedEnvironment; -use pixi_config::Config; +use miette::{miette, Context, IntoDiagnostic}; +use once_cell::sync::Lazy; +use parsed_manifest::ParsedManifest; +pub(crate) use parsed_manifest::{ExposedKey, ParsedEnvironment}; +use pixi_config::{default_channel_config, home_path, Config}; +use pixi_manifest::PrioritizedChannel; +use rattler_conda_types::{Channel, NamedChannelOrUrl, PackageName, Platform, PrefixRecord}; +use rattler_digest::digest::typenum::Exp; use rattler_repodata_gateway::Gateway; +use regex::Regex; use reqwest_middleware::ClientWithMiddleware; -use std::fmt::Debug; +use tokio_stream::{wrappers::ReadDirStream, StreamExt}; +use url::Url; + +use super::{find_executables, BinDir, EnvRoot}; +use crate::{ + global::{common::is_text, EnvDir}, + prefix::Prefix, +}; mod document; mod environment; @@ -25,10 +39,10 @@ mod parsed_manifest; pub(crate) const MANIFEST_DEFAULT_NAME: &str = "pixi-global.toml"; -/// The pixi global project, this main struct to interact with the pixi global project. -/// This struct holds the `Manifest` and has functions to modify -/// or request information from it. This allows in the future to have multiple manifests -/// linked to a pixi global project. 
+/// The pixi global project, this main struct to interact with the pixi global +/// project. This struct holds the `Manifest` and has functions to modify +/// or request information from it. This allows in the future to have multiple +/// manifests linked to a pixi global project. #[derive(Clone)] pub struct Project { /// Root folder of the project @@ -54,6 +68,179 @@ impl Debug for Project { } } +/// Intermediate struct to store all the binaries that are exposed. +#[derive(Debug)] +struct ExposedData { + env_name: EnvironmentName, + platform: Option, + channel: PrioritizedChannel, + package: PackageName, + exposed: ExposedKey, + executable_name: String, +} + +impl ExposedData { + /// Constructs an `ExposedData` instance from a exposed script path. + /// + /// This function extracts metadata from the exposed script path, including the + /// environment name, platform, channel, and package information, by reading + /// the associated `conda-meta` directory. + pub async fn from_exposed_path(path: &Path, env_root: &EnvRoot) -> miette::Result { + let exposed = path + .file_stem() + .and_then(OsStr::to_str) + .ok_or_else(|| miette::miette!("Could not get file stem of {}", path.display())) + .and_then(ExposedKey::from_str)?; + let executable_path = extract_executable_from_script(path)?; + + let executable = executable_path + .file_stem() + .and_then(OsStr::to_str) + .map(String::from) + .ok_or_else(|| miette::miette!("Could not get file stem of {}", path.display()))?; + + let env_path = determine_env_path(&executable_path, env_root.path())?; + let env_name = env_path + .file_name() + .and_then(OsStr::to_str) + .ok_or_else(|| { + miette::miette!( + "executable path's grandparent '{}' has no file name", + executable_path.display() + ) + }) + .and_then(|env| EnvironmentName::from_str(env).into_diagnostic())?; + + let conda_meta = env_path.join("conda-meta"); + + let bin_env_dir = EnvDir::new(env_root.clone(), env_name.clone()).await?; + let prefix = 
Prefix::new(bin_env_dir.path()); + + let (platform, channel, package) = + package_from_conda_meta(&conda_meta, &executable, &prefix).await?; + + Ok(ExposedData { + env_name, + platform, + channel, + package, + executable_name: executable, + exposed, + }) + } +} + +/// Extracts the executable path from a script file. +/// +/// This function reads the content of the script file and attempts to extract +/// the path of the executable it references. It is used to determine +/// the actual binary path from a wrapper script. +fn extract_executable_from_script(script: &Path) -> miette::Result { + // Read the script file into a string + let script_content = std::fs::read_to_string(script) + .into_diagnostic() + .wrap_err_with(|| format!("Could not read {}", script.display()))?; + + // Compile the regex pattern + #[cfg(unix)] + const PATTERN: &str = r#""([^"]+)" "\$@""#; + #[cfg(windows)] + const PATTERN: &str = r#"@"([^"]+)" %/*"#; + static RE: Lazy = Lazy::new(|| Regex::new(PATTERN).expect("Failed to compile regex")); + + // Apply the regex to the script content + if let Some(caps) = RE.captures(&script_content) { + if let Some(matched) = caps.get(1) { + return Ok(PathBuf::from(matched.as_str())); + } + } + + // Return an error if the executable path could not be extracted + miette::bail!( + "Failed to extract executable path from script {}", + script.display() + ) +} + +fn determine_env_path(executable_path: &Path, env_root: &Path) -> miette::Result { + let mut current_path = executable_path; + + while let Some(parent) = current_path.parent() { + if parent == env_root { + return Ok(current_path.to_owned()); + } + current_path = parent; + } + + miette::bail!( + "Couldn't determine environment path: no parent of '{}' has '{}' as its direct parent", + executable_path.display(), + env_root.display() + ) +} + +/// Extracts package metadata from the `conda-meta` directory for a given executable. 
+/// +/// This function reads the `conda-meta` directory to find the package metadata +/// associated with the specified executable. It returns the platform, channel, and +/// package name of the executable. +async fn package_from_conda_meta( + conda_meta: &Path, + executable: &str, + prefix: &Prefix, +) -> miette::Result<(Option, PrioritizedChannel, PackageName)> { + let channel_config = default_channel_config(); + + let read_dir = tokio::fs::read_dir(conda_meta) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Couldn't read directory {}", conda_meta.display()))?; + let mut entries = ReadDirStream::new(read_dir); + + while let Some(entry) = entries.next().await { + let path = entry + .into_diagnostic() + .wrap_err_with(|| { + format!("Couldn't read file from directory {}", conda_meta.display()) + })? + .path(); + // Check if the entry is a file and has a .json extension + if path.is_file() && path.extension().and_then(OsStr::to_str) == Some("json") { + let content = std::fs::read_to_string(&path).into_diagnostic()?; + let prefix_record = PrefixRecord::from_path(&path) + .into_diagnostic() + .wrap_err_with(|| format!("Could not parse json from {}", path.display()))?; + + let binaries = find_executables(prefix, &prefix_record); + let Some(found_executable) = binaries + .iter() + .find(|exe_path| exe_path.file_stem().and_then(OsStr::to_str) == Some(executable)) + else { + continue; + }; + + let platform = + match Platform::from_str(&prefix_record.repodata_record.package_record.subdir) { + Ok(Platform::NoArch) => None, + Ok(platform) if platform == Platform::current() => None, + Err(_) => None, + Ok(p) => Some(p), + }; + + let channel: PrioritizedChannel = + NamedChannelOrUrl::from_str(&prefix_record.repodata_record.channel) + .into_diagnostic()? 
+ .into(); + + let name = prefix_record.repodata_record.package_record.name; + + return Ok((platform, channel, name)); + } + } + + miette::bail!("Could not find {executable} in {}", conda_meta.display()) +} + impl Project { /// Constructs a new instance from an internal manifest representation fn from_manifest(manifest: Manifest) -> Self { @@ -80,31 +267,93 @@ impl Project { Ok(Self::from_manifest(manifest)) } - /// Discovers the project manifest file in path set by `PIXI_GLOBAL_MANIFESTS` - /// or alternatively at `~/.pixi/manifests/pixi-global.toml`. - /// If the manifest doesn't exist yet, and empty one will be created. - pub(crate) fn discover() -> miette::Result { + /// Discovers the project manifest file in path at + /// `~/.pixi/manifests/pixi-global.toml`. If the manifest doesn't exist + /// yet, and the function will try to create one from the existing + /// installation. If that one fails, an empty one will be created. + pub(crate) async fn discover_or_create( + bin_dir: &BinDir, + env_root: &EnvRoot, + assume_yes: bool, + ) -> miette::Result { let manifest_dir = Self::manifest_dir()?; - fs::create_dir_all(&manifest_dir).into_diagnostic()?; + tokio::fs::create_dir_all(&manifest_dir) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Couldn't create directory {}", manifest_dir.display()))?; let manifest_path = manifest_dir.join(MANIFEST_DEFAULT_NAME); if !manifest_path.exists() { - fs::File::create(&manifest_path).into_diagnostic()?; + let warn = console::style(console::Emoji("⚠️ ", "")).yellow(); + let prompt = format!( + "{} You don't have a global manifest yet.\n\ + Do you want to create one based on your existing installation?\n\ + Your existing installation will be removed if you decide against it.", + console::style(console::Emoji("⚠️ ", "")).yellow(), + ); + if !env_root.directories().await?.is_empty() + && (assume_yes + || dialoguer::Confirm::new() + .with_prompt(prompt) + .default(true) + .show_default(true) + .interact() + 
.into_diagnostic()?) + { + return Self::try_from_existing_installation(&manifest_path, bin_dir, env_root) + .await + .wrap_err_with(|| { + "Failed to create global manifest from existing installation" + }); + } + + tokio::fs::File::create(&manifest_path) + .await + .into_diagnostic() + .wrap_err_with(|| format!("Couldn't create file {}", manifest_path.display()))?; } + Self::from_path(&manifest_path) } + async fn try_from_existing_installation( + manifest_path: &Path, + bin_dir: &BinDir, + env_root: &EnvRoot, + ) -> miette::Result { + let futures = bin_dir + .files() + .await? + .into_iter() + .filter_map(|path| match is_text(&path) { + Ok(true) => Some(Ok(path)), // Success and is text, continue with path + Ok(false) => None, // Success and isn't text, filter out + Err(e) => Some(Err(e)), // Failure, continue with error + }) + .map(|result| async move { + match result { + Ok(path) => ExposedData::from_exposed_path(&path, env_root).await, + Err(e) => Err(e), + } + }); + + let exposed_binaries: Vec = futures::future::try_join_all(futures).await?; + + let parsed_manifest = ParsedManifest::from(exposed_binaries); + let toml = toml_edit::ser::to_string_pretty(&parsed_manifest).into_diagnostic()?; + tokio::fs::write(&manifest_path, &toml) + .await + .into_diagnostic()?; + Self::from_str(manifest_path, &toml) + } + /// Get default dir for the pixi global manifest pub(crate) fn manifest_dir() -> miette::Result { - env::var("PIXI_GLOBAL_MANIFESTS") - .map(PathBuf::from) - .or_else(|_| { - dirs::home_dir() - .map(|dir| dir.join(".pixi/manifests")) - .ok_or_else(|| miette::miette!("Could not get home directory")) - }) + home_path() + .map(|dir| dir.join("manifests")) + .ok_or_else(|| miette::miette!("Could not get home directory")) } /// Loads a project from manifest file. 
@@ -132,9 +381,10 @@ impl Project { mod tests { use std::io::Write; - use super::*; use fake::{faker::filesystem::zh_tw::FilePath, Fake}; + use super::*; + const SIMPLE_MANIFEST: &str = r#" [envs.python] channels = ["conda-forge"] @@ -158,7 +408,7 @@ mod tests { let manifest_path = tempdir.path().join(MANIFEST_DEFAULT_NAME); // Create and write global manifest - let mut file = fs::File::create(&manifest_path).unwrap(); + let mut file = std::fs::File::create(&manifest_path).unwrap(); file.write_all(SIMPLE_MANIFEST.as_bytes()).unwrap(); let project = Project::from_path(&manifest_path).unwrap(); @@ -169,18 +419,6 @@ mod tests { assert_eq!(canonical_root, canonical_manifest_parent); } - #[test] - fn test_project_discover() { - let tempdir = tempfile::tempdir().unwrap(); - let manifest_dir = tempdir.path(); - env::set_var("PIXI_GLOBAL_MANIFESTS", manifest_dir); - let project = Project::discover().unwrap(); - assert!(project.manifest.path.exists()); - let expected_manifest_path = - dunce::canonicalize(manifest_dir.join(MANIFEST_DEFAULT_NAME)).unwrap(); - assert_eq!(project.manifest.path, expected_manifest_path) - } - #[test] fn test_project_from_manifest() { let manifest_path: PathBuf = FilePath().fake(); diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs index 22b532df6..454a12221 100644 --- a/src/global/project/parsed_manifest.rs +++ b/src/global/project/parsed_manifest.rs @@ -1,23 +1,55 @@ use std::fmt; +use std::path::PathBuf; use std::str::FromStr; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; +use miette::IntoDiagnostic; use pixi_manifest::PrioritizedChannel; use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform}; use serde::de::{Deserialize, DeserializeSeed, Deserializer, MapAccess, Visitor}; +use serde::Serialize; use serde_with::{serde_as, serde_derive::Deserialize}; use super::environment::EnvironmentName; use super::error::ManifestError; +use super::ExposedData; use pixi_spec::PixiSpec; /// 
Describes the contents of a parsed global project manifest. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize)] pub struct ParsedManifest { /// The environments the project can create. - environments: IndexMap, + envs: IndexMap, +} + +impl From for ParsedManifest +where + I: IntoIterator, +{ + fn from(value: I) -> Self { + let mut envs: IndexMap = IndexMap::new(); + for data in value { + let ExposedData { + env_name, + platform, + channel, + package, + executable_name, + exposed, + } = data; + let mut parsed_environment = envs.entry(env_name).or_default(); + parsed_environment.channels.insert(channel); + parsed_environment.platform = platform; + parsed_environment + .dependencies + .insert(package, PixiSpec::default()); + parsed_environment.exposed.insert(exposed, executable_name); + } + + Self { envs } + } } impl ParsedManifest { @@ -27,7 +59,7 @@ impl ParsedManifest { } pub(crate) fn environments(&self) -> IndexMap { - self.environments.clone() + self.envs.clone() } } @@ -64,13 +96,13 @@ impl<'de> serde::Deserialize<'de> for ParsedManifest { } Ok(Self { - environments: manifest.envs, + envs: manifest.envs, }) } } #[serde_as] -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Serialize, Debug, Clone, Default)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] pub(crate) struct ParsedEnvironment { #[serde_as(as = "IndexSet")] @@ -84,12 +116,8 @@ pub(crate) struct ParsedEnvironment { impl ParsedEnvironment { // If `self.platform` is `None` is not given, the current platform is used - pub(crate) fn platform(&self) -> Platform { - if let Some(platform) = self.platform { - platform - } else { - Platform::current() - } + pub(crate) fn platform(&self) -> Option { + self.platform } /// Returns the channels associated with this collection. 
@@ -98,7 +126,7 @@ impl ParsedEnvironment { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)] pub(crate) struct ExposedKey(String); impl fmt::Display for ExposedKey { @@ -108,11 +136,11 @@ impl fmt::Display for ExposedKey { } impl FromStr for ExposedKey { - type Err = String; + type Err = miette::Report; fn from_str(value: &str) -> Result { if value == "pixi" { - Err("The key 'pixi' is not allowed in the exposed map".to_string()) + miette::bail!("The key 'pixi' is not allowed in the exposed map"); } else { Ok(ExposedKey(value.to_string())) } diff --git a/tests/integration/common.py b/tests/integration/common.py index 25448ca83..95f8ce518 100644 --- a/tests/integration/common.py +++ b/tests/integration/common.py @@ -1,6 +1,7 @@ from enum import IntEnum from pathlib import Path import subprocess +import os PIXI_VERSION = "0.29.0" @@ -9,6 +10,7 @@ class ExitCode(IntEnum): SUCCESS = 0 FAILURE = 1 INCORRECT_USAGE = 2 + LIFE = 42 def verify_cli_command( @@ -18,8 +20,10 @@ def verify_cli_command( stdout_excludes: str | list[str] | None = None, stderr_contains: str | list[str] | None = None, stderr_excludes: str | list[str] | None = None, + env: dict[str, str] | None = None, ) -> None: - process = subprocess.run(command, capture_output=True, text=True) + complete_env = os.environ if env is None else os.environ | env + process = subprocess.run(command, capture_output=True, text=True, env=complete_env) stdout, stderr, returncode = process.stdout, process.stderr, process.returncode print(f"command: {command}, stdout: {stdout}, stderr: {stderr}, code: {returncode}") if expected_exit_code is not None: diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 34799d8db..08124f87f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -6,3 +6,8 @@ @pytest.fixture def pixi() -> Path: return Path(__file__).parent.joinpath("../../.pixi/target/release/pixi") + + +@pytest.fixture 
+def test_data() -> Path: + return Path(__file__).parent.joinpath("test_data").resolve() diff --git a/tests/integration/test_global.py b/tests/integration/test_global.py new file mode 100644 index 000000000..a3ba059ab --- /dev/null +++ b/tests/integration/test_global.py @@ -0,0 +1,190 @@ +from pathlib import Path +import tomllib +import tomli_w +from .common import verify_cli_command, ExitCode +import platform + + +def test_global_sync_dependencies(pixi: Path, tmp_path: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = """ + [envs.test] + channels = ["conda-forge"] + [envs.test.dependencies] + python = "3.12" + + [envs.test.exposed] + "python-injected" = "python" + """ + parsed_toml = tomllib.loads(toml) + manifest.write_text(toml) + exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" + python_injected = tmp_path / "bin" / exposed_exec + + # Test basic commands + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command( + [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" + ) + verify_cli_command([python_injected, "-c", "import numpy"], ExitCode.FAILURE, env=env) + + # Add numpy + parsed_toml["envs"]["test"]["dependencies"]["numpy"] = "*" + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command([python_injected, "-c", "import numpy"], ExitCode.SUCCESS, env=env) + + # Remove numpy again + del parsed_toml["envs"]["test"]["dependencies"]["numpy"] + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command([python_injected, "-c", "import numpy"], ExitCode.FAILURE, env=env) + + # Remove python + del parsed_toml["envs"]["test"]["dependencies"]["python"] + 
manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command( + [pixi, "global", "sync"], + ExitCode.FAILURE, + env=env, + stderr_contains="Could not find python in test", + ) + + +def test_global_sync_platform(pixi: Path, tmp_path: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = """ + [envs.test] + channels = ["conda-forge"] + platform = "win-64" + [envs.test.dependencies] + binutils = "2.40" + [envs.test.exposed] + """ + parsed_toml = tomllib.loads(toml) + manifest.write_text(toml) + # Exists on win-64 + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + + # Does not exist on osx-64 + parsed_toml["envs"]["test"]["platform"] = "osx-64" + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command( + [pixi, "global", "sync"], + ExitCode.FAILURE, + env=env, + stderr_contains="No candidates were found", + ) + + +def test_global_sync_change_expose(pixi: Path, tmp_path: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = """ + [envs.test] + channels = ["conda-forge"] + [envs.test.dependencies] + python = "3.12" + + [envs.test.exposed] + "python-injected" = "python" + """ + parsed_toml = tomllib.loads(toml) + manifest.write_text(toml) + exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" + python_injected = tmp_path / "bin" / exposed_exec + + # Test basic commands + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command( + [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" + ) + verify_cli_command([python_injected], ExitCode.SUCCESS, env=env) + + # Add another expose + python_in_disguise_str = ( + "python-in-disguise.bat" if platform.system() == "Windows" else 
"python-in-disguise" + ) + python_in_disguise = tmp_path / "bin" / python_in_disguise_str + parsed_toml["envs"]["test"]["exposed"][python_in_disguise_str] = "python" + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command([python_in_disguise, "--version"], ExitCode.SUCCESS, env=env) + + # Remove expose again + del parsed_toml["envs"]["test"]["exposed"][python_in_disguise_str] + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + assert not python_in_disguise.is_file() + + +def test_global_sync_manually_remove_binary(pixi: Path, tmp_path: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = """ + [envs.test] + channels = ["conda-forge"] + [envs.test.dependencies] + python = "3.12" + + [envs.test.exposed] + "python-injected" = "python" + """ + manifest.write_text(toml) + exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" + python_injected = tmp_path / "bin" / exposed_exec + + # Test basic commands + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command( + [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" + ) + verify_cli_command([python_injected], ExitCode.SUCCESS, env=env) + + # Remove binary manually + python_injected.unlink() + + # Binary is added again + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + verify_cli_command( + [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" + ) + + +def test_global_sync_migrate(pixi: Path, tmp_path: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = """ + 
[envs.test] + channels = ["https://conda.anaconda.org/conda-forge"] + [envs.test.dependencies] + ripgrep = "*" + python = "*" + + [envs.test.exposed] + rg = "rg" + grep = "rg" + python = "python" + python3 = "python" + """ + manifest.write_text(toml) + verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) + + # Test migration from existing environments + original_manifest = tomllib.loads(manifest.read_text()) + manifest.unlink() + verify_cli_command([pixi, "global", "sync", "--assume-yes"], ExitCode.SUCCESS, env=env) + migrated_manifest = tomllib.loads(manifest.read_text()) + assert original_manifest == migrated_manifest diff --git a/tests/integration/test_main_cli.py b/tests/integration/test_main_cli.py index be065ddc0..1bb657e50 100644 --- a/tests/integration/test_main_cli.py +++ b/tests/integration/test_main_cli.py @@ -1,5 +1,4 @@ from pathlib import Path - from .common import verify_cli_command, ExitCode, PIXI_VERSION @@ -10,7 +9,7 @@ def test_pixi(pixi: Path) -> None: verify_cli_command([pixi, "--version"], ExitCode.SUCCESS, stdout_contains=PIXI_VERSION) -def test_project_commands(tmp_path: Path, pixi: Path) -> None: +def test_project_commands(pixi: Path, tmp_path: Path) -> None: manifest_path = tmp_path / "pixi.toml" # Create a new project verify_cli_command([pixi, "init", tmp_path], ExitCode.SUCCESS) @@ -164,15 +163,6 @@ def test_project_commands(tmp_path: Path, pixi: Path) -> None: ) -@pytest.mark.xfail(reason="`pixi global install` is not yet implemented", strict=True) -def test_global_install(pixi: Path) -> None: - # Install - verify_cli_command( - [pixi, "global", "install", "rattler-build"], - ExitCode.SUCCESS, - ) - - def test_search(pixi: Path) -> None: verify_cli_command( [pixi, "search", "rattler-build", "-c", "conda-forge"], @@ -186,7 +176,7 @@ def test_search(pixi: Path) -> None: ) -def test_simple_project_setup(tmp_path: Path, pixi: Path) -> None: +def test_simple_project_setup(pixi: Path, tmp_path: Path) -> None: manifest_path 
= tmp_path / "pixi.toml" # Create a new project verify_cli_command([pixi, "init", tmp_path], ExitCode.SUCCESS) @@ -288,7 +278,7 @@ def test_simple_project_setup(tmp_path: Path, pixi: Path) -> None: ) -def test_pixi_init_pyproject(tmp_path: Path, pixi: Path) -> None: +def test_pixi_init_pyproject(pixi: Path, tmp_path: Path) -> None: manifest_path = tmp_path / "pyproject.toml" # Create a new project verify_cli_command([pixi, "init", tmp_path, "--format", "pyproject"], ExitCode.SUCCESS) From ebda8b142eaf19b37f04b7a77f6eda0ebbcbb4d4 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 20 Sep 2024 08:51:44 +0200 Subject: [PATCH 5/8] fix: Remove `allow(unused` and fix resulting warnings (#2091) --- src/global/common.rs | 70 ++------------------------- src/global/install.rs | 30 ++++-------- src/global/mod.rs | 9 +--- src/global/project/environment.rs | 3 -- src/global/project/manifest.rs | 3 ++ src/global/project/mod.rs | 57 +++++++--------------- src/global/project/parsed_manifest.rs | 8 ++- src/lib.rs | 1 - src/repodata.rs | 10 ---- 9 files changed, 38 insertions(+), 153 deletions(-) delete mode 100644 src/repodata.rs diff --git a/src/global/common.rs b/src/global/common.rs index 6d326ee59..08d985d58 100644 --- a/src/global/common.rs +++ b/src/global/common.rs @@ -5,24 +5,7 @@ use std::{ use itertools::Itertools; use miette::{Context, IntoDiagnostic}; -use pixi_progress::{await_in_progress, global_multi_progress}; -use rattler::{ - install::{DefaultProgressFormatter, IndicatifReporter, Installer}, - package_cache::PackageCache, -}; -use rattler_conda_types::{ - Channel, ChannelConfig, PackageName, Platform, PrefixRecord, RepoDataRecord, -}; -use rattler_shell::{ - activation::{ActivationVariables, Activator, PathModificationBehavior}, - shell::ShellEnum, -}; -use reqwest_middleware::ClientWithMiddleware; -use tokio::io::AsyncReadExt; -use crate::{ - cli::project::environment, prefix::Prefix, repodata, 
rlimit::try_increase_rlimit_to_sensible, -}; use pixi_config::home_path; use super::{EnvironmentName, ExposedKey}; @@ -84,7 +67,7 @@ impl BinDir { executable_script_path } - pub(crate) async fn print_executables_available( + pub async fn print_executables_available( &self, executables: Vec, ) -> miette::Result<()> { @@ -206,7 +189,6 @@ impl EnvRoot { /// A global environment directory pub(crate) struct EnvDir { - root: EnvRoot, path: PathBuf, } @@ -219,23 +201,7 @@ impl EnvDir { let path = root.path().join(environment_name.as_str()); tokio::fs::create_dir_all(&path).await.into_diagnostic()?; - Ok(Self { root, path }) - } - - /// Initialize a global environment directory from an existing path - pub(crate) fn try_from_existing( - root: EnvRoot, - environment_name: EnvironmentName, - ) -> miette::Result { - let path = root.path().join(environment_name.as_str()); - if !path.is_dir() { - return Err(miette::miette!( - "Directory does not exist: {}", - path.display() - )); - } - - Ok(Self { root, path }) + Ok(Self { path }) } /// Construct the path to the env directory for the environment @@ -245,36 +211,6 @@ impl EnvDir { } } -/// Get the friendly channel name of a [`PrefixRecord`] -/// -/// # Returns -/// -/// The friendly channel name of the given prefix record -pub(crate) fn channel_name_from_prefix( - prefix_package: &PrefixRecord, - channel_config: &ChannelConfig, -) -> String { - Channel::from_str(&prefix_package.repodata_record.channel, channel_config) - .map(|ch| repodata::friendly_channel_name(&ch)) - .unwrap_or_else(|_| prefix_package.repodata_record.channel.clone()) -} - -/// Find the designated package in the given [`Prefix`] -/// -/// # Returns -/// -/// The PrefixRecord of the designated package -pub(crate) async fn find_designated_package( - prefix: &Prefix, - package_name: &PackageName, -) -> miette::Result { - let prefix_records = prefix.find_installed_packages(None).await?; - prefix_records - .into_iter() - .find(|r| 
r.repodata_record.package_record.name == *package_name) - .ok_or_else(|| miette::miette!("could not find {} in prefix", package_name.as_source())) -} - /// Checks if a file is binary by reading the first 1024 bytes and checking for null bytes. pub(crate) fn is_binary(file_path: impl AsRef) -> miette::Result { let mut file = std::fs::File::open(&file_path) @@ -298,7 +234,7 @@ pub(crate) fn is_text(file_path: impl AsRef) -> miette::Result { #[cfg(test)] mod tests { use super::*; - use std::path::Path; + use tempfile::tempdir; #[tokio::test] diff --git a/src/global/install.rs b/src/global/install.rs index 7c5587e76..54b0bc9b8 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -1,19 +1,9 @@ -use std::{ - borrow::Borrow, - collections::HashMap, - ffi::OsStr, - iter, - path::{Path, PathBuf}, - str::FromStr, - time, -}; +use std::{collections::HashMap, ffi::OsStr, path::PathBuf, str::FromStr}; -use clap::Parser; -use distribution_types::Diagnostic; use indexmap::IndexMap; use itertools::Itertools; -use miette::{bail, Context, IntoDiagnostic}; -use pixi_config::{self, default_channel_config, Config, ConfigCli}; +use miette::{Context, IntoDiagnostic}; +use pixi_config::{self, default_channel_config, Config}; use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; use pixi_utils::reqwest::build_reqwest_clients; use rattler::{ @@ -22,7 +12,7 @@ use rattler::{ }; use rattler_conda_types::{ GenericVirtualPackage, MatchSpec, Matches, PackageName, ParseStrictness, Platform, - PrefixRecord, RepoDataRecord, + RepoDataRecord, }; use rattler_repodata_gateway::Gateway; use rattler_shell::{ @@ -35,17 +25,14 @@ use reqwest_middleware::ClientWithMiddleware; use super::{common::EnvRoot, project::ParsedEnvironment, EnvironmentName, ExposedKey}; use crate::{ - cli::{cli_config::ChannelsConfig, has_specs::HasSpecs, project::platform}, - global::{self, channel_name_from_prefix, find_designated_package, BinDir, EnvDir}, + global::{self, BinDir, EnvDir}, 
prefix::Prefix, rlimit::try_increase_rlimit_to_sensible, - task::ExecutableTask, }; /// Installs global environment records pub(crate) async fn install_environment( specs: &IndexMap, - env_name: &EnvironmentName, parsed_environment: &ParsedEnvironment, authenticated_client: ClientWithMiddleware, prefix: &Prefix, @@ -163,7 +150,7 @@ pub(crate) async fn expose_executables( .map(|name| (name.to_string(), path.clone())) }) // Filters tuples to include only those whose names are in the `exposed` values - .filter(|(name, path)| parsed_environment.exposed.values().contains(&name)) + .filter(|(name, _)| parsed_environment.exposed.values().contains(&name)) .collect(); let script_mapping = parsed_environment @@ -255,6 +242,7 @@ fn get_catch_all_arg(shell: &ShellEnum) -> &str { /// For each executable provided, map it to the installation path for its global /// executable script. +#[allow(unused)] async fn map_executables_to_global_bin_scripts( package_executables: impl IntoIterator, bin_dir: &BinDir, @@ -409,6 +397,7 @@ async fn create_executable_scripts( } /// Warn user on dangerous package installations, interactive yes no prompt +#[allow(unused)] pub(crate) fn prompt_user_to_continue( packages: &IndexMap, ) -> miette::Result { @@ -514,7 +503,6 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette if !specs_match_local_environment(&specs, prefix_records, parsed_environment.platform()) { install_environment( &specs, - &env_name, &parsed_environment, auth_client.clone(), &prefix, @@ -549,7 +537,7 @@ fn specs_match_local_environment>( ) -> bool { // Check whether all specs in the manifest are present in the installed // environment - let specs_in_manifest_are_present = specs.iter().all(|(name, spec)| { + let specs_in_manifest_are_present = specs.values().all(|spec| { prefix_records .iter() .any(|record| spec.matches(record.as_ref())) diff --git a/src/global/mod.rs b/src/global/mod.rs index 764fe402b..76930498f 100644 --- a/src/global/mod.rs +++ 
b/src/global/mod.rs @@ -1,16 +1,11 @@ -// TODO: remove this before merging to main -#![allow(unused)] - mod common; mod install; mod project; use crate::prefix::Prefix; -pub(crate) use common::{ - channel_name_from_prefix, find_designated_package, BinDir, EnvDir, EnvRoot, -}; +pub(crate) use common::{BinDir, EnvDir, EnvRoot}; pub(crate) use install::sync; -pub(crate) use project::{EnvironmentName, ExposedKey, Project, MANIFEST_DEFAULT_NAME}; +pub(crate) use project::{EnvironmentName, ExposedKey, Project}; use rattler_conda_types::PrefixRecord; use std::path::{Path, PathBuf}; diff --git a/src/global/project/environment.rs b/src/global/project/environment.rs index 4dc887292..974f5b1c2 100644 --- a/src/global/project/environment.rs +++ b/src/global/project/environment.rs @@ -1,9 +1,6 @@ use std::{fmt, str::FromStr}; -use indexmap::IndexMap; use miette::Diagnostic; -use pixi_spec::PixiSpec; -use rattler_conda_types::PackageName; use regex::Regex; use serde::{self, Deserialize, Deserializer, Serialize}; use thiserror::Error; diff --git a/src/global/project/manifest.rs b/src/global/project/manifest.rs index b8328dcba..90d098d11 100644 --- a/src/global/project/manifest.rs +++ b/src/global/project/manifest.rs @@ -9,6 +9,9 @@ use super::error::ManifestError; use super::MANIFEST_DEFAULT_NAME; use super::{document::ManifestSource, parsed_manifest::ParsedManifest}; +// TODO: remove +#[allow(unused)] + /// Handles the global project's manifest file. /// This struct is responsible for reading, parsing, editing, and saving the /// manifest. 
It encapsulates all logic related to the manifest's TOML format diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index 35789a7f6..46b078721 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -1,33 +1,26 @@ use std::{ - env, ffi::OsStr, fmt::{Debug, Formatter}, path::{Path, PathBuf}, str::FromStr, - sync::OnceLock, }; pub(crate) use environment::EnvironmentName; use indexmap::IndexMap; -use itertools::Itertools; use manifest::Manifest; -use miette::{miette, Context, IntoDiagnostic}; +use miette::{Context, IntoDiagnostic}; use once_cell::sync::Lazy; use parsed_manifest::ParsedManifest; pub(crate) use parsed_manifest::{ExposedKey, ParsedEnvironment}; -use pixi_config::{default_channel_config, home_path, Config}; +use pixi_config::{home_path, Config}; use pixi_manifest::PrioritizedChannel; -use rattler_conda_types::{Channel, NamedChannelOrUrl, PackageName, Platform, PrefixRecord}; -use rattler_digest::digest::typenum::Exp; -use rattler_repodata_gateway::Gateway; +use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform, PrefixRecord}; use regex::Regex; -use reqwest_middleware::ClientWithMiddleware; use tokio_stream::{wrappers::ReadDirStream, StreamExt}; -use url::Url; -use super::{find_executables, BinDir, EnvRoot}; +use super::{BinDir, EnvRoot}; use crate::{ - global::{common::is_text, EnvDir}, + global::{common::is_text, find_executables, EnvDir}, prefix::Prefix, }; @@ -47,12 +40,6 @@ pub(crate) const MANIFEST_DEFAULT_NAME: &str = "pixi-global.toml"; pub struct Project { /// Root folder of the project root: PathBuf, - /// Reqwest client shared for this project. - /// This is wrapped in a `OnceLock` to allow for lazy initialization. - client: OnceLock<(reqwest::Client, ClientWithMiddleware)>, - /// The repodata gateway to use for answering queries about repodata. - /// This is wrapped in a `OnceLock` to allow for lazy initialization. 
- repodata_gateway: OnceLock, /// The manifest for the project pub(crate) manifest: Manifest, /// The global configuration as loaded from the config file(s) @@ -189,8 +176,6 @@ async fn package_from_conda_meta( executable: &str, prefix: &Prefix, ) -> miette::Result<(Option, PrioritizedChannel, PackageName)> { - let channel_config = default_channel_config(); - let read_dir = tokio::fs::read_dir(conda_meta) .await .into_diagnostic() @@ -206,35 +191,32 @@ async fn package_from_conda_meta( .path(); // Check if the entry is a file and has a .json extension if path.is_file() && path.extension().and_then(OsStr::to_str) == Some("json") { - let content = std::fs::read_to_string(&path).into_diagnostic()?; let prefix_record = PrefixRecord::from_path(&path) .into_diagnostic() .wrap_err_with(|| format!("Could not parse json from {}", path.display()))?; - let binaries = find_executables(prefix, &prefix_record); - let Some(found_executable) = binaries + if find_executables(prefix, &prefix_record) .iter() - .find(|exe_path| exe_path.file_stem().and_then(OsStr::to_str) == Some(executable)) - else { - continue; - }; - - let platform = - match Platform::from_str(&prefix_record.repodata_record.package_record.subdir) { + .any(|exe_path| exe_path.file_stem().and_then(OsStr::to_str) == Some(executable)) + { + let platform = match Platform::from_str( + &prefix_record.repodata_record.package_record.subdir, + ) { Ok(Platform::NoArch) => None, Ok(platform) if platform == Platform::current() => None, Err(_) => None, Ok(p) => Some(p), }; - let channel: PrioritizedChannel = - NamedChannelOrUrl::from_str(&prefix_record.repodata_record.channel) - .into_diagnostic()? - .into(); + let channel: PrioritizedChannel = + NamedChannelOrUrl::from_str(&prefix_record.repodata_record.channel) + .into_diagnostic()? 
+ .into(); - let name = prefix_record.repodata_record.package_record.name; + let name = prefix_record.repodata_record.package_record.name; - return Ok((platform, channel, name)); + return Ok((platform, channel, name)); + } } } @@ -254,8 +236,6 @@ impl Project { Self { root, - client: Default::default(), - repodata_gateway: Default::default(), manifest, config, } @@ -286,7 +266,6 @@ impl Project { let manifest_path = manifest_dir.join(MANIFEST_DEFAULT_NAME); if !manifest_path.exists() { - let warn = console::style(console::Emoji("⚠️ ", "")).yellow(); let prompt = format!( "{} You don't have a global manifest yet.\n\ Do you want to create one based on your existing installation?\n\ diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs index 454a12221..1f753e26e 100644 --- a/src/global/project/parsed_manifest.rs +++ b/src/global/project/parsed_manifest.rs @@ -1,13 +1,11 @@ use std::fmt; -use std::path::PathBuf; use std::str::FromStr; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; -use miette::IntoDiagnostic; use pixi_manifest::PrioritizedChannel; use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform}; -use serde::de::{Deserialize, DeserializeSeed, Deserializer, MapAccess, Visitor}; +use serde::de::{Deserialize, Deserializer, Visitor}; use serde::Serialize; use serde_with::{serde_as, serde_derive::Deserialize}; @@ -39,7 +37,7 @@ where executable_name, exposed, } = data; - let mut parsed_environment = envs.entry(env_name).or_default(); + let parsed_environment = envs.entry(env_name).or_default(); parsed_environment.channels.insert(channel); parsed_environment.platform = platform; parsed_environment @@ -77,7 +75,7 @@ impl<'de> serde::Deserialize<'de> for ParsedManifest { envs: IndexMap, } - let mut manifest = TomlManifest::deserialize(deserializer)?; + let manifest = TomlManifest::deserialize(deserializer)?; // Check for duplicate keys in the exposed fields let mut exposed_keys = IndexSet::new(); diff --git 
a/src/lib.rs b/src/lib.rs index 711298c00..af7df88d7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -13,7 +13,6 @@ pub mod task; mod uv_reporter; -mod repodata; mod rlimit; pub use lock_file::load_lock_file; diff --git a/src/repodata.rs b/src/repodata.rs deleted file mode 100644 index d0d1786dc..000000000 --- a/src/repodata.rs +++ /dev/null @@ -1,10 +0,0 @@ -use rattler_conda_types::Channel; - -/// Returns a friendly name for the specified channel. -pub(crate) fn friendly_channel_name(channel: &Channel) -> String { - channel - .name - .as_ref() - .map(String::from) - .unwrap_or_else(|| channel.canonical_name()) -} From 6358be969a0a9d1a66123e0ff2fb5c3018e8a320 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 20 Sep 2024 16:04:23 +0200 Subject: [PATCH 6/8] test: for `local_environment_matches_spec` (#2093) --- src/global/install.rs | 188 +++++++++++++++--- src/global/test_data/lockfiles/ripgrep.lock | 106 ++++++++++ .../test_data/lockfiles/ripgrep_bat.lock | 121 +++++++++++ 3 files changed, 386 insertions(+), 29 deletions(-) create mode 100644 src/global/test_data/lockfiles/ripgrep.lock create mode 100644 src/global/test_data/lockfiles/ripgrep_bat.lock diff --git a/src/global/install.rs b/src/global/install.rs index 54b0bc9b8..6ce0a04ae 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -498,9 +498,15 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette let env_dir = EnvDir::new(env_root.clone(), env_name.clone()).await?; let prefix = Prefix::new(env_dir.path()); - let prefix_records = prefix.find_installed_packages(Some(50)).await?; + let repodata_records = prefix + .find_installed_packages(Some(50)) + .await? 
+ .into_iter() + .map(|r| r.repodata_record) + .collect_vec(); - if !specs_match_local_environment(&specs, prefix_records, parsed_environment.platform()) { + if !local_environment_matches_spec(repodata_records, &specs, parsed_environment.platform()) + { install_environment( &specs, &parsed_environment, @@ -530,18 +536,16 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette /// This function verifies that all the given specifications are present in the /// local environment's prefix records and that there are no extra entries in /// the prefix records that do not match any of the specifications. -fn specs_match_local_environment>( +fn local_environment_matches_spec( + prefix_records: Vec, specs: &IndexMap, - prefix_records: Vec, platform: Option, ) -> bool { // Check whether all specs in the manifest are present in the installed // environment - let specs_in_manifest_are_present = specs.values().all(|spec| { - prefix_records - .iter() - .any(|record| spec.matches(record.as_ref())) - }); + let specs_in_manifest_are_present = specs + .values() + .all(|spec| prefix_records.iter().any(|record| spec.matches(record))); if !specs_in_manifest_are_present { return false; @@ -549,31 +553,32 @@ fn specs_match_local_environment>( // Check whether all packages in the installed environment have the correct // platform - let platform_specs_match_env = prefix_records.iter().all(|record| { - let Ok(package_platform) = Platform::from_str(&record.as_ref().package_record.subdir) - else { - return true; - }; + if let Some(platform) = platform { + let platform_specs_match_env = prefix_records.iter().all(|record| { + let Ok(package_platform) = Platform::from_str(&record.package_record.subdir) else { + return true; + }; + + match package_platform { + Platform::NoArch => true, + p if p == platform => true, + _ => false, + } + }); - match package_platform { - Platform::NoArch => true, - p if Some(p) == platform => true, - _ => false, + if 
!platform_specs_match_env { + return false; } - }); - - if !platform_specs_match_env { - return false; } - fn prune_dependencies>( - mut remaining_prefix_records: Vec, - matched_record: &T, - ) -> Vec { + fn prune_dependencies( + mut remaining_prefix_records: Vec, + matched_record: &RepoDataRecord, + ) -> Vec { let mut work_queue = Vec::from([matched_record.as_ref().clone()]); while let Some(current_record) = work_queue.pop() { - let dependencies = ¤t_record.as_ref().depends; + let dependencies = ¤t_record.depends; for dependency in dependencies { let Ok(match_spec) = MatchSpec::from_str(dependency, ParseStrictness::Lenient) else { @@ -581,7 +586,7 @@ fn specs_match_local_environment>( }; let Some(index) = remaining_prefix_records .iter() - .position(|record| match_spec.matches(&record.as_ref().package_record)) + .position(|record| match_spec.matches(&record.package_record)) else { continue; }; @@ -597,7 +602,7 @@ fn specs_match_local_environment>( // Process each spec and remove matched entries and their dependencies let remaining_prefix_records = specs.iter().fold(prefix_records, |mut acc, (name, spec)| { let Some(index) = acc.iter().position(|record| { - record.as_ref().package_record.name == *name && spec.matches(record.as_ref()) + record.package_record.name == *name && spec.matches(record.as_ref()) }) else { return acc; }; @@ -609,3 +614,128 @@ fn specs_match_local_environment>( // the environment doesn't contain records that don't match the manifest remaining_prefix_records.is_empty() } + +#[cfg(test)] +mod tests { + use indexmap::IndexMap; + use rattler_conda_types::{MatchSpec, PackageName, ParseStrictness, Platform}; + use rattler_lock::LockFile; + use rstest::{fixture, rstest}; + + use super::*; + + #[fixture] + fn ripgrep_specs() -> IndexMap { + IndexMap::from([( + PackageName::from_str("ripgrep").unwrap(), + MatchSpec::from_str("ripgrep=14.1.0", ParseStrictness::Strict).unwrap(), + )]) + } + + #[fixture] + fn ripgrep_records() -> Vec { + 
LockFile::from_str(include_str!("./test_data/lockfiles/ripgrep.lock")) + .unwrap() + .default_environment() + .unwrap() + .conda_repodata_records_for_platform(Platform::Linux64) + .unwrap() + .unwrap() + } + + #[fixture] + fn ripgrep_bat_specs() -> IndexMap { + IndexMap::from([ + ( + PackageName::from_str("ripgrep").unwrap(), + MatchSpec::from_str("ripgrep=14.1.0", ParseStrictness::Strict).unwrap(), + ), + ( + PackageName::from_str("bat").unwrap(), + MatchSpec::from_str("bat=0.24.0", ParseStrictness::Strict).unwrap(), + ), + ]) + } + + #[fixture] + fn ripgrep_bat_records() -> Vec { + LockFile::from_str(include_str!("./test_data/lockfiles/ripgrep_bat.lock")) + .unwrap() + .default_environment() + .unwrap() + .conda_repodata_records_for_platform(Platform::Linux64) + .unwrap() + .unwrap() + } + + #[rstest] + fn test_local_environment_matches_spec( + ripgrep_records: Vec, + ripgrep_specs: IndexMap, + ) { + assert!(local_environment_matches_spec( + ripgrep_records, + &ripgrep_specs, + None + )); + } + + #[rstest] + fn test_local_environment_misses_entries_for_specs( + mut ripgrep_records: Vec, + ripgrep_specs: IndexMap, + ) { + // Remove last repodata record + ripgrep_records.pop(); + + assert!(!local_environment_matches_spec( + ripgrep_records, + &ripgrep_specs, + None + )); + } + + #[rstest] + fn test_local_environment_has_too_many_entries_to_match_spec( + ripgrep_bat_records: Vec, + ripgrep_specs: IndexMap, + ripgrep_bat_specs: IndexMap, + ) { + assert!(!local_environment_matches_spec( + ripgrep_bat_records.clone(), + &ripgrep_specs, + None + ), "The function needs to detect that records coming from ripgrep and bat don't match ripgrep alone."); + + assert!( + local_environment_matches_spec(ripgrep_bat_records, &ripgrep_bat_specs, None), + "The records and specs match and the function should return `true`." 
+ ); + } + + #[rstest] + fn test_local_environment_matches_given_platform( + ripgrep_records: Vec, + ripgrep_specs: IndexMap, + ) { + assert!( + local_environment_matches_spec( + ripgrep_records, + &ripgrep_specs, + Some(Platform::Linux64) + ), + "The records contains only linux-64 entries" + ); + } + + #[rstest] + fn test_local_environment_doesnt_match_given_platform( + ripgrep_records: Vec, + ripgrep_specs: IndexMap, + ) { + assert!( + !local_environment_matches_spec(ripgrep_records, &ripgrep_specs, Some(Platform::Win64),), + "The record contains linux-64 entries, so the function should always return `false`" + ); + } +} diff --git a/src/global/test_data/lockfiles/ripgrep.lock b/src/global/test_data/lockfiles/ripgrep.lock new file mode 100644 index 000000000..58467b6cc --- /dev/null +++ b/src/global/test_data/lockfiles/ripgrep.lock @@ -0,0 +1,106 @@ +version: 5 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.0-he8a937b_0.conda +packages: +- kind: conda + name: _libgcc_mutex + version: '0.1' + build: conda_forge + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 + md5: d7c89558ba9fa0495403155b64376d81 + license: None + size: 2562 + timestamp: 1578324546067 +- kind: conda + name: _openmp_mutex + version: '4.5' + build: 2_gnu + build_number: 16 + 
subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 + md5: 73aaf86a425cc6e73fcf236a5a46396d + depends: + - _libgcc_mutex 0.1 conda_forge + - libgomp >=7.5.0 + constrains: + - openmp_impl 9999 + license: BSD-3-Clause + license_family: BSD + size: 23621 + timestamp: 1650670423406 +- kind: conda + name: libgcc + version: 14.1.0 + build: h77fa898_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 + md5: 002ef4463dd1e2b44a94a4ace468f5d2 + depends: + - _libgcc_mutex 0.1 conda_forge + - _openmp_mutex >=4.5 + constrains: + - libgomp 14.1.0 h77fa898_1 + - libgcc-ng ==14.1.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 846380 + timestamp: 1724801836552 +- kind: conda + name: libgcc-ng + version: 14.1.0 + build: h69a702a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 + md5: 1efc0ad219877a73ef977af7dbb51f17 + depends: + - libgcc 14.1.0 h77fa898_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 52170 + timestamp: 1724801842101 +- kind: conda + name: libgomp + version: 14.1.0 + build: h77fa898_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda + sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 + md5: 23c255b008c4f2ae008f81edcabaca89 + depends: + - _libgcc_mutex 0.1 conda_forge + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 460218 + timestamp: 1724801743478 +- kind: conda + name: ripgrep + version: 14.1.0 + build: he8a937b_0 + subdir: linux-64 + url: 
https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.0-he8a937b_0.conda + sha256: 4fcf37724b87440765cb3c6cf573e99d12fc631001426a0309d132f495c3d62a + md5: 5a476f7033a8a1b9175626b5ebf86d1d + depends: + - libgcc-ng >=12 + license: MIT + license_family: MIT + size: 1683808 + timestamp: 1705520837423 diff --git a/src/global/test_data/lockfiles/ripgrep_bat.lock b/src/global/test_data/lockfiles/ripgrep_bat.lock new file mode 100644 index 000000000..db8c94dbe --- /dev/null +++ b/src/global/test_data/lockfiles/ripgrep_bat.lock @@ -0,0 +1,121 @@ +version: 5 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/bat-0.24.0-he8a937b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.0-he8a937b_0.conda +packages: +- kind: conda + name: _libgcc_mutex + version: '0.1' + build: conda_forge + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 + md5: d7c89558ba9fa0495403155b64376d81 + license: None + size: 2562 + timestamp: 1578324546067 +- kind: conda + name: _openmp_mutex + version: '4.5' + build: 2_gnu + build_number: 16 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 + md5: 73aaf86a425cc6e73fcf236a5a46396d + 
depends: + - _libgcc_mutex 0.1 conda_forge + - libgomp >=7.5.0 + constrains: + - openmp_impl 9999 + license: BSD-3-Clause + license_family: BSD + size: 23621 + timestamp: 1650670423406 +- kind: conda + name: bat + version: 0.24.0 + build: he8a937b_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/bat-0.24.0-he8a937b_0.conda + sha256: fd0a7aae7f4c52ddf2ac5098dcb9a8f4b7ab1ccdd88633390a73a9d1be3b7de2 + md5: 18da2a0103ba121e1425266e7ba51327 + depends: + - libgcc-ng >=12 + license: MIT + license_family: MIT + size: 2527297 + timestamp: 1697062695381 +- kind: conda + name: libgcc + version: 14.1.0 + build: h77fa898_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 + md5: 002ef4463dd1e2b44a94a4ace468f5d2 + depends: + - _libgcc_mutex 0.1 conda_forge + - _openmp_mutex >=4.5 + constrains: + - libgomp 14.1.0 h77fa898_1 + - libgcc-ng ==14.1.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 846380 + timestamp: 1724801836552 +- kind: conda + name: libgcc-ng + version: 14.1.0 + build: h69a702a_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 + md5: 1efc0ad219877a73ef977af7dbb51f17 + depends: + - libgcc 14.1.0 h77fa898_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 52170 + timestamp: 1724801842101 +- kind: conda + name: libgomp + version: 14.1.0 + build: h77fa898_1 + build_number: 1 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda + sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 + md5: 23c255b008c4f2ae008f81edcabaca89 + depends: + - _libgcc_mutex 0.1 conda_forge + license: GPL-3.0-only WITH GCC-exception-3.1 + 
license_family: GPL + size: 460218 + timestamp: 1724801743478 +- kind: conda + name: ripgrep + version: 14.1.0 + build: he8a937b_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.0-he8a937b_0.conda + sha256: 4fcf37724b87440765cb3c6cf573e99d12fc631001426a0309d132f495c3d62a + md5: 5a476f7033a8a1b9175626b5ebf86d1d + depends: + - libgcc-ng >=12 + license: MIT + license_family: MIT + size: 1683808 + timestamp: 1705520837423 From e943408877b54ae0d3752f371c46b7aa8fd3d592 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 20 Sep 2024 16:54:30 +0200 Subject: [PATCH 7/8] feat: Give better info message for `pixi global sync` (#2100) It now informs you if `pixi global sync` did nothing --- src/cli/global/sync.rs | 11 ++++++++++- src/global/install.rs | 21 ++++++++++++++++----- 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/src/cli/global/sync.rs b/src/cli/global/sync.rs index 85b0edf50..2ec0d87be 100644 --- a/src/cli/global/sync.rs +++ b/src/cli/global/sync.rs @@ -16,5 +16,14 @@ pub struct Args { pub async fn execute(args: Args) -> miette::Result<()> { let config = Config::with_cli_config(&args.config); - global::sync(&config, args.assume_yes).await + let updated_env = global::sync(&config, args.assume_yes).await?; + + if !updated_env { + eprintln!( + "{} Nothing to do. 
The pixi global installation is already up-to-date", + console::style(console::Emoji("✔ ", "")).green() + ); + } + + Ok(()) } diff --git a/src/global/install.rs b/src/global/install.rs index 6ce0a04ae..3b4f00dcf 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -429,7 +429,9 @@ pub(crate) fn prompt_user_to_continue( Ok(true) } -pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette::Error> { +// Syncs the manifest with the local environment +// Returns true if the global installation had to be updated +pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result { // Create directories let bin_dir = BinDir::from_env().await?; let env_root = EnvRoot::from_env().await?; @@ -476,6 +478,8 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette } } + let mut updated_env = false; + for (env_name, parsed_environment) in project.environments() { let specs = parsed_environment .dependencies @@ -505,8 +509,15 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette .map(|r| r.repodata_record) .collect_vec(); - if !local_environment_matches_spec(repodata_records, &specs, parsed_environment.platform()) - { + let install_env = !local_environment_matches_spec( + repodata_records, + &specs, + parsed_environment.platform(), + ); + + updated_env |= install_env; + + if install_env { install_environment( &specs, &parsed_environment, @@ -518,7 +529,7 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette .await?; } - expose_executables( + updated_env |= expose_executables( &env_name, &parsed_environment, specs.keys().cloned().collect(), @@ -528,7 +539,7 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result<(), miette .await?; } - Ok(()) + Ok(updated_env) } /// Checks if the local environment matches the given specifications. 
From c17fd802fc60cfb4b130f2d55891d5fa630a94a0 Mon Sep 17 00:00:00 2001 From: nichmor Date: Tue, 24 Sep 2024 14:12:05 +0200 Subject: [PATCH 8/8] feat: add pixi global expose command (#2030) Some missing things: - [x] : public doc strings - [x] : tests - [x] : better errors - [x] : running pixi global sync at the end it is also should be merged after https://github.com/prefix-dev/pixi/pull/1975 lands first --------- Co-authored-by: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Co-authored-by: Julian Hofer --- crates/pixi_manifest/src/lib.rs | 2 + crates/pixi_manifest/src/manifests/mod.rs | 10 +- src/cli/global/expose.rs | 146 ++++++++++ src/cli/global/mod.rs | 5 + src/cli/global/sync.rs | 8 +- src/global/common.rs | 66 ++--- src/global/install.rs | 82 +++--- src/global/mod.rs | 5 +- src/global/project/document.rs | 11 - src/global/project/error.rs | 63 ----- src/global/project/manifest.rs | 254 ++++++++++++++++-- src/global/project/mod.rs | 111 +++++--- src/global/project/parsed_manifest.rs | 45 ++-- ...ed_manifest__tests__duplicate_exposed.snap | 2 +- ...__tests__expose_add_when_binary_exist.snap | 12 + src/prefix.rs | 53 ++++ tests/integration/common.py | 1 - .../noarch/dummy-a-0.1.0-h4616a5c_0.conda | Bin 0 -> 2834 bytes .../noarch/dummy-b-0.1.0-h4616a5c_0.conda | Bin 0 -> 5807 bytes .../output/noarch/repodata.json | 35 +++ .../test_data/dummy_channel_a/recipe.yaml | 27 ++ tests/integration/test_global.py | 189 +++++++++---- 22 files changed, 808 insertions(+), 319 deletions(-) create mode 100644 src/cli/global/expose.rs delete mode 100644 src/global/project/document.rs delete mode 100644 src/global/project/error.rs create mode 100644 src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap create mode 100644 tests/integration/test_data/dummy_channel_a/output/noarch/dummy-a-0.1.0-h4616a5c_0.conda create mode 100644 tests/integration/test_data/dummy_channel_a/output/noarch/dummy-b-0.1.0-h4616a5c_0.conda create mode 100644 
tests/integration/test_data/dummy_channel_a/output/noarch/repodata.json create mode 100644 tests/integration/test_data/dummy_channel_a/recipe.yaml diff --git a/crates/pixi_manifest/src/lib.rs b/crates/pixi_manifest/src/lib.rs index 43271190a..326de2259 100644 --- a/crates/pixi_manifest/src/lib.rs +++ b/crates/pixi_manifest/src/lib.rs @@ -24,6 +24,7 @@ mod validation; pub use dependencies::{CondaDependencies, Dependencies, PyPiDependencies}; pub use manifests::manifest::{Manifest, ManifestKind}; +pub use manifests::TomlManifest; pub use crate::environments::Environments; pub use crate::parsed_manifest::{deserialize_package_map, ParsedManifest}; @@ -31,6 +32,7 @@ pub use crate::solve_group::{SolveGroup, SolveGroups}; pub use activation::Activation; pub use channel::{PrioritizedChannel, TomlPrioritizedChannelStrOrMap}; pub use environment::{Environment, EnvironmentName}; +pub use error::TomlError; pub use feature::{Feature, FeatureName}; use itertools::Itertools; pub use metadata::ProjectMetadata; diff --git a/crates/pixi_manifest/src/manifests/mod.rs b/crates/pixi_manifest/src/manifests/mod.rs index cb9a28cae..e0be90baa 100644 --- a/crates/pixi_manifest/src/manifests/mod.rs +++ b/crates/pixi_manifest/src/manifests/mod.rs @@ -1,3 +1,5 @@ +use std::fmt; + use toml_edit::{self, Array, Item, Table, Value}; pub mod project; @@ -23,7 +25,7 @@ impl TomlManifest { /// Retrieve a mutable reference to a target table `table_name` /// in dotted form (e.g. `table1.table2`) from the root of the document. /// If the table is not found, it is inserted into the document. 
- fn get_or_insert_nested_table<'a>( + pub fn get_or_insert_nested_table<'a>( &'a mut self, table_name: &str, ) -> Result<&'a mut Table, TomlError> { @@ -75,3 +77,9 @@ impl TomlManifest { Ok(array) } } + +impl fmt::Display for TomlManifest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/src/cli/global/expose.rs b/src/cli/global/expose.rs new file mode 100644 index 000000000..4062cd08c --- /dev/null +++ b/src/cli/global/expose.rs @@ -0,0 +1,146 @@ +use std::str::FromStr; + +use clap::Parser; +use miette::Context; +use pixi_config::{Config, ConfigCli}; + +use crate::global::{self, EnvironmentName, ExposedName}; + +#[derive(Parser, Debug)] +pub struct AddArgs { + /// Add one or more `MAPPING` for environment `ENV` which describe which executables are exposed. + /// The syntax for `MAPPING` is `exposed_name=executable_name`, so for example `python3.10=python`. + #[arg(value_parser = parse_mapping)] + mappings: Vec, + + #[clap(short, long)] + environment: EnvironmentName, + + /// Answer yes to all questions. + #[clap(short = 'y', long = "yes", long = "assume-yes")] + assume_yes: bool, + + #[clap(flatten)] + config: ConfigCli, +} + +/// Parse mapping between exposed name and executable name +fn parse_mapping(input: &str) -> miette::Result { + input + .split_once('=') + .ok_or_else(|| { + miette::miette!("Could not parse mapping `exposed_name=executable_name` from {input}") + }) + .and_then(|(key, value)| { + Ok(global::Mapping::new( + ExposedName::from_str(key)?, + value.to_string(), + )) + }) +} +#[derive(Parser, Debug)] +pub struct RemoveArgs { + /// The exposed names that should be removed + exposed_names: Vec, + + #[clap(short, long)] + environment: EnvironmentName, + + /// Answer yes to all questions. 
+ #[clap(short = 'y', long = "yes", long = "assume-yes")] + assume_yes: bool, + + #[clap(flatten)] + config: ConfigCli, +} + +#[derive(Parser, Debug)] +#[clap(group(clap::ArgGroup::new("command")))] +pub enum SubCommand { + #[clap(name = "add")] + Add(AddArgs), + #[clap(name = "remove")] + Remove(RemoveArgs), +} + +/// Expose some binaries +pub async fn execute(args: SubCommand) -> miette::Result<()> { + match args { + SubCommand::Add(args) => add(args).await?, + SubCommand::Remove(args) => remove(args).await?, + } + Ok(()) +} + +async fn revert_after_error( + mut project_original: global::Project, + config: &Config, +) -> miette::Result<()> { + project_original.manifest.save().await?; + global::sync(&project_original, config).await?; + Ok(()) +} + +pub async fn add(args: AddArgs) -> miette::Result<()> { + let config = Config::with_cli_config(&args.config); + let project_original = global::Project::discover_or_create(args.assume_yes) + .await? + .with_cli_config(config.clone()); + + async fn apply_changes( + args: AddArgs, + project_original: global::Project, + config: &Config, + ) -> Result<(), miette::Error> { + let mut project_modified = project_original; + + for mapping in args.mappings { + project_modified + .manifest + .add_exposed_mapping(&args.environment, &mapping)?; + } + project_modified.manifest.save().await?; + global::sync(&project_modified, config).await?; + Ok(()) + } + + if let Err(err) = apply_changes(args, project_original.clone(), &config).await { + revert_after_error(project_original, &config) + .await + .wrap_err("Could not add exposed mappings. Reverting also failed.")?; + return Err(err); + } + Ok(()) +} + +pub async fn remove(args: RemoveArgs) -> miette::Result<()> { + let config = Config::with_cli_config(&args.config); + let project_original = global::Project::discover_or_create(args.assume_yes) + .await? 
+ .with_cli_config(config.clone()); + + async fn apply_changes( + args: RemoveArgs, + project_original: global::Project, + config: &Config, + ) -> Result<(), miette::Error> { + let mut project_modified = project_original; + + for exposed_name in args.exposed_names { + project_modified + .manifest + .remove_exposed_name(&args.environment, &exposed_name)?; + } + project_modified.manifest.save().await?; + global::sync(&project_modified, config).await?; + Ok(()) + } + + if let Err(err) = apply_changes(args, project_original.clone(), &config).await { + revert_after_error(project_original, &config) + .await + .wrap_err("Could not remove exposed name. Reverting also failed.")?; + return Err(err); + } + Ok(()) +} diff --git a/src/cli/global/mod.rs b/src/cli/global/mod.rs index fcc901b43..87f895b45 100644 --- a/src/cli/global/mod.rs +++ b/src/cli/global/mod.rs @@ -1,5 +1,6 @@ use clap::Parser; +mod expose; mod install; mod list; mod remove; @@ -18,6 +19,9 @@ pub enum Command { List(list::Args), #[clap(visible_alias = "s")] Sync(sync::Args), + #[clap(visible_alias = "e")] + #[command(subcommand)] + Expose(expose::SubCommand), } /// Subcommand for global package management actions @@ -38,6 +42,7 @@ pub async fn execute(cmd: Args) -> miette::Result<()> { Command::Remove(args) => remove::execute(args).await?, Command::List(args) => list::execute(args).await?, Command::Sync(args) => sync::execute(args).await?, + Command::Expose(subcommand) => expose::execute(subcommand).await?, }; Ok(()) } diff --git a/src/cli/global/sync.rs b/src/cli/global/sync.rs index 2ec0d87be..521a8d889 100644 --- a/src/cli/global/sync.rs +++ b/src/cli/global/sync.rs @@ -1,4 +1,4 @@ -use crate::global::{self}; +use crate::global; use clap::Parser; use pixi_config::{Config, ConfigCli}; @@ -15,8 +15,10 @@ pub struct Args { /// Sync global manifest with installed environments pub async fn execute(args: Args) -> miette::Result<()> { let config = Config::with_cli_config(&args.config); - - let updated_env = 
global::sync(&config, args.assume_yes).await?; + let project = global::Project::discover_or_create(args.assume_yes) + .await? + .with_cli_config(config.clone()); + let updated_env = global::sync(&project, &config).await?; if !updated_env { eprintln!( diff --git a/src/global/common.rs b/src/global/common.rs index 08d985d58..c3dfc7ac2 100644 --- a/src/global/common.rs +++ b/src/global/common.rs @@ -3,14 +3,14 @@ use std::{ path::{Path, PathBuf}, }; -use itertools::Itertools; use miette::{Context, IntoDiagnostic}; use pixi_config::home_path; -use super::{EnvironmentName, ExposedKey}; +use super::{EnvironmentName, ExposedName}; /// Global binaries directory, default to `$HOME/.pixi/bin` +#[derive(Debug, Clone)] pub struct BinDir(PathBuf); impl BinDir { @@ -59,53 +59,16 @@ impl BinDir { /// This function constructs the path to the executable script by joining the /// `bin_dir` with the provided `exposed_name`. If the target platform is /// Windows, it sets the file extension to `.bat`. - pub(crate) fn executable_script_path(&self, exposed_name: &ExposedKey) -> PathBuf { + pub(crate) fn executable_script_path(&self, exposed_name: &ExposedName) -> PathBuf { let mut executable_script_path = self.0.join(exposed_name.to_string()); if cfg!(windows) { executable_script_path.set_extension("bat"); } executable_script_path } - - pub async fn print_executables_available( - &self, - executables: Vec, - ) -> miette::Result<()> { - let whitespace = console::Emoji(" ", "").to_string(); - let executable = executables - .into_iter() - .map(|path| { - path.strip_prefix(self.path()) - .expect("script paths were constructed by joining onto BinDir") - .to_string_lossy() - .to_string() - }) - .join(&format!("\n{whitespace} - ")); - - if self.is_on_path() { - eprintln!( - "{whitespace}These executables are now globally available:\n{whitespace} - {executable}", - ) - } else { - eprintln!("{whitespace}These executables have been added to {}\n{whitespace} - {executable}\n\n{} To use them, make 
sure to add {} to your PATH", - console::style(&self.path().display()).bold(), - console::style("!").yellow().bold(), - console::style(&self.path().display()).bold() - ) - } - - Ok(()) - } - - /// Returns true if the bin folder is available on the PATH. - fn is_on_path(&self) -> bool { - let Some(path_content) = std::env::var_os("PATH") else { - return false; - }; - std::env::split_paths(&path_content).contains(&self.path().to_owned()) - } } +/// Global environoments directory, default to `$HOME/.pixi/envs` #[derive(Debug, Clone)] pub struct EnvRoot(PathBuf); @@ -116,7 +79,7 @@ impl EnvRoot { tokio::fs::create_dir_all(&path) .await .into_diagnostic() - .wrap_err_with(|| format!("Couldn't create directory {}", path.display()))?; + .wrap_err_with(|| format!("Could not create directory {}", path.display()))?; Ok(Self(path)) } @@ -128,7 +91,7 @@ impl EnvRoot { tokio::fs::create_dir_all(&path) .await .into_diagnostic() - .wrap_err_with(|| format!("Couldn't create directory {}", path.display()))?; + .wrap_err_with(|| format!("Could not create directory {}", path.display()))?; Ok(Self(path)) } @@ -189,16 +152,16 @@ impl EnvRoot { /// A global environment directory pub(crate) struct EnvDir { - path: PathBuf, + pub(crate) path: PathBuf, } impl EnvDir { - /// Create a global environment directory - pub(crate) async fn new( - root: EnvRoot, + /// Create a global environment directory based on passed global environment root + pub(crate) async fn from_env_root( + env_root: EnvRoot, environment_name: EnvironmentName, ) -> miette::Result { - let path = root.path().join(environment_name.as_str()); + let path = env_root.path().join(environment_name.as_str()); tokio::fs::create_dir_all(&path).await.into_diagnostic()?; Ok(Self { path }) @@ -234,6 +197,7 @@ pub(crate) fn is_text(file_path: impl AsRef) -> miette::Result { #[cfg(test)] mod tests { use super::*; + use itertools::Itertools; use tempfile::tempdir; @@ -249,7 +213,9 @@ mod tests { let environment_name = 
"test-env".parse().unwrap(); // Create a new binary env dir - let bin_env_dir = EnvDir::new(env_root, environment_name).await.unwrap(); + let bin_env_dir = EnvDir::from_env_root(env_root, environment_name) + .await + .unwrap(); // Verify that the directory was created assert!(bin_env_dir.path().exists()); @@ -267,7 +233,7 @@ mod tests { // Create some directories in the temporary directory let envs = ["env1", "env2", "env3"]; for env in &envs { - EnvDir::new(env_root.clone(), env.parse().unwrap()) + EnvDir::from_env_root(env_root.clone(), env.parse().unwrap()) .await .unwrap(); } diff --git a/src/global/install.rs b/src/global/install.rs index 3b4f00dcf..8d00d262c 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -1,4 +1,9 @@ -use std::{collections::HashMap, ffi::OsStr, path::PathBuf, str::FromStr}; +use std::{ + collections::{HashMap, HashSet}, + ffi::OsStr, + path::PathBuf, + str::FromStr, +}; use indexmap::IndexMap; use itertools::Itertools; @@ -23,7 +28,7 @@ use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; use reqwest_middleware::ClientWithMiddleware; -use super::{common::EnvRoot, project::ParsedEnvironment, EnvironmentName, ExposedKey}; +use super::{project::ParsedEnvironment, EnvironmentName, ExposedName}; use crate::{ global::{self, BinDir, EnvDir}, prefix::Prefix, @@ -118,7 +123,6 @@ pub(crate) async fn install_environment( pub(crate) async fn expose_executables( env_name: &EnvironmentName, parsed_environment: &ParsedEnvironment, - packages: Vec, prefix: &Prefix, bin_dir: &BinDir, ) -> miette::Result { @@ -136,21 +140,13 @@ pub(crate) async fn expose_executables( let prefix_records = prefix.find_installed_packages(None).await?; - // Processes prefix records to filter and collect executable files. 
- let executables: Vec<(String, PathBuf)> = prefix_records + let all_executables = prefix.find_executables(prefix_records.as_slice()); + + let exposed: HashSet<&String> = parsed_environment.exposed.values().collect(); + + let exposed_executables: Vec<_> = all_executables .into_iter() - // Filters records to only include direct dependencies - .filter(|record| packages.contains(&record.repodata_record.package_record.name)) - // Finds executables for each filtered record. - .flat_map(|record| global::find_executables(prefix, &record)) - // Maps executables to a tuple of file name (as a string) and file path. - .filter_map(|path| { - path.file_stem() - .and_then(OsStr::to_str) - .map(|name| (name.to_string(), path.clone())) - }) - // Filters tuples to include only those whose names are in the `exposed` values - .filter(|(name, _)| parsed_environment.exposed.values().contains(&name)) + .filter(|(name, _)| exposed.contains(name)) .collect(); let script_mapping = parsed_environment @@ -160,7 +156,7 @@ pub(crate) async fn expose_executables( script_exec_mapping( exposed_name, entry_point, - executables.clone(), + exposed_executables.iter(), bin_dir, env_name, ) @@ -182,21 +178,19 @@ pub(crate) async fn expose_executables( /// /// # Errors /// -/// Returns an error if the entry point is not found in the list of executable -/// names. -fn script_exec_mapping( - exposed_name: &ExposedKey, +/// Returns an error if the entry point is not found in the list of executable names. 
+pub(crate) fn script_exec_mapping<'a>( + exposed_name: &ExposedName, entry_point: &str, - executables: impl IntoIterator, + mut executables: impl Iterator, bin_dir: &BinDir, environment_name: &EnvironmentName, ) -> miette::Result { executables - .into_iter() .find(|(executable_name, _)| *executable_name == entry_point) .map(|(_, executable_path)| ScriptExecMapping { global_script_path: bin_dir.executable_script_path(exposed_name), - original_executable: executable_path, + original_executable: executable_path.clone(), }) .ok_or_else(|| miette::miette!("Could not find {entry_point} in {environment_name}")) } @@ -303,9 +297,7 @@ async fn map_executables_to_global_bin_scripts( /// Create the executable scripts by modifying the activation script /// to activate the environment and run the executable. -/// -/// Returns true if a change was made. -async fn create_executable_scripts( +pub(crate) async fn create_executable_scripts( mapped_executables: &[ScriptExecMapping], prefix: &Prefix, shell: &ShellEnum, @@ -431,22 +423,18 @@ pub(crate) fn prompt_user_to_continue( // Syncs the manifest with the local environment // Returns true if the global installation had to be updated -pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result { - // Create directories - let bin_dir = BinDir::from_env().await?; - let env_root = EnvRoot::from_env().await?; - - let project = global::Project::discover_or_create(&bin_dir, &env_root, assume_yes) - .await? 
- .with_cli_config(config.clone()); - +pub(crate) async fn sync( + project: &global::Project, + config: &Config, +) -> Result { // Fetch the repodata let (_, auth_client) = build_reqwest_clients(Some(config)); let gateway = config.gateway(auth_client.clone()); // Prune environments that are not listed - env_root + project + .env_root .prune(project.environments().keys().cloned()) .await?; @@ -458,10 +446,10 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result Result, miette::Report>>()?; - let env_dir = EnvDir::new(env_root.clone(), env_name.clone()).await?; + let env_dir = EnvDir::from_env_root(project.env_root.clone(), env_name.clone()).await?; let prefix = Prefix::new(env_dir.path()); let repodata_records = prefix @@ -520,7 +508,7 @@ pub(crate) async fn sync(config: &Config, assume_yes: bool) -> Result Result fmt::Result { - write!(f, "{}", self.0) - } -} diff --git a/src/global/project/error.rs b/src/global/project/error.rs deleted file mode 100644 index 1b28d1108..000000000 --- a/src/global/project/error.rs +++ /dev/null @@ -1,63 +0,0 @@ -use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, NamedSource, Report}; - -use thiserror::Error; - -/// Represents errors that can occur when working with a pixi global manifest -#[derive(Error, Debug, Clone, Diagnostic)] -pub enum ManifestError { - #[error(transparent)] - Error(#[from] toml_edit::TomlError), - #[error("Could not find or access the part '{part}' in the path '[{table_name}]'")] - TableError { part: String, table_name: String }, - #[error("Could not find or access array '{array_name}' in '[{table_name}]'")] - ArrayError { - array_name: String, - table_name: String, - }, -} - -impl ManifestError { - pub fn to_fancy(&self, file_name: &str, contents: impl Into) -> Result { - if let Some(span) = self.span() { - Err(miette::miette!( - labels = vec![LabeledSpan::at(span, self.message())], - "failed to parse project manifest" - ) - .with_source_code(NamedSource::new(file_name, 
contents.into()))) - } else { - Err(self.clone()).into_diagnostic() - } - } - - fn span(&self) -> Option> { - match self { - ManifestError::Error(e) => e.span(), - _ => None, - } - } - fn message(&self) -> String { - match self { - ManifestError::Error(e) => e.message().to_owned(), - _ => self.to_string(), - } - } - - pub fn table_error(part: &str, table_name: &str) -> Self { - Self::TableError { - part: part.into(), - table_name: table_name.into(), - } - } - - pub fn array_error(array_name: &str, table_name: &str) -> Self { - Self::ArrayError { - array_name: array_name.into(), - table_name: table_name.into(), - } - } -} -impl From for ManifestError { - fn from(e: toml_edit::de::Error) -> Self { - ManifestError::Error(e.into()) - } -} diff --git a/src/global/project/manifest.rs b/src/global/project/manifest.rs index 90d098d11..8a41225ff 100644 --- a/src/global/project/manifest.rs +++ b/src/global/project/manifest.rs @@ -1,16 +1,12 @@ +use std::fmt; use std::path::{Path, PathBuf}; use miette::IntoDiagnostic; -use rattler_conda_types::{MatchSpec, PackageName}; -use toml_edit::DocumentMut; +use pixi_manifest::{TomlError, TomlManifest}; +use toml_edit::{DocumentMut, Item}; -use super::error::ManifestError; - -use super::MANIFEST_DEFAULT_NAME; -use super::{document::ManifestSource, parsed_manifest::ParsedManifest}; - -// TODO: remove -#[allow(unused)] +use super::parsed_manifest::ParsedManifest; +use super::{EnvironmentName, ExposedName, MANIFEST_DEFAULT_NAME}; /// Handles the global project's manifest file. 
/// This struct is responsible for reading, parsing, editing, and saving the @@ -22,11 +18,8 @@ pub struct Manifest { /// The path to the manifest file pub path: PathBuf, - /// The raw contents of the manifest file - pub contents: String, - /// Editable toml document - pub document: ManifestSource, + pub document: TomlManifest, /// The parsed manifest pub parsed: ParsedManifest, @@ -49,40 +42,243 @@ impl Manifest { contents .parse::() .map(|doc| (manifest, doc)) - .map_err(ManifestError::from) + .map_err(TomlError::from) }) { Ok(result) => result, Err(e) => e.to_fancy(MANIFEST_DEFAULT_NAME, &contents)?, }; - let source = ManifestSource(document); let manifest = Self { path: manifest_path.to_path_buf(), - contents, - document: source, + document: TomlManifest::new(document), parsed: manifest, }; Ok(manifest) } - /// Adds an environment to the project. - pub fn add_environment(&mut self, _name: String) -> miette::Result<()> { - todo!() + pub fn add_exposed_mapping( + &mut self, + env_name: &EnvironmentName, + mapping: &Mapping, + ) -> miette::Result<()> { + self.parsed + .envs + .entry(env_name.clone()) + .or_default() + .exposed + .insert( + mapping.exposed_name.clone(), + mapping.executable_name.clone(), + ); + + self.document + .get_or_insert_nested_table(&format!("envs.{env_name}.exposed"))? + .insert( + &mapping.exposed_name.to_string(), + Item::Value(toml_edit::Value::from(mapping.executable_name.clone())), + ); + + tracing::debug!("Added exposed mapping {mapping} to toml document"); + Ok(()) } - /// Removes an environment from the project. - pub fn remove_environment(&mut self, _name: &str) -> miette::Result { - todo!() + pub fn remove_exposed_name( + &mut self, + env_name: &EnvironmentName, + exposed_name: &ExposedName, + ) -> miette::Result<()> { + self.parsed + .envs + .get_mut(env_name) + .ok_or_else(|| miette::miette!("[envs.{env_name}] needs to exist"))? 
+ .exposed + .shift_remove(exposed_name); + + self.document + .get_or_insert_nested_table(&format!("envs.{env_name}.exposed"))? + .remove(&exposed_name.to_string()) + .ok_or_else(|| miette::miette!("The exposed name {exposed_name} doesn't exist"))?; + + tracing::debug!("Removed exposed mapping {exposed_name} from toml document"); + Ok(()) } - /// Add a matchspec to the manifest - pub fn add_dependency(&mut self, _spec: &MatchSpec) -> miette::Result { - todo!() + /// Save the manifest to the file and update the parsed_manifest + pub async fn save(&mut self) -> miette::Result<()> { + let contents = self.document.to_string(); + tokio::fs::write(&self.path, contents) + .await + .into_diagnostic()?; + Ok(()) } +} + +#[derive(Debug, Clone)] +pub struct Mapping { + exposed_name: ExposedName, + executable_name: String, +} + +impl Mapping { + pub fn new(exposed_name: ExposedName, executable_name: String) -> Self { + Self { + exposed_name, + executable_name, + } + } +} + +impl fmt::Display for Mapping { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}={}", self.exposed_name, self.executable_name) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use super::*; + + #[test] + fn test_add_exposed_mapping_new_env() { + let mut manifest = Manifest::from_str(&PathBuf::from("pixi-global.toml"), "").unwrap(); + let exposed_name = ExposedName::from_str("test_exposed").unwrap(); + let executable_name = "test_executable".to_string(); + let mapping = Mapping::new(exposed_name.clone(), executable_name); + let env_name = EnvironmentName::from_str("test-env").unwrap(); + let result = manifest.add_exposed_mapping(&env_name, &mapping); + assert!(result.is_ok()); + + let expected_value = "test_executable"; + + // Check document + let actual_value = manifest + .document + .get_or_insert_nested_table(&format!("envs.{}.exposed", env_name)) + .unwrap() + .get(&exposed_name.to_string()) + .unwrap() + .as_str() + .unwrap(); + assert_eq!(expected_value, 
actual_value); + + // Check parsed + let actual_value = manifest + .parsed + .envs + .get(&env_name) + .unwrap() + .exposed + .get(&exposed_name) + .unwrap(); + assert_eq!(expected_value, actual_value) + } + + #[test] + fn test_add_exposed_mapping_existing_env() { + let mut manifest = Manifest::from_str(&PathBuf::from("pixi-global.toml"), "").unwrap(); + let exposed_name1 = ExposedName::from_str("test_exposed1").unwrap(); + let executable_name1 = "test_executable1".to_string(); + let mapping1 = Mapping::new(exposed_name1.clone(), executable_name1); + let env_name = EnvironmentName::from_str("test-env").unwrap(); + manifest.add_exposed_mapping(&env_name, &mapping1).unwrap(); + + let exposed_name2 = ExposedName::from_str("test_exposed2").unwrap(); + let executable_name2 = "test_executable2".to_string(); + let mapping2 = Mapping::new(exposed_name2.clone(), executable_name2); + let result = manifest.add_exposed_mapping(&env_name, &mapping2); + assert!(result.is_ok()); + + // Check document for executable1 + let expected_value1 = "test_executable1"; + let actual_value1 = manifest + .document + .get_or_insert_nested_table(&format!("envs.{env_name}.exposed")) + .unwrap() + .get(&exposed_name1.to_string()) + .unwrap() + .as_str() + .unwrap(); + assert_eq!(expected_value1, actual_value1); + + // Check parsed for executable1 + let actual_value1 = manifest + .parsed + .envs + .get(&env_name) + .unwrap() + .exposed + .get(&exposed_name1) + .unwrap(); + assert_eq!(expected_value1, actual_value1); + + // Check document for executable2 + let expected_value2 = "test_executable2"; + let actual_value2 = manifest + .document + .get_or_insert_nested_table(&format!("envs.{env_name}.exposed")) + .unwrap() + .get(&exposed_name2.to_string()) + .unwrap() + .as_str() + .unwrap(); + assert_eq!(expected_value2, actual_value2); + + // Check parsed for executable2 + let actual_value2 = manifest + .parsed + .envs + .get(&env_name) + .unwrap() + .exposed + .get(&exposed_name2) + .unwrap(); + 
assert_eq!(expected_value2, actual_value2) + } + + #[test] + fn test_remove_exposed_mapping() { + let mut manifest = Manifest::from_str(&PathBuf::from("pixi-global.toml"), "").unwrap(); + let exposed_name = ExposedName::from_str("test_exposed").unwrap(); + let executable_name = "test_executable".to_string(); + let mapping = Mapping::new(exposed_name.clone(), executable_name); + let env_name = EnvironmentName::from_str("test-env").unwrap(); + + // Add and remove mapping again + manifest.add_exposed_mapping(&env_name, &mapping).unwrap(); + manifest + .remove_exposed_name(&env_name, &exposed_name) + .unwrap(); + + // Check document + let actual_value = manifest + .document + .get_or_insert_nested_table(&format!("envs.{env_name}.exposed")) + .unwrap() + .get(&exposed_name.to_string()); + assert!(actual_value.is_none()); + + // Check parsed + let actual_value = manifest + .parsed + .envs + .get(&env_name) + .unwrap() + .exposed + .get(&exposed_name); + assert!(actual_value.is_none()) + } + + #[test] + fn test_remove_exposed_mapping_nonexistent() { + let mut manifest = Manifest::from_str(&PathBuf::from("pixi-global.toml"), "").unwrap(); + let exposed_name = ExposedName::from_str("test_exposed").unwrap(); + let env_name = EnvironmentName::from_str("test-env").unwrap(); - /// Removes a dependency based on `SpecType`. 
- pub fn remove_dependency(&mut self, _dep: &PackageName) -> miette::Result<()> { - todo!() + // Removing an exposed name that doesn't exist should return an error + let result = manifest.remove_exposed_name(&env_name, &exposed_name); + assert!(result.is_err()) } } diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index 46b078721..c4b65a776 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -7,11 +7,12 @@ use std::{ pub(crate) use environment::EnvironmentName; use indexmap::IndexMap; -use manifest::Manifest; +pub(crate) use manifest::{Manifest, Mapping}; use miette::{Context, IntoDiagnostic}; use once_cell::sync::Lazy; +pub(crate) use parsed_manifest::ExposedName; +pub(crate) use parsed_manifest::ParsedEnvironment; use parsed_manifest::ParsedManifest; -pub(crate) use parsed_manifest::{ExposedKey, ParsedEnvironment}; use pixi_config::{home_path, Config}; use pixi_manifest::PrioritizedChannel; use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform, PrefixRecord}; @@ -24,9 +25,7 @@ use crate::{ prefix::Prefix, }; -mod document; mod environment; -mod error; mod manifest; mod parsed_manifest; @@ -44,6 +43,10 @@ pub struct Project { pub(crate) manifest: Manifest, /// The global configuration as loaded from the config file(s) config: Config, + /// Root directory of the global environments + pub(crate) env_root: EnvRoot, + /// Binary directory + pub(crate) bin_dir: BinDir, } impl Debug for Project { @@ -62,7 +65,7 @@ struct ExposedData { platform: Option, channel: PrioritizedChannel, package: PackageName, - exposed: ExposedKey, + exposed: ExposedName, executable_name: String, } @@ -77,7 +80,7 @@ impl ExposedData { .file_stem() .and_then(OsStr::to_str) .ok_or_else(|| miette::miette!("Could not get file stem of {}", path.display())) - .and_then(ExposedKey::from_str)?; + .and_then(ExposedName::from_str)?; let executable_path = extract_executable_from_script(path)?; let executable = executable_path @@ -100,7 +103,7 @@ impl 
ExposedData { let conda_meta = env_path.join("conda-meta"); - let bin_env_dir = EnvDir::new(env_root.clone(), env_name.clone()).await?; + let bin_env_dir = EnvDir::from_env_root(env_root.clone(), env_name.clone()).await?; let prefix = Prefix::new(bin_env_dir.path()); let (platform, channel, package) = @@ -160,7 +163,7 @@ fn determine_env_path(executable_path: &Path, env_root: &Path) -> miette::Result } miette::bail!( - "Couldn't determine environment path: no parent of '{}' has '{}' as its direct parent", + "Could not determine environment path: no parent of '{}' has '{}' as its direct parent", executable_path.display(), env_root.display() ) @@ -179,14 +182,17 @@ async fn package_from_conda_meta( let read_dir = tokio::fs::read_dir(conda_meta) .await .into_diagnostic() - .wrap_err_with(|| format!("Couldn't read directory {}", conda_meta.display()))?; + .wrap_err_with(|| format!("Could not read directory {}", conda_meta.display()))?; let mut entries = ReadDirStream::new(read_dir); while let Some(entry) = entries.next().await { let path = entry .into_diagnostic() .wrap_err_with(|| { - format!("Couldn't read file from directory {}", conda_meta.display()) + format!( + "Could not read file from directory {}", + conda_meta.display() + ) })? .path(); // Check if the entry is a file and has a .json extension @@ -225,7 +231,7 @@ async fn package_from_conda_meta( impl Project { /// Constructs a new instance from an internal manifest representation - fn from_manifest(manifest: Manifest) -> Self { + pub(crate) fn from_manifest(manifest: Manifest, env_root: EnvRoot, bin_dir: BinDir) -> Self { let root = manifest .path .parent() @@ -238,33 +244,39 @@ impl Project { root, manifest, config, + env_root, + bin_dir, } } /// Constructs a project from a manifest. 
- pub(crate) fn from_str(manifest_path: &Path, content: &str) -> miette::Result { + pub(crate) fn from_str( + manifest_path: &Path, + content: &str, + env_root: EnvRoot, + bin_dir: BinDir, + ) -> miette::Result { let manifest = Manifest::from_str(manifest_path, content)?; - Ok(Self::from_manifest(manifest)) + Ok(Self::from_manifest(manifest, env_root, bin_dir)) } /// Discovers the project manifest file in path at /// `~/.pixi/manifests/pixi-global.toml`. If the manifest doesn't exist /// yet, and the function will try to create one from the existing /// installation. If that one fails, an empty one will be created. - pub(crate) async fn discover_or_create( - bin_dir: &BinDir, - env_root: &EnvRoot, - assume_yes: bool, - ) -> miette::Result { + pub(crate) async fn discover_or_create(assume_yes: bool) -> miette::Result { let manifest_dir = Self::manifest_dir()?; tokio::fs::create_dir_all(&manifest_dir) .await .into_diagnostic() - .wrap_err_with(|| format!("Couldn't create directory {}", manifest_dir.display()))?; + .wrap_err_with(|| format!("Could not create directory {}", manifest_dir.display()))?; let manifest_path = manifest_dir.join(MANIFEST_DEFAULT_NAME); + let bin_dir = BinDir::from_env().await?; + let env_root = EnvRoot::from_env().await?; + if !manifest_path.exists() { let prompt = format!( "{} You don't have a global manifest yet.\n\ @@ -281,7 +293,7 @@ impl Project { .interact() .into_diagnostic()?) 
{ - return Self::try_from_existing_installation(&manifest_path, bin_dir, env_root) + return Self::try_from_existing_installation(&manifest_path, env_root, bin_dir) .await .wrap_err_with(|| { "Failed to create global manifest from existing installation" @@ -291,16 +303,16 @@ impl Project { tokio::fs::File::create(&manifest_path) .await .into_diagnostic() - .wrap_err_with(|| format!("Couldn't create file {}", manifest_path.display()))?; + .wrap_err_with(|| format!("Could not create file {}", manifest_path.display()))?; } - Self::from_path(&manifest_path) + Self::from_path(&manifest_path, env_root, bin_dir) } async fn try_from_existing_installation( manifest_path: &Path, - bin_dir: &BinDir, - env_root: &EnvRoot, + env_root: EnvRoot, + bin_dir: BinDir, ) -> miette::Result { let futures = bin_dir .files() @@ -311,9 +323,9 @@ impl Project { Ok(false) => None, // Success and isn't text, filter out Err(e) => Some(Err(e)), // Failure, continue with error }) - .map(|result| async move { + .map(|result| async { match result { - Ok(path) => ExposedData::from_exposed_path(&path, env_root).await, + Ok(path) => ExposedData::from_exposed_path(&path, &env_root).await, Err(e) => Err(e), } }); @@ -325,7 +337,7 @@ impl Project { tokio::fs::write(&manifest_path, &toml) .await .into_diagnostic()?; - Self::from_str(manifest_path, &toml) + Self::from_str(manifest_path, &toml, env_root, bin_dir) } /// Get default dir for the pixi global manifest @@ -336,9 +348,13 @@ impl Project { } /// Loads a project from manifest file. - pub(crate) fn from_path(manifest_path: &Path) -> miette::Result { + pub(crate) fn from_path( + manifest_path: &Path, + env_root: EnvRoot, + bin_dir: BinDir, + ) -> miette::Result { let manifest = Manifest::from_path(manifest_path)?; - Ok(Project::from_manifest(manifest)) + Ok(Project::from_manifest(manifest, env_root, bin_dir)) } /// Merge config with existing config project @@ -351,8 +367,8 @@ impl Project { } /// Returns the environments in this project. 
- pub(crate) fn environments(&self) -> IndexMap { - self.manifest.parsed.environments() + pub(crate) fn environments(&self) -> &IndexMap { + &self.manifest.parsed.envs } } @@ -366,30 +382,36 @@ mod tests { const SIMPLE_MANIFEST: &str = r#" [envs.python] - channels = ["conda-forge"] + channels = ["dummy-channel"] [envs.python.dependencies] - python = "3.11.*" + dummy = "3.11.*" [envs.python.exposed] - python = "python" + dummy = "dummy" "#; - #[test] - fn test_project_from_str() { + #[tokio::test] + async fn test_project_from_str() { let manifest_path: PathBuf = FilePath().fake(); + let env_root = EnvRoot::from_env().await.unwrap(); + let bin_dir = BinDir::from_env().await.unwrap(); - let project = Project::from_str(&manifest_path, SIMPLE_MANIFEST).unwrap(); + let project = + Project::from_str(&manifest_path, SIMPLE_MANIFEST, env_root, bin_dir).unwrap(); assert_eq!(project.root, manifest_path.parent().unwrap()); } - #[test] - fn test_project_from_path() { + #[tokio::test] + async fn test_project_from_path() { let tempdir = tempfile::tempdir().unwrap(); let manifest_path = tempdir.path().join(MANIFEST_DEFAULT_NAME); + let env_root = EnvRoot::from_env().await.unwrap(); + let bin_dir = BinDir::from_env().await.unwrap(); + // Create and write global manifest let mut file = std::fs::File::create(&manifest_path).unwrap(); file.write_all(SIMPLE_MANIFEST.as_bytes()).unwrap(); - let project = Project::from_path(&manifest_path).unwrap(); + let project = Project::from_path(&manifest_path, env_root, bin_dir).unwrap(); // Canonicalize both paths let canonical_root = project.root.canonicalize().unwrap(); @@ -398,12 +420,15 @@ mod tests { assert_eq!(canonical_root, canonical_manifest_parent); } - #[test] - fn test_project_from_manifest() { + #[tokio::test] + async fn test_project_from_manifest() { let manifest_path: PathBuf = FilePath().fake(); + let env_root = EnvRoot::from_env().await.unwrap(); + let bin_dir = BinDir::from_env().await.unwrap(); + let manifest = 
Manifest::from_str(&manifest_path, SIMPLE_MANIFEST).unwrap(); - let project = Project::from_manifest(manifest); + let project = Project::from_manifest(manifest, env_root, bin_dir); assert_eq!(project.root, manifest_path.parent().unwrap()); } diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs index 1f753e26e..bfe5b0de7 100644 --- a/src/global/project/parsed_manifest.rs +++ b/src/global/project/parsed_manifest.rs @@ -3,15 +3,16 @@ use std::str::FromStr; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; -use pixi_manifest::PrioritizedChannel; +use miette::Diagnostic; +use pixi_manifest::{PrioritizedChannel, TomlError}; use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform}; use serde::de::{Deserialize, Deserializer, Visitor}; use serde::Serialize; use serde_with::{serde_as, serde_derive::Deserialize}; +use thiserror::Error; use super::environment::EnvironmentName; -use super::error::ManifestError; use super::ExposedData; use pixi_spec::PixiSpec; @@ -19,7 +20,7 @@ use pixi_spec::PixiSpec; #[derive(Debug, Clone, Serialize)] pub struct ParsedManifest { /// The environments the project can create. - envs: IndexMap, + pub(crate) envs: IndexMap, } impl From for ParsedManifest @@ -52,12 +53,8 @@ where impl ParsedManifest { /// Parses a toml string into a project manifest. 
- pub(crate) fn from_toml_str(source: &str) -> Result { - toml_edit::de::from_str(source).map_err(ManifestError::from) - } - - pub(crate) fn environments(&self) -> IndexMap { - self.envs.clone() + pub(crate) fn from_toml_str(source: &str) -> Result { + toml_edit::de::from_str(source).map_err(TomlError::from) } } @@ -78,17 +75,17 @@ impl<'de> serde::Deserialize<'de> for ParsedManifest { let manifest = TomlManifest::deserialize(deserializer)?; // Check for duplicate keys in the exposed fields - let mut exposed_keys = IndexSet::new(); + let mut exposed_names = IndexSet::new(); let mut duplicates = IndexMap::new(); for key in manifest.envs.values().flat_map(|env| env.exposed.keys()) { - if !exposed_keys.insert(key) { + if !exposed_names.insert(key) { duplicates.entry(key).or_insert_with(Vec::new).push(key); } } if !duplicates.is_empty() { let duplicate_keys = duplicates.keys().map(|k| k.to_string()).collect_vec(); return Err(serde::de::Error::custom(format!( - "Duplicate exposed keys found: '{}'", + "Duplicate exposed names found: '{}'", duplicate_keys.join(", ") ))); } @@ -109,7 +106,8 @@ pub(crate) struct ParsedEnvironment { platform: Option, #[serde(default, deserialize_with = "pixi_manifest::deserialize_package_map")] pub(crate) dependencies: IndexMap, - pub(crate) exposed: IndexMap, + #[serde(default)] + pub(crate) exposed: IndexMap, } impl ParsedEnvironment { @@ -125,27 +123,27 @@ impl ParsedEnvironment { } #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)] -pub(crate) struct ExposedKey(String); +pub(crate) struct ExposedName(String); -impl fmt::Display for ExposedKey { +impl fmt::Display for ExposedName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) } } -impl FromStr for ExposedKey { +impl FromStr for ExposedName { type Err = miette::Report; fn from_str(value: &str) -> Result { if value == "pixi" { miette::bail!("The key 'pixi' is not allowed in the exposed map"); } else { - Ok(ExposedKey(value.to_string())) + 
Ok(ExposedName(value.to_string())) } } } -impl<'de> Deserialize<'de> for ExposedKey { +impl<'de> Deserialize<'de> for ExposedName { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, @@ -153,7 +151,7 @@ impl<'de> Deserialize<'de> for ExposedKey { struct ExposedKeyVisitor; impl<'de> Visitor<'de> for ExposedKeyVisitor { - type Value = ExposedKey; + type Value = ExposedName; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string that is not 'pixi'") @@ -163,7 +161,7 @@ impl<'de> Deserialize<'de> for ExposedKey { where E: serde::de::Error, { - ExposedKey::from_str(value).map_err(serde::de::Error::custom) + ExposedName::from_str(value).map_err(serde::de::Error::custom) } } @@ -171,6 +169,13 @@ impl<'de> Deserialize<'de> for ExposedKey { } } +/// Represents an error that occurs when parsing an binary exposed name. +/// +/// This error is returned when a string fails to be parsed as an environment name. +#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] +#[error("pixi is not allowed as exposed name in the map")] +pub struct ParseExposedKeyError {} + #[cfg(test)] mod tests { use insta::assert_snapshot; diff --git a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap index d2ebc752f..c73d2bb8a 100644 --- a/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap +++ b/src/global/project/snapshots/pixi__global__project__parsed_manifest__tests__duplicate_exposed.snap @@ -2,4 +2,4 @@ source: src/global/project/parsed_manifest.rs expression: manifest.unwrap_err() --- -Duplicate exposed keys found: 'python, python3' +Duplicate exposed names found: 'python, python3' diff --git a/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap 
b/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap new file mode 100644 index 000000000..c345f0cd9 --- /dev/null +++ b/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap @@ -0,0 +1,12 @@ +--- +source: src/global/expose.rs +expression: project.manifest.document.to_string() +--- +[envs.python-3-10] +channels = ["conda-forge"] +[envs.python-3-10.dependencies] +python = "3.10" +[envs.python-3-10.exposed] +python = "python" +python3 = "python" +atuin = "atuin" diff --git a/src/prefix.rs b/src/prefix.rs index f626d373f..771ddf9f4 100644 --- a/src/prefix.rs +++ b/src/prefix.rs @@ -1,5 +1,6 @@ use std::{ collections::HashMap, + ffi::OsStr, path::{Path, PathBuf}, }; @@ -105,4 +106,56 @@ impl Prefix { Ok(result) } + + /// Processes prefix records (that you can get by using `find_installed_packages`) + /// to filter and collect executable files. + pub fn find_executables(&self, prefix_packages: &[PrefixRecord]) -> Vec<(String, PathBuf)> { + prefix_packages + .iter() + .flat_map(|record| { + record + .files + .iter() + .filter(|relative_path| self.is_executable(relative_path)) + .filter_map(|path| { + path.file_stem() + .and_then(OsStr::to_str) + .map(|name| (name.to_string(), path.clone())) + }) + }) + .collect() + } + + /// Checks if the given relative path points to an executable file. + pub(crate) fn is_executable(&self, relative_path: &Path) -> bool { + // Check if the file is in a known executable directory. 
+ let binary_folders = if cfg!(windows) { + &([ + "", + "Library/mingw-w64/bin/", + "Library/usr/bin/", + "Library/bin/", + "Scripts/", + "bin/", + ][..]) + } else { + &(["bin"][..]) + }; + + let parent_folder = match relative_path.parent() { + Some(dir) => dir, + None => return false, + }; + + if !binary_folders + .iter() + .any(|bin_path| Path::new(bin_path) == parent_folder) + { + return false; + } + + // Check if the file is executable + let absolute_path = self.root().join(relative_path); + is_executable::is_executable(absolute_path) + } } diff --git a/tests/integration/common.py b/tests/integration/common.py index 31d00b3e4..8351246fe 100644 --- a/tests/integration/common.py +++ b/tests/integration/common.py @@ -10,7 +10,6 @@ class ExitCode(IntEnum): SUCCESS = 0 FAILURE = 1 INCORRECT_USAGE = 2 - LIFE = 42 def verify_cli_command( diff --git a/tests/integration/test_data/dummy_channel_a/output/noarch/dummy-a-0.1.0-h4616a5c_0.conda b/tests/integration/test_data/dummy_channel_a/output/noarch/dummy-a-0.1.0-h4616a5c_0.conda new file mode 100644 index 0000000000000000000000000000000000000000..3a1003351b3c9704ecaa0525c5662fbe58d2cef7 GIT binary patch literal 2834 zcma);XHXO97KRf7gc3Rk0ffLNEK)-6pp*a`YN!fI2~Fvp4IqRj&Ct1sbP$xHgd)8! 
z(z`STDS}JAN(oXV!o|Hm?yNI+X3sb8ob%^>=bRtk%!APfQPKf^RFAfz)d;{D$#*sX z5W|%`odd8=*Z?fj&EMNARM64e%L!}e<9^@H#oNyl8(xZ^z;lC$BIiLrI3>14`k(~lu% z&E#|eiFasNL~1+|6=+NzTjqwCGa17ijH{lFsZc}_NN;!C8{0z9&MGf2GA}Z}c=XZ& zfK)(6AapS$LO{yJHu{$wnohs&ZZT3=~K=!p3={GiS*Gnl(PxUT=HSfOiMKH-s!Ud0e{+`jEPt>$2s(2ow0 z`asU}z9HR!WMyvnvKX9*V`ih$o+QBpX)0g3)>YDKVwjFp<6SHHP;(zNrp>O~>SjQ} z&Ukf|2_SNni8Cs=#~9=ZYV>)S-7C#2+$U;n?(cNxc$@kumDai!mLK7&ZfIJW za!ER0O5cO3w3GH2GrKtm%9;FZ90nIU`I6!)S&A#a?y2v^Wk|@IfyWD51CGlSM=9#0 z#G)4k>FH%G67+Ivhs4}gxL-X)7-q6-AiRa{YTe=6Nq`SR4Q)DchBKyzi{pVq0`QSK z7=2{6(3N$QKc~MDoZSOGc9t_$?7yowHtsq#U89uqX9vDAdMGRsgLa$l2`)M2BWQH zQOc5Qa_U8&r&gVh&sBfhP?H^r$MI71iyl}BS>oeTz)su ztWS&=it3r0gwdy^tV`zlj^#RbIz;6zDe(vP@2xF$92#wIe@PHk#ziSR;Jw`MFqb%I zol6gbz4%e*g0?3@LU=zOH>ypVc#YRajy3Biv?6o3_F{DS{`ZTvHxc-z& zvby+sH+LOlIvkzCp_Zh%>ou`Qj=Ud7t+e#~#s)v>*(FO&u#6Dr@WbKUtohv=%_p1k z22d1BnYu}8iCf8L<#YXyz!$>BI99SgEs_|PSkZ=43uu??^yoNUf7N!{@JYe$9x>n} z&kLq-iLdES?J7+tWYbSmD77^B`-77dB}i|Xt%3>LL#wP8S6|8YYb$)PZj^LW)}+_Y z@7J}M5UDf^6Yf_uz6<*n>^TFew`373~QlHH(Ja=fXpdvHzZ(xyphH|5*QbXjuk zGMS@0YN)KU*=&sTq=VI`oK1uRPFc>v+Mt0rICQ}+uRIVxV-T(Iz7zof9-CX3njiLu zTSqRG_vD+#x}Z>|simomSY`*9*|1XO4zL6#HmSj7z=DEE$1rQs2vBHJqF^W#3eL=O z!wpQauKI)stz>;=}BD)P| z>r7klo4pn>l}>adtl1XF9L^zBpHxdz$W~m)z?$WCvE;*_djstV=|@1X517d zoX2}-4nAqRO!-s?fk=Mab8SoL3tOi!eS@X1H$RiJ#vAsiL?X*enXa5EBI4doN%?Sj ziG)n<*zd*cet()UtNroZ(T-(}GQ1;foCJQXIauhIM%o8h=sl{-;iP^Id@vua6BlMZ z?4FyI!J_oL&Zad-Jo`~Ta<6XM*x(Nr1>Q2eGW)&=!BLLBW+^f{wKK@)dja&)FBZxJ ziyPzX9)vt_jwn?O7kl>}q?oQ0m$YAR$$ca*Q4kZloj+SlsrqXJRwq0vOYLGF3IOyP(%*u^7x?mVciiH-x>N89BJW2$+ZvwvMfWrdQrEY zQyAql&ZU27!bH+i8sIBj`z5O_nq5q3f_K>P3}Z7_czHdZh*9afVDYr2l(MA#czgK_ zHxL1uv1hQiee7#M*)v09h1!uBm`&G;{+11FT6p^6G(&Kxsv-gtv%#1sE>f0c!F0W+ zIv|yVf3n(Bv(}eD@MA;7${mXT{-Nh6mG1KCu*fIBq}wJ>iylwz+&KHU?Ujn&l^vCE{qEFOLhD?O6! 
zwxLeR888V4_j2|42IL<>=xVqvURRp63-ar2$e+Y&=Y}Ha_4+AfPb{}W&85$wJXSdO zi~GOKiY+d904?qcoLTz9GJXjXPCv%SRkV@lQFOAjv`nK+-yQjc-Bi%B?f(2g$ zaw^U3la%%n;G{baQO>6Jn>Iwb2CHzp_g%!P4f`A5ZyQ)-6w0#UW^*X~>oV2>l%BB< zDaUiIw!~}Fonf+#JoXlWvL6v^E6!t?Hsy?5+v`%?d1Rv&l+!KG8<6 zYpOd$sIJ}hsnM-x<>kD{hf{$W_k`OvBGf-X3+OSZTiFyCeINxL=>OB<6*v7=rIA0{ z-&pZy>(4y+Un2le8i~Fd{|O!bocJ@r{V#F*YWzn$hta2^{$T@Mz16GE``4-e0d>a2 AjQ{`u literal 0 HcmV?d00001 diff --git a/tests/integration/test_data/dummy_channel_a/output/noarch/dummy-b-0.1.0-h4616a5c_0.conda b/tests/integration/test_data/dummy_channel_a/output/noarch/dummy-b-0.1.0-h4616a5c_0.conda new file mode 100644 index 0000000000000000000000000000000000000000..35ea0ce4b505644da20eb5b5b4defe51e3fd52c3 GIT binary patch literal 5807 zcma)gWl$9S*Y+08(jl^R_p&rB(n!kEA>9ok;Ub+&mvo4Pf&$)vl!PGNNH<6~A_yq; z;QM}f=6}yTGyn6OYtH$dnd_P}bH1FpQ0ia^KHzV3D2W>n0_^zd@9V#S-owq#+sf9; z+lt@$xrh5pW*ZN8TPq7s7Y7S_4=*<>Zwp^Lujfu4?#z;iK-7N-g(OytnBNQh1?xTT zIefN0Zf1J0U~ z?k1sRD1ay~XYyscX0H+zAwGbe9f19x^NqRA-q zjpX)Qx6THH-_7);UDWWmyd_7xlIOsiJn!KR;ioi<9HUgz~@zben(W@ zSf#yxaq)*C_!&BHLE2d0zQa;hZ!ooIH5hkX$uv#*bW^!<)@`cCJY8dLu3~%bzQt4N z)iBFr_(>A_taRSgMeMUuzL!$v)5)r`8RB&@_sr9pdda$S`V#H({L0u(%l!Dbg!F`E z(}0EZr4@^(9NRaoaX03p2?3*b%SPm<&&@?MZUj#9!tcyQb2n!l7T}U+r-1ge*#F(pbj)6#jJRS@^(>SoZs)lCGJc2cB0+IpT`uE+k{GdMH4*V+uZw~@CJY&~3(gATa3X4t+d2yV5fAHDb(EGY2J6)75gcR8JOb&)vy*-ZpFfBDmXTEp&=XJmMp zT{i}K_pyMu^D-#y7Za2&@#?8*{Z%sqBB{^zr_uCtsiBqcj5#E(j@On&vrpQ7HNOb< zLAVRc|MpsmRv#N<61e>w6n(;^NiZ5F+`1y|K|~)uV3$D1yq`0t$DONTM`udEzb;s& zhe(pkA9~GsZeMdDn%dz{@AK7wEULn2kL#)0cZd{@=|TLMw3@rp(=c{D##1R@@#Y#i zV;$}Zh4<9YqH)w8P)P`#x4XTx^!$w{)UHPDr!~Z*JQEwOMxlyVR!WYbE>hu&yw=2% zP!9<-{xGq#aIo&vyaviiDbjLJ-yIfQ#{sS&lx%-dt-J9LX=iBWl5=@fnQSeDD}ntJ z<0-HOw}gCyKDo7wn6$SPd{yZm1B=L;gvBB$-NwI*l!oD9Npe(lAx}=^l*Q%>JH|#L z^`$xVdhl?{8W0=t-!hzrfC>WY!~n?=I>RoTWIovsA44VqIwi&C2Oh3V3SdvOQMt7l78`ejP_gW#>b@)rn)h zRiL(v!Gx1srr!6e)RFNY<<>ta)8}u|3KDwdNtigTdygw%8xLjiX~yosu~HFV%JFv? 
zXP?^yB=JrQdFidDYPs7i2k@qtm(RBB{ASl4w~oyKzrrgOR~d1#T=yL%B;m#`2{+6x zm!({#Y*2BKv;`e#lKx}{{n|fM!s?i!!Yf=G%rk~z3DKRKGH{8SBDB<0YB)G9EotaK zaSFpVSgCsR({$6Yb3-a~`QEX(=&=}lj+n9o8ak6w5=eh_PS33`dRzApg56fhFP?J+ z4A043(oNPuX`_YGQK*m2D1YZf4-m-L$Q}JEsbJ{!E={4(m}^JziF>%eniQJqSY>7AwMQW zzH6XO*ns$%W@!~L&u+%?w1|p;sSelKF_*P`~$q$=v5T6;zPWw0& zFinQRIe*K{IdrY^aAmEht96&gQte4X5}zt0Ys3DNqYmRAI>nTJ`7x2QagZE{8%er1 zJ(-m=j|g#06it}SC8Y|{tIG^=%cXCb;;Q6LFh~eey1Qby7IK`66f%!@UCBlcHyZa)GS+zNLwB z>OJAf7_riGq|)azG|)ezfa6cdt!>XK#v3h%0hao?x8Gzx5m!JyaO{VbGj(x_gwP1! zCM%MR|&8Q4cO+BXxjJ$5sVja zNZ6~t%yeg?eeCM^PWMNSsES!0jEdDD;G}cPw@Emx0~)%)3QGl%c6x8+bp-v)K7G(2L@_AaDcs~1+d%B?&0|w$Ro})B@1OduBol`zIV+%N+>1{&* z5-XZhwlpOo#xX8MAZov^2oUBdL9&|w3Gi(uz_kOswA=csVw}x*5JG3c%^2bs)!`%O z|M}!s42uug<$VwEkYVK&53dD`{DGmkc%eMFbrIk@0sP#?qj7L%N9tm1y<0tEm^vhz z#)H`T^U{-F$43?gHdA@Eh?Y9B2P;y+CeHc?S~?#qb<&GV1;uL%DueXLhwyX@Q(DWA zP?e(s0!vKw<%B}<@i*sAFRu@3-%&Zm?VIu`?7et_sCY?~r40_I(NP(r648vGS51hN zIl4ZXdlqIA??v-2EG=P9}M<=L9oNdB}dtJXkD?u8p};Pm*vhgDP7|48}_eq2m&iZvme$M zgPbf)`bCA=WI6X>;qhcVqY5}OvR@K&a51)gCy8U_KL?evXzkzDvyuzD2GEhvg>e%Y zP(%7{5oksJBrQZ!L+_XE;TL9N$JT;X!B;$OzhJzkvB7iH3juQ#qgS`5^)nhpRk;Ip zwLyruK9*PI5t2!!Y^j$>vh+TdyH0ELWqojV*5W#<_EGe_@Y%LhQ?Cr&mdDM=MRR;q zPULk#ZvQQR$s23`wu_d)q~HhJX{wuE_t(zh@yeay#j+T*HAXmv8b7jkty)FlO}`1U zZ8%otX|^|W_9zFr414}SaoX=+{Q0Z2Xb8hI*g?zwc6AKL>k-xmrt8C-Cs&Y{x&0p@ z>wO-9GOw5KZ%Y?@vM;Se_GaPf%iBvxMCRyLW)spsf8>j4P_L!MO{UCU_mLnBnq843 z(sb=d;1fkfv009745J~#$MUg@^v8t{i<128lFcw#PUn_pD`qZd3?bu1hJ4*DF$H}L z&k<7}e3CwJ;c@_IMzHS4iM*(D&~%=$(xjg!CkUkcfe}>XTGar7m=^x z@~;e7c(6mkZ$-Zu#hBz8KJJUk{TW`p1Uc>55AJ+~Y4 zQMx8H!bI@>WOg!DbQcFsBGa+71E^ex8#or3RnNJMrVpcV1nuiSEsVo#9YSIie}!N% zx#MF~x2@9E`L1?BMU+I!s`5%C%e=5NhE*DjmxthQ1vMLd{%ep&5#zjOx{jyt%|l znQ!pM)21|ctUrMu4;b7D+8a6VU~h`!xyV#XI+xqK>$dDW0Yu4G)=Fokd8`EBiN!|6H%Y24)IpVG?`V__5(sYTsdV@d0eI_ z5(SJbh%`l3QBl!$4+h%Ns$9O~B%bc-!Jef%>HswT05V$+rOtpc(Z3;Yh$PDVY{9ZP zKqs)#`eIn;VfW+mEDf?J(IooT@4DcVZ4U_!`5kC%3u67m5i?YQ83>JJjgGgjbq)9f2{Nqq 
zT@=_;u`EX4mFl(yaC_K^Bw>++7Qm%%~^qY)9j?Gs=9QQ}2ccHJ_@R(tXvau^}`NN;gMP_TXlc zd$15Uhh*pEr%%04(B@5X-{M2zg5|b(mhT=x&s_h^ExAI&9@F&G;r0+kdoxK60IqLy zW>1{GBH1qwxUMfK*g1RGfTw7M(##ImpXRR{o<5P>Gp5*ig(bi_<}wh&8btQt2*^72 zEaf_9qVt<9Q-D`4(CtCjY&!sp^fDv*A=5GA82#!f!&uZ5(HA@-Yr+>F4F+qcX3qB} z8iE(nOhtvl0ALtRFZOD+*N|Js&w!+@yra$#n}-Es=MCQYqI~a6_;N>_Wx8XuNh2au z%s0s8FOX|WiVrvg9UpSMIj)ic``z;#*i)oh+rxk1~PK zc0a4a;L}ifSF@wAFpRyWr{_Mq;%+U@tm1Ye+W3r&yoQKG6`8i-gjw$vZ>m{*u4AgK zZO7HY%gAaJ?0gzxK`zD9xPzrVCn?BqmhgG9S(|y4aRG}r_x-dy9JKtlB{QQ8x zbsPuvrJMq{1_efWaIGGtjYvI;g(i2XGr&vXvhzwCKS-9x>+7Xfy+9Q8;E27=rfVKI zqeTpMnH|@Xs~2IXl;hg3;W9!-ynjmZmpAGqaQc3^u-px^gieC{7IEcMSj6vYDZ;@N z?%9cL^wHt#Z8cr0vYws=COK?D%aTDE1C~R_fOtY&N2eMfd?7Z_zW#C`<$1chH|S8Tca86B89J>ltDEi5Nb zg8?x>=coo*M#N_YR}P<=%zx $PREFIX/bin/dummy-a.com + - chmod +x $PREFIX/bin/dummy-a.com + + - package: + name: dummy-b + version: 0.1.0 + + build: + noarch: generic + script: + - mkdir -p $PREFIX/bin + - echo "dummy-b" > $PREFIX/bin/dummy-b.com + - chmod +x $PREFIX/bin/dummy-b.com diff --git a/tests/integration/test_global.py b/tests/integration/test_global.py index a3ba059ab..ae81ab62e 100644 --- a/tests/integration/test_global.py +++ b/tests/integration/test_global.py @@ -5,6 +5,13 @@ import platform +def exe_extension(exe_name: str) -> str: + if platform.system() == "Windows": + return exe_name + ".bat" + else: + return exe_name + + def test_global_sync_dependencies(pixi: Path, tmp_path: Path) -> None: env = {"PIXI_HOME": str(tmp_path)} manifests = tmp_path.joinpath("manifests") @@ -21,8 +28,7 @@ def test_global_sync_dependencies(pixi: Path, tmp_path: Path) -> None: """ parsed_toml = tomllib.loads(toml) manifest.write_text(toml) - exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" - python_injected = tmp_path / "bin" / exposed_exec + python_injected = tmp_path / "bin" / exe_extension("python-injected") # Test basic commands verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) @@ 
-83,101 +89,92 @@ def test_global_sync_platform(pixi: Path, tmp_path: Path) -> None: ) -def test_global_sync_change_expose(pixi: Path, tmp_path: Path) -> None: +def test_global_sync_change_expose(pixi: Path, tmp_path: Path, test_data: Path) -> None: env = {"PIXI_HOME": str(tmp_path)} manifests = tmp_path.joinpath("manifests") manifests.mkdir() manifest = manifests.joinpath("pixi-global.toml") - toml = """ + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + toml = f""" [envs.test] - channels = ["conda-forge"] + channels = ["{dummy_channel}"] [envs.test.dependencies] - python = "3.12" + dummy-a = "*" [envs.test.exposed] - "python-injected" = "python" + "dummy-a" = "dummy-a" """ parsed_toml = tomllib.loads(toml) manifest.write_text(toml) - exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" - python_injected = tmp_path / "bin" / exposed_exec + dummy_a = tmp_path / "bin" / exe_extension("dummy-a") # Test basic commands verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) - verify_cli_command( - [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" - ) - verify_cli_command([python_injected], ExitCode.SUCCESS, env=env) + assert dummy_a.is_file() # Add another expose - python_in_disguise_str = ( - "python-in-disguise.bat" if platform.system() == "Windows" else "python-in-disguise" - ) - python_in_disguise = tmp_path / "bin" / python_in_disguise_str - parsed_toml["envs"]["test"]["exposed"][python_in_disguise_str] = "python" + dummy_in_disguise_str = exe_extension("dummy-in-disguise") + dummy_in_disguise = tmp_path / "bin" / dummy_in_disguise_str + parsed_toml["envs"]["test"]["exposed"][dummy_in_disguise_str] = "dummy-a" manifest.write_text(tomli_w.dumps(parsed_toml)) verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) - verify_cli_command([python_in_disguise, "--version"], ExitCode.SUCCESS, env=env) + assert dummy_in_disguise.is_file() # Remove 
expose again - del parsed_toml["envs"]["test"]["exposed"][python_in_disguise_str] + del parsed_toml["envs"]["test"]["exposed"][dummy_in_disguise_str] manifest.write_text(tomli_w.dumps(parsed_toml)) verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) - assert not python_in_disguise.is_file() + assert not dummy_in_disguise.is_file() -def test_global_sync_manually_remove_binary(pixi: Path, tmp_path: Path) -> None: +def test_global_sync_manually_remove_binary(pixi: Path, tmp_path: Path, test_data: Path) -> None: env = {"PIXI_HOME": str(tmp_path)} manifests = tmp_path.joinpath("manifests") manifests.mkdir() manifest = manifests.joinpath("pixi-global.toml") - toml = """ + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + toml = f""" [envs.test] - channels = ["conda-forge"] + channels = ["{dummy_channel}"] [envs.test.dependencies] - python = "3.12" + dummy-a = "*" [envs.test.exposed] - "python-injected" = "python" + "dummy-a" = "dummy-a" """ manifest.write_text(toml) - exposed_exec = "python-injected.bat" if platform.system() == "Windows" else "python-injected" - python_injected = tmp_path / "bin" / exposed_exec + dummy_a = tmp_path / "bin" / exe_extension("dummy-a") # Test basic commands verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) - verify_cli_command( - [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" - ) - verify_cli_command([python_injected], ExitCode.SUCCESS, env=env) + assert dummy_a.is_file() # Remove binary manually - python_injected.unlink() + dummy_a.unlink() # Binary is added again verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) - verify_cli_command( - [python_injected, "--version"], ExitCode.SUCCESS, env=env, stdout_contains="3.12" - ) + assert dummy_a.is_file() -def test_global_sync_migrate(pixi: Path, tmp_path: Path) -> None: +def test_global_sync_migrate(pixi: Path, tmp_path: Path, test_data: Path) -> None: env = {"PIXI_HOME": 
str(tmp_path)} manifests = tmp_path.joinpath("manifests") manifests.mkdir() manifest = manifests.joinpath("pixi-global.toml") - toml = """ + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + toml = f""" [envs.test] - channels = ["https://conda.anaconda.org/conda-forge"] + channels = ["{dummy_channel}"] [envs.test.dependencies] - ripgrep = "*" - python = "*" + dummy-a = "*" + dummy-b = "*" [envs.test.exposed] - rg = "rg" - grep = "rg" - python = "python" - python3 = "python" + dummy-1 = "dummy-a" + dummy-2 = "dummy-a" + dummy-3 = "dummy-b" + dummy-4 = "dummy-b" """ manifest.write_text(toml) verify_cli_command([pixi, "global", "sync"], ExitCode.SUCCESS, env=env) @@ -188,3 +185,105 @@ def test_global_sync_migrate(pixi: Path, tmp_path: Path) -> None: verify_cli_command([pixi, "global", "sync", "--assume-yes"], ExitCode.SUCCESS, env=env) migrated_manifest = tomllib.loads(manifest.read_text()) assert original_manifest == migrated_manifest + + +def test_global_expose_basic(pixi: Path, tmp_path: Path, test_data: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + toml = f""" + [envs.test] + channels = ["{dummy_channel}"] + [envs.test.dependencies] + dummy-a = "*" + """ + manifest.write_text(toml) + dummy1 = tmp_path / "bin" / exe_extension("dummy1") + dummy3 = tmp_path / "bin" / exe_extension("dummy3") + + # Add dummy1 + verify_cli_command( + [pixi, "global", "expose", "add", "--environment=test", "dummy1=dummy-a"], + ExitCode.SUCCESS, + env=env, + ) + assert dummy1.is_file() + + # Add dummy3 + verify_cli_command( + [pixi, "global", "expose", "add", "--environment=test", "dummy3=dummy-a"], + ExitCode.SUCCESS, + env=env, + ) + assert dummy3.is_file() + + # Remove dummy1 + verify_cli_command( + [pixi, "global", "expose", "remove", "--environment=test", "dummy1"], + 
ExitCode.SUCCESS, + env=env, + ) + assert not dummy1.is_file() + + # Attempt to remove python2 + verify_cli_command( + [pixi, "global", "expose", "remove", "--environment=test", "dummy2"], + ExitCode.FAILURE, + env=env, + stderr_contains="The exposed name dummy2 doesn't exist", + ) + + +def test_global_expose_revert_working(pixi: Path, tmp_path: Path, test_data: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + original_toml = f""" + [envs.test] + channels = ["{dummy_channel}"] + [envs.test.dependencies] + dummy-a = "*" + """ + manifest.write_text(original_toml) + + # Attempt to add executable dummy-b that is not in our dependencies + verify_cli_command( + [pixi, "global", "expose", "add", "--environment=test", "dummy-b=dummy-b"], + ExitCode.FAILURE, + env=env, + stderr_contains="Could not find dummy-b in test", + ) + + # The TOML has been reverted to the original state + assert original_toml == manifest.read_text() + + +def test_global_expose_revert_failure(pixi: Path, tmp_path: Path, test_data: Path) -> None: + env = {"PIXI_HOME": str(tmp_path)} + manifests = tmp_path.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + dummy_channel = test_data.joinpath("dummy_channel_a/output").as_uri() + original_toml = f""" + [envs.test] + channels = ["{dummy_channel}"] + [envs.test.dependencies] + dummy-a = "*" + [envs.test.exposed] + dummy1 = "dummy-b" + """ + manifest.write_text(original_toml) + + # Attempt to add executable dummy-b that isn't in our dependencies + # It should fail since the original manifest contains "dummy-b", + # which is not in our dependencies + verify_cli_command( + [pixi, "global", "expose", "add", "--environment=test", "dummy2=dummyb"], + ExitCode.FAILURE, + env=env, + stderr_contains="Could not add exposed mappings. 
Reverting also failed", + )