diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock
index 2320afeecb..8e27a1655b 100644
--- a/codex-rs/Cargo.lock
+++ b/codex-rs/Cargo.lock
@@ -1281,6 +1281,7 @@ dependencies = [
  "serde_json",
  "shlex",
  "starlark",
+ "tempfile",
  "thiserror 2.0.17",
 ]
 
diff --git a/codex-rs/execpolicy/Cargo.toml b/codex-rs/execpolicy/Cargo.toml
index 77278bb118..890c23faa7 100644
--- a/codex-rs/execpolicy/Cargo.toml
+++ b/codex-rs/execpolicy/Cargo.toml
@@ -28,3 +28,4 @@ thiserror = { workspace = true }
 
 [dev-dependencies]
 pretty_assertions = { workspace = true }
+tempfile = { workspace = true }
diff --git a/codex-rs/execpolicy/src/amend.rs b/codex-rs/execpolicy/src/amend.rs
new file mode 100644
index 0000000000..36a02cebf9
--- /dev/null
+++ b/codex-rs/execpolicy/src/amend.rs
@@ -0,0 +1,226 @@
+use std::fs::OpenOptions;
+use std::io::Read;
+use std::io::Seek;
+use std::io::SeekFrom;
+use std::io::Write;
+use std::path::Path;
+use std::path::PathBuf;
+
+use serde_json;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum AmendError {
+    #[error("prefix rule requires at least one token")]
+    EmptyPrefix,
+    #[error("policy path has no parent: {path}")]
+    MissingParent { path: PathBuf },
+    #[error("failed to create policy directory {dir}: {source}")]
+    CreatePolicyDir {
+        dir: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to format prefix tokens: {source}")]
+    SerializePrefix { source: serde_json::Error },
+    #[error("failed to open policy file {path}: {source}")]
+    OpenPolicyFile {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to write to policy file {path}: {source}")]
+    WritePolicyFile {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to lock policy file {path}: {source}")]
+    LockPolicyFile {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to seek policy file {path}: {source}")]
+    SeekPolicyFile {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to read policy file {path}: {source}")]
+    ReadPolicyFile {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+    #[error("failed to read metadata for policy file {path}: {source}")]
+    PolicyMetadata {
+        path: PathBuf,
+        source: std::io::Error,
+    },
+}
+
+/// Note this function uses advisory file locking and performs blocking I/O, so it should be used with
+/// [`tokio::task::spawn_blocking`] when called from an async context.
+pub fn blocking_append_allow_prefix_rule(
+    policy_path: &Path,
+    prefix: &[String],
+) -> Result<(), AmendError> {
+    if prefix.is_empty() {
+        return Err(AmendError::EmptyPrefix);
+    }
+
+    let tokens = prefix
+        .iter()
+        .map(serde_json::to_string)
+        .collect::<Result<Vec<_>, _>>()
+        .map_err(|source| AmendError::SerializePrefix { source })?;
+    let pattern = format!("[{}]", tokens.join(", "));
+    let rule = format!(r#"prefix_rule(pattern={pattern}, decision="allow")"#);
+
+    let dir = policy_path
+        .parent()
+        .ok_or_else(|| AmendError::MissingParent {
+            path: policy_path.to_path_buf(),
+        })?;
+    match std::fs::create_dir(dir) {
+        Ok(()) => {}
+        Err(ref source) if source.kind() == std::io::ErrorKind::AlreadyExists => {}
+        Err(source) => {
+            return Err(AmendError::CreatePolicyDir {
+                dir: dir.to_path_buf(),
+                source,
+            });
+        }
+    }
+    append_locked_line(policy_path, &rule)
+}
+
+fn append_locked_line(policy_path: &Path, line: &str) -> Result<(), AmendError> {
+    let mut file = OpenOptions::new()
+        .create(true)
+        .read(true)
+        .append(true)
+        .open(policy_path)
+        .map_err(|source| AmendError::OpenPolicyFile {
+            path: policy_path.to_path_buf(),
+            source,
+        })?;
+    file.lock().map_err(|source| AmendError::LockPolicyFile {
+        path: policy_path.to_path_buf(),
+        source,
+    })?;
+
+    let len = file
+        .metadata()
+        .map_err(|source| AmendError::PolicyMetadata {
+            path: policy_path.to_path_buf(),
+            source,
+        })?
+        .len();
+
+    // Ensure file ends in a newline before appending.
+    if len > 0 {
+        file.seek(SeekFrom::End(-1))
+            .map_err(|source| AmendError::SeekPolicyFile {
+                path: policy_path.to_path_buf(),
+                source,
+            })?;
+        let mut last = [0; 1];
+        file.read_exact(&mut last)
+            .map_err(|source| AmendError::ReadPolicyFile {
+                path: policy_path.to_path_buf(),
+                source,
+            })?;
+
+        if last[0] != b'\n' {
+            file.write_all(b"\n")
+                .map_err(|source| AmendError::WritePolicyFile {
+                    path: policy_path.to_path_buf(),
+                    source,
+                })?;
+        }
+    }
+
+    file.write_all(format!("{line}\n").as_bytes())
+        .map_err(|source| AmendError::WritePolicyFile {
+            path: policy_path.to_path_buf(),
+            source,
+        })?;
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use pretty_assertions::assert_eq;
+    use tempfile::tempdir;
+
+    #[test]
+    fn appends_rule_and_creates_directories() {
+        let tmp = tempdir().expect("create temp dir");
+        let policy_path = tmp.path().join("policy").join("default.codexpolicy");
+
+        blocking_append_allow_prefix_rule(
+            &policy_path,
+            &[String::from("echo"), String::from("Hello, world!")],
+        )
+        .expect("append rule");
+
+        let contents =
+            std::fs::read_to_string(&policy_path).expect("default.codexpolicy should exist");
+        assert_eq!(
+            contents,
+            r#"prefix_rule(pattern=["echo", "Hello, world!"], decision="allow")
+"#
+        );
+    }
+
+    #[test]
+    fn appends_rule_without_duplicate_newline() {
+        let tmp = tempdir().expect("create temp dir");
+        let policy_path = tmp.path().join("policy").join("default.codexpolicy");
+        std::fs::create_dir_all(policy_path.parent().unwrap()).expect("create policy dir");
+        std::fs::write(
+            &policy_path,
+            r#"prefix_rule(pattern=["ls"], decision="allow")
+"#,
+        )
+        .expect("write seed rule");
+
+        blocking_append_allow_prefix_rule(
+            &policy_path,
+            &[String::from("echo"), String::from("Hello, world!")],
+        )
+        .expect("append rule");
+
+        let contents = std::fs::read_to_string(&policy_path).expect("read policy");
+        assert_eq!(
+            contents,
+            r#"prefix_rule(pattern=["ls"], decision="allow")
+prefix_rule(pattern=["echo", "Hello, world!"], decision="allow")
+"#
+        );
+    }
+
+    #[test]
+    fn inserts_newline_when_missing_before_append() {
+        let tmp = tempdir().expect("create temp dir");
+        let policy_path = tmp.path().join("policy").join("default.codexpolicy");
+        std::fs::create_dir_all(policy_path.parent().unwrap()).expect("create policy dir");
+        std::fs::write(
+            &policy_path,
+            r#"prefix_rule(pattern=["ls"], decision="allow")"#,
+        )
+        .expect("write seed rule without newline");
+
+        blocking_append_allow_prefix_rule(
+            &policy_path,
+            &[String::from("echo"), String::from("Hello, world!")],
+        )
+        .expect("append rule");
+
+        let contents = std::fs::read_to_string(&policy_path).expect("read policy");
+        assert_eq!(
+            contents,
+            r#"prefix_rule(pattern=["ls"], decision="allow")
+prefix_rule(pattern=["echo", "Hello, world!"], decision="allow")
+"#
+        );
+    }
+}
diff --git a/codex-rs/execpolicy/src/lib.rs b/codex-rs/execpolicy/src/lib.rs
index b459caea16..31062f1cb6 100644
--- a/codex-rs/execpolicy/src/lib.rs
+++ b/codex-rs/execpolicy/src/lib.rs
@@ -1,3 +1,4 @@
+pub mod amend;
 pub mod decision;
 pub mod error;
 pub mod execpolicycheck;
@@ -5,6 +6,8 @@ pub mod parser;
 pub mod policy;
 pub mod rule;
 
+pub use amend::AmendError;
+pub use amend::blocking_append_allow_prefix_rule;
 pub use decision::Decision;
 pub use error::Error;
 pub use error::Result;
diff --git a/codex-rs/execpolicy/src/policy.rs b/codex-rs/execpolicy/src/policy.rs
index e048fce1ff..10858c9fad 100644
--- a/codex-rs/execpolicy/src/policy.rs
+++ b/codex-rs/execpolicy/src/policy.rs
@@ -1,9 +1,15 @@
 use crate::decision::Decision;
+use crate::error::Error;
+use crate::error::Result;
+use crate::rule::PatternToken;
+use crate::rule::PrefixPattern;
+use crate::rule::PrefixRule;
 use crate::rule::RuleMatch;
 use crate::rule::RuleRef;
 use multimap::MultiMap;
 use serde::Deserialize;
 use serde::Serialize;
+use std::sync::Arc;
 
 #[derive(Clone, Debug)]
 pub struct Policy {
@@ -23,6 +29,27 @@
         &self.rules_by_program
     }
 
+    pub fn add_prefix_rule(&mut self, prefix: &[String], decision: Decision) -> Result<()> {
+        let (first_token, rest) = prefix
+            .split_first()
+            .ok_or_else(|| Error::InvalidPattern("prefix cannot be empty".to_string()))?;
+
+        let rule: RuleRef = Arc::new(PrefixRule {
+            pattern: PrefixPattern {
+                first: Arc::from(first_token.as_str()),
+                rest: rest
+                    .iter()
+                    .map(|token| PatternToken::Single(token.clone()))
+                    .collect::<Vec<_>>()
+                    .into(),
+            },
+            decision,
+        });
+
+        self.rules_by_program.insert(first_token.clone(), rule);
+        Ok(())
+    }
+
     pub fn check(&self, cmd: &[String]) -> Evaluation {
         let rules = match cmd.first() {
             Some(first) => match self.rules_by_program.get_vec(first) {
diff --git a/codex-rs/execpolicy/tests/basic.rs b/codex-rs/execpolicy/tests/basic.rs
index e4189caa21..9a7ec58b1e 100644
--- a/codex-rs/execpolicy/tests/basic.rs
+++ b/codex-rs/execpolicy/tests/basic.rs
@@ -1,8 +1,12 @@
 use std::any::Any;
 use std::sync::Arc;
 
+use anyhow::Context;
+use anyhow::Result;
 use codex_execpolicy::Decision;
+use codex_execpolicy::Error;
 use codex_execpolicy::Evaluation;
+use codex_execpolicy::Policy;
 use codex_execpolicy::PolicyParser;
 use codex_execpolicy::RuleMatch;
 use codex_execpolicy::RuleRef;
@@ -35,16 +39,14 @@ fn rule_snapshots(rules: &[RuleRef]) -> Vec<RuleSnapshot> {
 }
 
 #[test]
-fn basic_match() {
+fn basic_match() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = ["git", "status"],
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
     let cmd = tokens(&["git", "status"]);
     let evaluation = policy.check(&cmd);
@@ -58,10 +60,54 @@
         },
         evaluation
     );
+    Ok(())
 }
 
 #[test]
-fn parses_multiple_policy_files() {
+fn add_prefix_rule_extends_policy() -> Result<()> {
+    let mut policy = Policy::empty();
+    policy.add_prefix_rule(&tokens(&["ls", "-l"]), Decision::Prompt)?;
+
+    let rules = rule_snapshots(policy.rules().get_vec("ls").context("missing ls rules")?);
+    assert_eq!(
+        vec![RuleSnapshot::Prefix(PrefixRule {
+            pattern: PrefixPattern {
+                first: Arc::from("ls"),
+                rest: vec![PatternToken::Single(String::from("-l"))].into(),
+            },
+            decision: Decision::Prompt,
+        })],
+        rules
+    );
+
+    let evaluation = policy.check(&tokens(&["ls", "-l", "/tmp"]));
+    assert_eq!(
+        Evaluation::Match {
+            decision: Decision::Prompt,
+            matched_rules: vec![RuleMatch::PrefixRuleMatch {
+                matched_prefix: tokens(&["ls", "-l"]),
+                decision: Decision::Prompt,
+            }],
+        },
+        evaluation
+    );
+    Ok(())
+}
+
+#[test]
+fn add_prefix_rule_rejects_empty_prefix() -> Result<()> {
+    let mut policy = Policy::empty();
+    let result = policy.add_prefix_rule(&[], Decision::Allow);
+
+    match result.unwrap_err() {
+        Error::InvalidPattern(message) => assert_eq!(message, "prefix cannot be empty"),
+        other => panic!("expected InvalidPattern(..), got {other:?}"),
+    }
+    Ok(())
+}
+
+#[test]
+fn parses_multiple_policy_files() -> Result<()> {
     let first_policy = r#"
 prefix_rule(
     pattern = ["git"],
@@ -75,15 +121,11 @@
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("first.codexpolicy", first_policy)
-        .expect("parse policy");
-    parser
-        .parse("second.codexpolicy", second_policy)
-        .expect("parse policy");
+    parser.parse("first.codexpolicy", first_policy)?;
+    parser.parse("second.codexpolicy", second_policy)?;
     let policy = parser.build();
 
-    let git_rules = rule_snapshots(policy.rules().get_vec("git").expect("git rules"));
+    let git_rules = rule_snapshots(policy.rules().get_vec("git").context("missing git rules")?);
     assert_eq!(
         vec![
             RuleSnapshot::Prefix(PrefixRule {
@@ -133,23 +175,27 @@
         },
         commit_eval
     );
+    Ok(())
 }
 
 #[test]
-fn only_first_token_alias_expands_to_multiple_rules() {
+fn only_first_token_alias_expands_to_multiple_rules() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = [["bash", "sh"], ["-c", "-l"]],
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
 
-    let bash_rules = rule_snapshots(policy.rules().get_vec("bash").expect("bash rules"));
-    let sh_rules = rule_snapshots(policy.rules().get_vec("sh").expect("sh rules"));
+    let bash_rules = rule_snapshots(
+        policy
+            .rules()
+            .get_vec("bash")
+            .context("missing bash rules")?,
+    );
+    let sh_rules = rule_snapshots(policy.rules().get_vec("sh").context("missing sh rules")?);
     assert_eq!(
         vec![RuleSnapshot::Prefix(PrefixRule {
             pattern: PrefixPattern {
@@ -194,22 +240,21 @@
         },
         sh_eval
     );
+    Ok(())
 }
 
 #[test]
-fn tail_aliases_are_not_cartesian_expanded() {
+fn tail_aliases_are_not_cartesian_expanded() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = ["npm", ["i", "install"], ["--legacy-peer-deps", "--no-save"]],
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
 
-    let rules = rule_snapshots(policy.rules().get_vec("npm").expect("npm rules"));
+    let rules = rule_snapshots(policy.rules().get_vec("npm").context("missing npm rules")?);
     assert_eq!(
         vec![RuleSnapshot::Prefix(PrefixRule {
             pattern: PrefixPattern {
@@ -251,10 +296,11 @@
         },
         npm_install
     );
+    Ok(())
 }
 
 #[test]
-fn match_and_not_match_examples_are_enforced() {
+fn match_and_not_match_examples_are_enforced() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = ["git", "status"],
@@ -266,9 +312,7 @@
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
     let match_eval = policy.check(&tokens(&["git", "status"]));
     assert_eq!(
@@ -289,10 +333,11 @@
         "status",
     ]));
     assert_eq!(Evaluation::NoMatch {}, no_match_eval);
+    Ok(())
 }
 
 #[test]
-fn strictest_decision_wins_across_matches() {
+fn strictest_decision_wins_across_matches() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = ["git"],
@@ -304,9 +349,7 @@
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
 
     let commit = policy.check(&tokens(&["git", "commit", "-m", "hi"]));
@@ -326,10 +369,11 @@
         },
         commit
     );
+    Ok(())
 }
 
 #[test]
-fn strictest_decision_across_multiple_commands() {
+fn strictest_decision_across_multiple_commands() -> Result<()> {
     let policy_src = r#"
 prefix_rule(
     pattern = ["git"],
@@ -341,9 +385,7 @@
 )
 "#;
     let mut parser = PolicyParser::new();
-    parser
-        .parse("test.codexpolicy", policy_src)
-        .expect("parse policy");
+    parser.parse("test.codexpolicy", policy_src)?;
     let policy = parser.build();
 
     let commands = vec![
@@ -372,4 +414,5 @@
         },
         evaluation
    );
+    Ok(())
 }
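
Usage sketch (not part of the diff): a minimal illustration of how an async caller might consume the new helper, following the doc comment's recommendation to wrap the blocking, lock-taking call in `tokio::task::spawn_blocking`. The `allow_prefix` wrapper, its signature, and the use of `anyhow`/`tokio` here are assumptions for the example, not part of this change.

```rust
use std::path::PathBuf;

use codex_execpolicy::blocking_append_allow_prefix_rule;

// Hypothetical async wrapper: runs the blocking append on a dedicated
// blocking thread so the advisory file lock and file I/O never stall the
// async runtime.
async fn allow_prefix(policy_path: PathBuf, prefix: Vec<String>) -> anyhow::Result<()> {
    tokio::task::spawn_blocking(move || blocking_append_allow_prefix_rule(&policy_path, &prefix))
        .await??; // first `?` is the JoinError, second is AmendError
    Ok(())
}
```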