diff --git a/.claude/hooks/rtk-rewrite.sh b/.claude/hooks/rtk-rewrite.sh index b67beee2..a5699462 100755 --- a/.claude/hooks/rtk-rewrite.sh +++ b/.claude/hooks/rtk-rewrite.sh @@ -89,6 +89,10 @@ elif echo "$MATCH_CMD" | grep -qE '^cargo[[:space:]]+nextest([[:space:]]|$)'; th elif echo "$MATCH_CMD" | grep -qE '^cargo[[:space:]]+fmt([[:space:]]|$)'; then REWRITTEN="${ENV_PREFIX}$(echo "$CMD_BODY" | sed 's/^cargo fmt/rtk cargo fmt/')" +# --- .NET --- +elif echo "$MATCH_CMD" | grep -qE '^dotnet[[:space:]]+(build|test|restore)([[:space:]]|$)'; then + REWRITTEN="${ENV_PREFIX}$(echo "$CMD_BODY" | sed 's/^dotnet/rtk dotnet/')" + # --- File operations --- elif echo "$MATCH_CMD" | grep -qE '^cat[[:space:]]+'; then REWRITTEN="${ENV_PREFIX}$(echo "$CMD_BODY" | sed 's/^cat /rtk read /')" diff --git a/Cargo.lock b/Cargo.lock index 4b22c386..92dd9df7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,12 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "ahash" version = "0.8.12" @@ -207,6 +213,15 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -293,6 +308,16 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "getrandom" version = "0.2.17" @@ -482,6 +507,16 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -524,6 +559,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "quick-xml" +version = "0.37.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", +] + [[package]] name = "quote" version = "1.0.43" @@ -588,8 +632,10 @@ dependencies = [ "clap", "colored", "dirs", + "flate2", "ignore", "lazy_static", + "quick-xml", "regex", "rusqlite", "serde", @@ -701,6 +747,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + [[package]] name = "smallvec" version = "1.15.1" diff --git a/Cargo.toml b/Cargo.toml index f333d7c2..38164af9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,8 @@ toml = "0.8" chrono = "0.4" thiserror = "1.0" tempfile = "3" +flate2 = "1.0" +quick-xml = "0.37" [dev-dependencies] diff --git a/README.md b/README.md index 19a2250f..6401bb0e 100644 --- a/README.md +++ b/README.md @@ -164,6 +164,9 @@ rtk pytest # Python tests (failures only, 90% reduction) rtk pip list 
# Python packages (auto-detect uv, 70% reduction) rtk go test # Go tests (NDJSON, 90% reduction) rtk golangci-lint run # Go linting (JSON, 85% reduction) +rtk dotnet build # .NET build summary with binlog +rtk dotnet test # .NET failures only (auto TRX + fallback parsing) +rtk dotnet restore # .NET restore summary ``` ### Data & Analytics @@ -256,7 +259,7 @@ rtk prisma migrate dev --name x # Migration summary rtk prisma db-push # Schema push summary ``` -### Python & Go Stack +### Python, Go & .NET Stack ```bash # Python rtk ruff check # Ruff linter (JSON, 80% reduction) @@ -271,8 +274,19 @@ rtk go test # NDJSON streaming parser (90% reduction) rtk go build # Build errors only (80% reduction) rtk go vet # Vet issues (75% reduction) rtk golangci-lint run # JSON grouped by rule (85% reduction) + +# .NET +rtk dotnet build # Build errors/warnings summary with binlog +rtk dotnet test # Failed tests only (auto TRX cleanup, TestResults fallback) +rtk dotnet restore # Restore project/package summary ``` +Dotnet behavior notes: +- RTK forwards your dotnet args as-is (`--configuration`, `--framework`, `--project`, `--no-build`, `--no-restore`, `--filter`, etc.). +- RTK only injects defaults when missing (`-bl`, `-v:minimal`, `-nologo`) and does not override your explicit `-v` / `--logger`. +- For `rtk dotnet test`, RTK auto-generates a TRX file, parses it when binlog/console counts are unavailable, then cleans up that temp TRX file. +- If temp TRX is missing, RTK falls back to the newest `./TestResults/*.trx` file. 
+ ## Examples ### Standard vs rtk diff --git a/hooks/rtk-rewrite.sh b/hooks/rtk-rewrite.sh index 59e02caa..6b383a2a 100644 --- a/hooks/rtk-rewrite.sh +++ b/hooks/rtk-rewrite.sh @@ -72,6 +72,10 @@ elif echo "$MATCH_CMD" | grep -qE '^cargo[[:space:]]'; then ;; esac +# --- .NET --- +elif echo "$MATCH_CMD" | grep -qE '^dotnet[[:space:]]+(build|test|restore)([[:space:]]|$)'; then + REWRITTEN="${ENV_PREFIX}$(echo "$CMD_BODY" | sed 's/^dotnet/rtk dotnet/')" + # --- File operations --- elif echo "$MATCH_CMD" | grep -qE '^cat[[:space:]]+'; then REWRITTEN="${ENV_PREFIX}$(echo "$CMD_BODY" | sed 's/^cat /rtk read /')" diff --git a/hooks/test-rtk-rewrite.sh b/hooks/test-rtk-rewrite.sh index 2a68ff8f..c155a716 100755 --- a/hooks/test-rtk-rewrite.sh +++ b/hooks/test-rtk-rewrite.sh @@ -149,10 +149,16 @@ test_rewrite "env + docker compose" \ "COMPOSE_PROJECT_NAME=test docker compose up -d" \ "COMPOSE_PROJECT_NAME=test rtk docker compose up -d" +test_rewrite "env + dotnet test" \ + "DOTNET_CLI_TELEMETRY_OPTOUT=1 dotnet test --nologo" \ + "DOTNET_CLI_TELEMETRY_OPTOUT=1 rtk dotnet test --nologo" + echo "" # ---- SECTION 3: New patterns ---- echo "--- New patterns ---" + +# JS/TS test_rewrite "npm run test:e2e" \ "npm run test:e2e" \ "rtk npm test:e2e" @@ -173,6 +179,20 @@ test_rewrite "npx vue-tsc --noEmit" \ "npx vue-tsc --noEmit" \ "rtk tsc --noEmit" +# .NET +test_rewrite "dotnet build" \ + "dotnet build" \ + "rtk dotnet build" + +test_rewrite "dotnet test --nologo" \ + "dotnet test --nologo" \ + "rtk dotnet test --nologo" + +test_rewrite "dotnet restore src/App.sln" \ + "dotnet restore src/App.sln" \ + "rtk dotnet restore src/App.sln" + +# Containers test_rewrite "docker compose up -d" \ "docker compose up -d" \ "rtk docker compose up -d" @@ -193,18 +213,7 @@ test_rewrite "docker exec -it db psql" \ "docker exec -it db psql" \ "rtk docker exec -it db psql" -test_rewrite "find (NOT rewritten — different arg format)" \ - "find . 
-name '*.ts'" \ - "" - -test_rewrite "tree (NOT rewritten — different arg format)" \ - "tree src/" \ - "" - -test_rewrite "wget (NOT rewritten — different arg format)" \ - "wget https://example.com/file" \ - "" - +# GitHub / Kubernetes test_rewrite "gh api repos/owner/repo" \ "gh api repos/owner/repo" \ "rtk gh api repos/owner/repo" @@ -221,6 +230,19 @@ test_rewrite "kubectl apply -f deploy.yaml" \ "kubectl apply -f deploy.yaml" \ "rtk kubectl apply -f deploy.yaml" +# Intentionally not rewritten (format-specific) +test_rewrite "find (NOT rewritten — different arg format)" \ + "find . -name '*.ts'" \ + "" + +test_rewrite "tree (NOT rewritten — different arg format)" \ + "tree src/" \ + "" + +test_rewrite "wget (NOT rewritten — different arg format)" \ + "wget https://example.com/file" \ + "" + echo "" # ---- SECTION 4: Vitest edge case (fixed double "run" bug) ---- diff --git a/src/binlog.rs b/src/binlog.rs new file mode 100644 index 00000000..bd85f6ca --- /dev/null +++ b/src/binlog.rs @@ -0,0 +1,1617 @@ +use crate::utils::strip_ansi; +use anyhow::{Context, Result}; +use flate2::read::GzDecoder; +use lazy_static::lazy_static; +use regex::Regex; +use std::collections::HashSet; +use std::io::{Cursor, Read}; +use std::path::Path; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BinlogIssue { + pub code: String, + pub file: String, + pub line: u32, + pub column: u32, + pub message: String, +} + +#[derive(Debug, Clone, Default)] +pub struct BuildSummary { + pub succeeded: bool, + pub project_count: usize, + pub errors: Vec, + pub warnings: Vec, + pub duration_text: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FailedTest { + pub name: String, + pub details: Vec, +} + +#[derive(Debug, Clone, Default)] +pub struct TestSummary { + pub passed: usize, + pub failed: usize, + pub skipped: usize, + pub total: usize, + pub project_count: usize, + pub failed_tests: Vec, + pub duration_text: Option, +} + +#[derive(Debug, Clone, Default)] +pub struct 
RestoreSummary { + pub restored_projects: usize, + pub warnings: usize, + pub errors: usize, + pub duration_text: Option, +} + +lazy_static! { + static ref ISSUE_RE: Regex = Regex::new( + r"(?m)^\s*(?P[^\r\n:(]+)\((?P\d+),(?P\d+)\):\s*(?Perror|warning)\s*(?:(?P[A-Za-z]+\d+)\s*:\s*)?(?P.*)$" + ) + .expect("valid regex"); + static ref BUILD_SUMMARY_RE: Regex = Regex::new(r"(?mi)^\s*(?P\d+)\s+(?Pwarning|error)\(s\)") + .expect("valid regex"); + static ref ERROR_COUNT_RE: Regex = + Regex::new(r"(?i)\b(?P\d+)\s+error\(s\)").expect("valid regex"); + static ref WARNING_COUNT_RE: Regex = + Regex::new(r"(?i)\b(?P\d+)\s+warning\(s\)").expect("valid regex"); + static ref FALLBACK_ERROR_LINE_RE: Regex = + Regex::new(r"(?mi)^.+\(\d+,\d+\):\s*error(?:\s+[A-Za-z]{2,}\d{3,})?(?:\s*:.*)?$") + .expect("valid regex"); + static ref FALLBACK_WARNING_LINE_RE: Regex = + Regex::new(r"(?mi)^.+\(\d+,\d+\):\s*warning(?:\s+[A-Za-z]{2,}\d{3,})?(?:\s*:.*)?$") + .expect("valid regex"); + static ref DURATION_RE: Regex = + Regex::new(r"(?m)^\s*Time Elapsed\s+(?P[^\r\n]+)$").expect("valid regex"); + static ref TEST_RESULT_RE: Regex = Regex::new( + r"(?m)(?:Passed!|Failed!)\s*-\s*Failed:\s*(?P\d+),\s*Passed:\s*(?P\d+),\s*Skipped:\s*(?P\d+),\s*Total:\s*(?P\d+),\s*Duration:\s*(?P[^\r\n-]+)" + ) + .expect("valid regex"); + static ref TEST_SUMMARY_RE: Regex = Regex::new( + r"(?mi)^\s*Test summary:\s*total:\s*(?P\d+),\s*failed:\s*(?P\d+),\s*(?:succeeded|passed):\s*(?P\d+),\s*skipped:\s*(?P\d+),\s*duration:\s*(?P[^\r\n]+)$" + ) + .expect("valid regex"); + static ref FAILED_TEST_HEAD_RE: Regex = Regex::new( + r"(?m)^\s*Failed\s+(?P[^\r\n\[]+)\s+\[[^\]\r\n]+\]\s*$" + ) + .expect("valid regex"); + static ref RESTORE_PROJECT_RE: Regex = + Regex::new(r"(?m)^\s*Restored\s+.+\.csproj\s*\(").expect("valid regex"); + static ref RESTORE_DIAGNOSTIC_RE: Regex = Regex::new( + r"(?mi)^\s*(?:(?P.+?)\s+:\s+)?(?Pwarning|error)\s+(?P[A-Za-z]{2,}\d{3,})\s*:\s*(?P.+)$" + ) + .expect("valid regex"); + static ref 
PROJECT_PATH_RE: Regex = + Regex::new(r"(?m)^\s*([A-Za-z]:)?[^\r\n]*\.csproj(?:\s|$)").expect("valid regex"); + static ref PRINTABLE_RUN_RE: Regex = Regex::new(r"[\x20-\x7E]{5,}").expect("valid regex"); + static ref DIAGNOSTIC_CODE_RE: Regex = + Regex::new(r"^[A-Za-z]{2,}\d{3,}$").expect("valid regex"); + static ref SOURCE_FILE_RE: Regex = Regex::new(r"(?i)([A-Za-z]:)?[/\\][^\s]+\.(cs|vb|fs)") + .expect("valid regex"); + static ref SENSITIVE_ENV_RE: Regex = { + let keys = SENSITIVE_ENV_VARS + .iter() + .map(|key| regex::escape(key)) + .collect::>() + .join("|"); + Regex::new(&format!( + r"(?P\b(?:{})\s*(?:=|:)\s*)(?P[^\s;]+)", + keys + )) + .expect("valid regex") + }; +} + +const SENSITIVE_ENV_VARS: &[&str] = &[ + "PATH", + "HOME", + "USERPROFILE", + "USERNAME", + "USER", + "APPDATA", + "LOCALAPPDATA", + "TEMP", + "TMP", + "SSH_AUTH_SOCK", + "SSH_AGENT_LAUNCHER", + "GH_TOKEN", + "GITHUB_TOKEN", + "GITHUB_PAT", + "NUGET_API_KEY", + "NUGET_AUTH_TOKEN", + "VSS_NUGET_EXTERNAL_FEED_ENDPOINTS", + "AZURE_DEVOPS_TOKEN", + "AZURE_CLIENT_SECRET", + "AZURE_TENANT_ID", + "AZURE_CLIENT_ID", + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_SESSION_TOKEN", + "API_TOKEN", + "AUTH_TOKEN", + "ACCESS_TOKEN", + "BEARER_TOKEN", + "PASSWORD", + "CONNECTION_STRING", + "DATABASE_URL", + "DOCKER_CONFIG", + "KUBECONFIG", +]; + +const RECORD_END_OF_FILE: i32 = 0; +const RECORD_BUILD_STARTED: i32 = 1; +const RECORD_BUILD_FINISHED: i32 = 2; +const RECORD_PROJECT_STARTED: i32 = 3; +const RECORD_PROJECT_FINISHED: i32 = 4; +const RECORD_ERROR: i32 = 9; +const RECORD_WARNING: i32 = 10; +const RECORD_MESSAGE: i32 = 11; +const RECORD_CRITICAL_BUILD_MESSAGE: i32 = 13; +const RECORD_PROJECT_IMPORT_ARCHIVE: i32 = 17; +const RECORD_NAME_VALUE_LIST: i32 = 23; +const RECORD_STRING: i32 = 24; + +const FLAG_BUILD_EVENT_CONTEXT: i32 = 1 << 0; +const FLAG_MESSAGE: i32 = 1 << 2; +const FLAG_TIMESTAMP: i32 = 1 << 5; +const FLAG_ARGUMENTS: i32 = 1 << 14; +const FLAG_IMPORTANCE: i32 = 1 << 15; +const 
FLAG_EXTENDED: i32 = 1 << 16; + +const STRING_RECORD_START_INDEX: i32 = 10; + +pub fn parse_build(binlog_path: &Path) -> Result { + let parsed = parse_events_from_binlog(binlog_path) + .with_context(|| format!("Failed to parse binlog at {}", binlog_path.display()))?; + let strings_blob = parsed.string_records.join("\n"); + let text_fallback = parse_build_from_text(&strings_blob); + + let duration_text = match (parsed.build_started_ticks, parsed.build_finished_ticks) { + (Some(start), Some(end)) if end >= start => Some(format_ticks_duration(end - start)), + _ => None, + }; + + let parsed_project_count = parsed.project_files.len(); + + Ok(BuildSummary { + succeeded: parsed.build_succeeded.unwrap_or(false), + project_count: if parsed_project_count > 0 { + parsed_project_count + } else { + text_fallback.project_count + }, + errors: select_best_issues(parsed.errors, text_fallback.errors), + warnings: select_best_issues(parsed.warnings, text_fallback.warnings), + duration_text, + }) +} + +fn select_best_issues(primary: Vec, fallback: Vec) -> Vec { + if primary.is_empty() { + return fallback; + } + if fallback.is_empty() { + return primary; + } + if primary.iter().all(is_suspicious_issue) && fallback.iter().any(is_contextual_issue) { + return fallback; + } + if issues_quality_score(&fallback) > issues_quality_score(&primary) { + fallback + } else { + primary + } +} + +fn issues_quality_score(issues: &[BinlogIssue]) -> usize { + issues.iter().map(issue_quality_score).sum() +} + +fn issue_quality_score(issue: &BinlogIssue) -> usize { + let mut score = 0; + if is_contextual_issue(issue) { + score += 4; + } + if !issue.code.is_empty() && is_likely_diagnostic_code(&issue.code) { + score += 2; + } + if issue.line > 0 { + score += 1; + } + if issue.column > 0 { + score += 1; + } + if !issue.message.is_empty() && issue.message != "Build issue" { + score += 1; + } + score +} + +fn is_contextual_issue(issue: &BinlogIssue) -> bool { + !issue.file.is_empty() && 
!is_likely_diagnostic_code(&issue.file) +} + +fn is_suspicious_issue(issue: &BinlogIssue) -> bool { + issue.code.is_empty() && is_likely_diagnostic_code(&issue.file) +} + +pub fn parse_test(binlog_path: &Path) -> Result { + let parsed = parse_events_from_binlog(binlog_path) + .with_context(|| format!("Failed to parse binlog at {}", binlog_path.display()))?; + let blob = parsed.string_records.join("\n"); + let mut summary = parse_test_from_text(&blob); + let parsed_project_count = parsed.project_files.len(); + if parsed_project_count > 0 { + summary.project_count = parsed_project_count; + } + Ok(summary) +} + +pub fn parse_restore(binlog_path: &Path) -> Result { + let parsed = parse_events_from_binlog(binlog_path) + .with_context(|| format!("Failed to parse binlog at {}", binlog_path.display()))?; + let blob = parsed.string_records.join("\n"); + let mut summary = parse_restore_from_text(&blob); + let parsed_project_count = parsed.project_files.len(); + if parsed_project_count > 0 { + summary.restored_projects = parsed_project_count; + } + Ok(summary) +} + +#[derive(Default)] +struct ParsedBinlog { + string_records: Vec, + messages: Vec, + project_files: HashSet, + errors: Vec, + warnings: Vec, + build_succeeded: Option, + build_started_ticks: Option, + build_finished_ticks: Option, +} + +#[derive(Default)] +struct ParsedEventFields { + message: Option, + timestamp_ticks: Option, +} + +fn parse_events_from_binlog(path: &Path) -> Result { + let bytes = std::fs::read(path) + .with_context(|| format!("Failed to read binlog at {}", path.display()))?; + if bytes.is_empty() { + anyhow::bail!("Failed to parse binlog at {}: empty file", path.display()); + } + + let mut decoder = GzDecoder::new(bytes.as_slice()); + let mut payload = Vec::new(); + decoder.read_to_end(&mut payload).with_context(|| { + format!( + "Failed to parse binlog at {}: gzip decode failed", + path.display() + ) + })?; + + let mut reader = BinReader::new(&payload); + let file_format_version = reader + 
.read_i32_le() + .context("binlog header missing file format version")?; + let _minimum_reader_version = reader + .read_i32_le() + .context("binlog header missing minimum reader version")?; + + if file_format_version < 18 { + anyhow::bail!( + "Failed to parse binlog at {}: unsupported binlog format {}", + path.display(), + file_format_version + ); + } + + let mut parsed = ParsedBinlog::default(); + + while !reader.is_eof() { + let kind = reader + .read_7bit_i32() + .context("failed to read record kind")?; + if kind == RECORD_END_OF_FILE { + break; + } + + match kind { + RECORD_STRING => { + let text = reader + .read_dotnet_string() + .context("failed to read string record")?; + parsed.string_records.push(text); + } + RECORD_NAME_VALUE_LIST | RECORD_PROJECT_IMPORT_ARCHIVE => { + let len = reader + .read_7bit_i32() + .context("failed to read record length")?; + if len < 0 { + anyhow::bail!("negative record length: {}", len); + } + reader + .skip(len as usize) + .context("failed to skip auxiliary record payload")?; + } + _ => { + let len = reader + .read_7bit_i32() + .context("failed to read event length")?; + if len < 0 { + anyhow::bail!("negative event length: {}", len); + } + + let payload = reader + .read_exact(len as usize) + .context("failed to read event payload")?; + let mut event_reader = BinReader::new(payload); + let _ = + parse_event_record(kind, &mut event_reader, file_format_version, &mut parsed); + } + } + } + + Ok(parsed) +} + +fn parse_event_record( + kind: i32, + reader: &mut BinReader<'_>, + file_format_version: i32, + parsed: &mut ParsedBinlog, +) -> Result<()> { + match kind { + RECORD_BUILD_STARTED => { + let fields = read_event_fields(reader, file_format_version, parsed, false)?; + parsed.build_started_ticks = fields.timestamp_ticks; + } + RECORD_BUILD_FINISHED => { + let fields = read_event_fields(reader, file_format_version, parsed, false)?; + parsed.build_finished_ticks = fields.timestamp_ticks; + parsed.build_succeeded = 
Some(reader.read_bool()?); + } + RECORD_PROJECT_STARTED => { + let _fields = read_event_fields(reader, file_format_version, parsed, false)?; + if reader.read_bool()? { + skip_build_event_context(reader, file_format_version)?; + } + if let Some(project_file) = read_optional_string(reader, parsed)? { + if !project_file.is_empty() { + parsed.project_files.insert(project_file); + } + } + } + RECORD_PROJECT_FINISHED => { + let _fields = read_event_fields(reader, file_format_version, parsed, false)?; + if let Some(project_file) = read_optional_string(reader, parsed)? { + if !project_file.is_empty() { + parsed.project_files.insert(project_file); + } + } + let _ = reader.read_bool()?; + } + RECORD_ERROR | RECORD_WARNING => { + let fields = read_event_fields(reader, file_format_version, parsed, false)?; + + let _subcategory = read_optional_string(reader, parsed)?; + let code = read_optional_string(reader, parsed)?.unwrap_or_default(); + let file = read_optional_string(reader, parsed)?.unwrap_or_default(); + let _project_file = read_optional_string(reader, parsed)?; + let line = reader.read_7bit_i32()?.max(0) as u32; + let column = reader.read_7bit_i32()?.max(0) as u32; + let _ = reader.read_7bit_i32()?; + let _ = reader.read_7bit_i32()?; + + let issue = BinlogIssue { + code, + file, + line, + column, + message: fields.message.unwrap_or_default(), + }; + + if kind == RECORD_ERROR { + parsed.errors.push(issue); + } else { + parsed.warnings.push(issue); + } + } + RECORD_MESSAGE => { + let fields = read_event_fields(reader, file_format_version, parsed, true)?; + if let Some(message) = fields.message { + parsed.messages.push(message); + } + } + RECORD_CRITICAL_BUILD_MESSAGE => { + let fields = read_event_fields(reader, file_format_version, parsed, false)?; + if let Some(message) = fields.message { + parsed.messages.push(message); + } + } + _ => {} + } + + Ok(()) +} + +fn read_event_fields( + reader: &mut BinReader<'_>, + file_format_version: i32, + parsed: &ParsedBinlog, + 
read_importance: bool, +) -> Result { + let flags = reader.read_7bit_i32()?; + let mut result = ParsedEventFields::default(); + + if flags & FLAG_MESSAGE != 0 { + result.message = read_deduplicated_string(reader, parsed)?; + } + + if flags & FLAG_BUILD_EVENT_CONTEXT != 0 { + skip_build_event_context(reader, file_format_version)?; + } + + if flags & FLAG_TIMESTAMP != 0 { + result.timestamp_ticks = Some(reader.read_i64_le()?); + let _ = reader.read_7bit_i32()?; + } + + if flags & FLAG_EXTENDED != 0 { + let _ = read_optional_string(reader, parsed)?; + skip_string_dictionary(reader, file_format_version)?; + let _ = read_optional_string(reader, parsed)?; + } + + if flags & FLAG_ARGUMENTS != 0 { + let count = reader.read_7bit_i32()?.max(0) as usize; + for _ in 0..count { + let _ = read_deduplicated_string(reader, parsed)?; + } + } + + if (file_format_version < 13 && read_importance) || (flags & FLAG_IMPORTANCE != 0) { + let _ = reader.read_7bit_i32()?; + } + + Ok(result) +} + +fn skip_build_event_context(reader: &mut BinReader<'_>, file_format_version: i32) -> Result<()> { + let count = if file_format_version > 1 { 7 } else { 6 }; + for _ in 0..count { + let _ = reader.read_7bit_i32()?; + } + Ok(()) +} + +fn skip_string_dictionary(reader: &mut BinReader<'_>, file_format_version: i32) -> Result<()> { + if file_format_version < 10 { + anyhow::bail!("legacy dictionary format is unsupported"); + } + + let _ = reader.read_7bit_i32()?; + Ok(()) +} + +fn read_optional_string( + reader: &mut BinReader<'_>, + parsed: &ParsedBinlog, +) -> Result> { + read_deduplicated_string(reader, parsed) +} + +fn read_deduplicated_string( + reader: &mut BinReader<'_>, + parsed: &ParsedBinlog, +) -> Result> { + let index = reader.read_7bit_i32()?; + if index == 0 { + return Ok(None); + } + if index == 1 { + return Ok(Some(String::new())); + } + if index < STRING_RECORD_START_INDEX { + return Ok(None); + } + let record_idx = (index - STRING_RECORD_START_INDEX) as usize; + parsed + .string_records 
+ .get(record_idx) + .cloned() + .map(Some) + .with_context(|| format!("invalid string record index {}", index)) +} + +fn format_ticks_duration(ticks: i64) -> String { + let total_seconds = ticks.div_euclid(10_000_000); + let centiseconds = ticks.rem_euclid(10_000_000) / 100_000; + let hours = total_seconds / 3600; + let minutes = (total_seconds % 3600) / 60; + let seconds = total_seconds % 60; + format!( + "{:02}:{:02}:{:02}.{:02}", + hours, minutes, seconds, centiseconds + ) +} + +struct BinReader<'a> { + cursor: Cursor<&'a [u8]>, +} + +impl<'a> BinReader<'a> { + fn new(bytes: &'a [u8]) -> Self { + Self { + cursor: Cursor::new(bytes), + } + } + + fn is_eof(&self) -> bool { + (self.cursor.position() as usize) >= self.cursor.get_ref().len() + } + + fn read_exact(&mut self, len: usize) -> Result<&'a [u8]> { + let start = self.cursor.position() as usize; + let end = start.saturating_add(len); + if end > self.cursor.get_ref().len() { + anyhow::bail!("unexpected end of stream"); + } + self.cursor.set_position(end as u64); + Ok(&self.cursor.get_ref()[start..end]) + } + + fn skip(&mut self, len: usize) -> Result<()> { + let _ = self.read_exact(len)?; + Ok(()) + } + + fn read_u8(&mut self) -> Result { + Ok(self.read_exact(1)?[0]) + } + + fn read_bool(&mut self) -> Result { + Ok(self.read_u8()? 
!= 0) + } + + fn read_i32_le(&mut self) -> Result { + let b = self.read_exact(4)?; + Ok(i32::from_le_bytes([b[0], b[1], b[2], b[3]])) + } + + fn read_i64_le(&mut self) -> Result { + let b = self.read_exact(8)?; + Ok(i64::from_le_bytes([ + b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], + ])) + } + + fn read_7bit_i32(&mut self) -> Result { + let mut value: u32 = 0; + let mut shift = 0; + loop { + let byte = self.read_u8()?; + value |= ((byte & 0x7F) as u32) << shift; + if (byte & 0x80) == 0 { + return Ok(value as i32); + } + + shift += 7; + if shift >= 35 { + anyhow::bail!("invalid 7-bit encoded integer"); + } + } + } + + fn read_dotnet_string(&mut self) -> Result { + let len = self.read_7bit_i32()?; + if len < 0 { + anyhow::bail!("negative string length: {}", len); + } + let bytes = self.read_exact(len as usize)?; + String::from_utf8(bytes.to_vec()).context("invalid UTF-8 string") + } +} + +pub fn scrub_sensitive_env_vars(input: &str) -> String { + SENSITIVE_ENV_RE + .replace_all(input, "${prefix}[REDACTED]") + .into_owned() +} + +pub fn parse_build_from_text(text: &str) -> BuildSummary { + let clean = strip_ansi(text); + let scrubbed = scrub_sensitive_env_vars(&clean); + let mut seen_errors: HashSet<(String, String, u32, u32, String)> = HashSet::new(); + let mut seen_warnings: HashSet<(String, String, u32, u32, String)> = HashSet::new(); + let mut summary = BuildSummary { + succeeded: scrubbed.contains("Build succeeded") && !scrubbed.contains("Build FAILED"), + project_count: count_projects(&scrubbed), + errors: Vec::new(), + warnings: Vec::new(), + duration_text: extract_duration(&scrubbed), + }; + + for captures in ISSUE_RE.captures_iter(&scrubbed) { + let issue = BinlogIssue { + code: captures + .name("code") + .map(|m| m.as_str().to_string()) + .unwrap_or_default(), + file: captures + .name("file") + .map(|m| m.as_str().to_string()) + .unwrap_or_default(), + line: captures + .name("line") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0), + column: 
captures + .name("column") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0), + message: captures + .name("msg") + .map(|m| { + let msg = m.as_str().trim(); + if msg.is_empty() { + "diagnostic without message".to_string() + } else { + msg.to_string() + } + }) + .unwrap_or_default(), + }; + + let key = ( + issue.code.clone(), + issue.file.clone(), + issue.line, + issue.column, + issue.message.clone(), + ); + + match captures.name("kind").map(|m| m.as_str()) { + Some("error") => { + if seen_errors.insert(key) { + summary.errors.push(issue); + } + } + Some("warning") => { + if seen_warnings.insert(key) { + summary.warnings.push(issue); + } + } + _ => {} + } + } + + if summary.errors.is_empty() || summary.warnings.is_empty() { + let mut warning_count_from_summary = 0; + let mut error_count_from_summary = 0; + + for captures in BUILD_SUMMARY_RE.captures_iter(&scrubbed) { + let count = captures + .name("count") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + + match captures + .name("kind") + .map(|m| m.as_str().to_ascii_lowercase()) + .as_deref() + { + Some("warning") => { + warning_count_from_summary = warning_count_from_summary.max(count) + } + Some("error") => error_count_from_summary = error_count_from_summary.max(count), + _ => {} + } + } + + let inline_error_count = ERROR_COUNT_RE + .captures_iter(&scrubbed) + .filter_map(|captures| { + captures + .name("count") + .and_then(|m| m.as_str().parse::().ok()) + }) + .max() + .unwrap_or(0); + let inline_warning_count = WARNING_COUNT_RE + .captures_iter(&scrubbed) + .filter_map(|captures| { + captures + .name("count") + .and_then(|m| m.as_str().parse::().ok()) + }) + .max() + .unwrap_or(0); + + warning_count_from_summary = warning_count_from_summary.max(inline_warning_count); + error_count_from_summary = error_count_from_summary.max(inline_error_count); + + if summary.errors.is_empty() { + for idx in 0..error_count_from_summary { + summary.errors.push(BinlogIssue { + code: String::new(), + file: 
String::new(), + line: 0, + column: 0, + message: format!("Build error #{} (details omitted)", idx + 1), + }); + } + } + + if summary.warnings.is_empty() { + for idx in 0..warning_count_from_summary { + summary.warnings.push(BinlogIssue { + code: String::new(), + file: String::new(), + line: 0, + column: 0, + message: format!("Build warning #{} (details omitted)", idx + 1), + }); + } + } + + if summary.errors.is_empty() { + let fallback_error_lines = FALLBACK_ERROR_LINE_RE.captures_iter(&scrubbed).count(); + for idx in 0..fallback_error_lines { + summary.errors.push(BinlogIssue { + code: String::new(), + file: String::new(), + line: 0, + column: 0, + message: format!("Build error #{} (details omitted)", idx + 1), + }); + } + } + + if summary.warnings.is_empty() { + let fallback_warning_lines = FALLBACK_WARNING_LINE_RE.captures_iter(&scrubbed).count(); + for idx in 0..fallback_warning_lines { + summary.warnings.push(BinlogIssue { + code: String::new(), + file: String::new(), + line: 0, + column: 0, + message: format!("Build warning #{} (details omitted)", idx + 1), + }); + } + } + } + + let has_error_signal = scrubbed.contains("Build FAILED") + || scrubbed.contains(": error ") + || BUILD_SUMMARY_RE.captures_iter(&scrubbed).any(|captures| { + let is_error = matches!( + captures + .name("kind") + .map(|m| m.as_str().to_ascii_lowercase()) + .as_deref(), + Some("error") + ); + let count = captures + .name("count") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + is_error && count > 0 + }); + + if summary.errors.is_empty() && !summary.succeeded && has_error_signal { + summary.errors = extract_binary_like_issues(&scrubbed); + } + + if summary.project_count == 0 + && (scrubbed.contains("Build succeeded") + || scrubbed.contains("Build FAILED") + || scrubbed.contains(" -> ")) + { + summary.project_count = 1; + } + + summary +} + +pub fn parse_test_from_text(text: &str) -> TestSummary { + let clean = strip_ansi(text); + let scrubbed = 
scrub_sensitive_env_vars(&clean); + let mut summary = TestSummary { + passed: 0, + failed: 0, + skipped: 0, + total: 0, + project_count: count_projects(&scrubbed).max(1), + failed_tests: Vec::new(), + duration_text: extract_duration(&scrubbed), + }; + + let mut found_summary_line = false; + let mut fallback_duration = None; + for captures in TEST_RESULT_RE.captures_iter(&scrubbed) { + found_summary_line = true; + summary.passed += captures + .name("passed") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + summary.failed += captures + .name("failed") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + summary.skipped += captures + .name("skipped") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + summary.total += captures + .name("total") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + + if let Some(duration) = captures.name("duration") { + fallback_duration = Some(duration.as_str().trim().to_string()); + } + } + + if found_summary_line && summary.duration_text.is_none() { + summary.duration_text = fallback_duration; + } + + if let Some(captures) = TEST_SUMMARY_RE.captures_iter(&scrubbed).last() { + summary.passed = captures + .name("passed") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(summary.passed); + summary.failed = captures + .name("failed") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(summary.failed); + summary.skipped = captures + .name("skipped") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(summary.skipped); + summary.total = captures + .name("total") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(summary.total); + + if let Some(duration) = captures.name("duration") { + summary.duration_text = Some(duration.as_str().trim().to_string()); + } + } + + let lines: Vec<&str> = scrubbed.lines().collect(); + let mut idx = 0; + while idx < lines.len() { + let line = lines[idx]; + if let Some(captures) = FAILED_TEST_HEAD_RE.captures(line) { + let name = captures + 
.name("name") + .map(|m| m.as_str().trim().to_string()) + .unwrap_or_else(|| "unknown".to_string()); + let mut details = Vec::new(); + idx += 1; + while idx < lines.len() { + let detail_line = lines[idx].trim_end(); + if FAILED_TEST_HEAD_RE.is_match(detail_line) { + idx = idx.saturating_sub(1); + break; + } + let detail_trimmed = detail_line.trim_start(); + if detail_trimmed.starts_with("Failed! -") + || detail_trimmed.starts_with("Passed! -") + || detail_trimmed.starts_with("Test summary:") + || detail_trimmed.starts_with("Build ") + { + idx = idx.saturating_sub(1); + break; + } + + if detail_line.trim().is_empty() { + if !details.is_empty() { + details.push(String::new()); + } + } else { + details.push(detail_line.trim().to_string()); + } + if details.len() >= 20 { + break; + } + idx += 1; + } + summary.failed_tests.push(FailedTest { name, details }); + } + idx += 1; + } + + if summary.failed == 0 { + summary.failed = summary.failed_tests.len(); + } + if summary.total == 0 { + summary.total = summary.passed + summary.failed + summary.skipped; + } + + summary +} + +pub fn parse_restore_from_text(text: &str) -> RestoreSummary { + let (errors, warnings) = parse_restore_issues_from_text(text); + let clean = strip_ansi(text); + let scrubbed = scrub_sensitive_env_vars(&clean); + + RestoreSummary { + restored_projects: RESTORE_PROJECT_RE.captures_iter(&scrubbed).count(), + warnings: warnings.len(), + errors: errors.len(), + duration_text: extract_duration(&scrubbed), + } +} + +pub fn parse_restore_issues_from_text(text: &str) -> (Vec, Vec) { + let clean = strip_ansi(text); + let scrubbed = scrub_sensitive_env_vars(&clean); + let mut errors = Vec::new(); + let mut warnings = Vec::new(); + let mut seen_errors: HashSet<(String, String, u32, u32, String)> = HashSet::new(); + let mut seen_warnings: HashSet<(String, String, u32, u32, String)> = HashSet::new(); + + for captures in RESTORE_DIAGNOSTIC_RE.captures_iter(&scrubbed) { + let issue = BinlogIssue { + code: captures + 
.name("code") + .map(|m| m.as_str().trim().to_string()) + .unwrap_or_default(), + file: captures + .name("file") + .map(|m| m.as_str().trim().to_string()) + .unwrap_or_default(), + line: 0, + column: 0, + message: captures + .name("msg") + .map(|m| m.as_str().trim().to_string()) + .unwrap_or_default(), + }; + + let key = ( + issue.code.clone(), + issue.file.clone(), + issue.line, + issue.column, + issue.message.clone(), + ); + + match captures + .name("kind") + .map(|m| m.as_str().to_ascii_lowercase()) + { + Some(kind) if kind == "error" => { + if seen_errors.insert(key) { + errors.push(issue); + } + } + Some(kind) if kind == "warning" => { + if seen_warnings.insert(key) { + warnings.push(issue); + } + } + _ => {} + } + } + + (errors, warnings) +} + +fn count_projects(text: &str) -> usize { + PROJECT_PATH_RE.captures_iter(text).count() +} + +fn extract_duration(text: &str) -> Option { + DURATION_RE + .captures(text) + .and_then(|c| c.name("duration")) + .map(|m| m.as_str().trim().to_string()) +} + +fn extract_printable_runs(text: &str) -> Vec { + let mut runs = Vec::new(); + for captures in PRINTABLE_RUN_RE.captures_iter(text) { + let Some(matched) = captures.get(0) else { + continue; + }; + + let run = matched.as_str().trim(); + if run.len() < 5 { + continue; + } + runs.push(run.to_string()); + } + runs +} + +fn extract_binary_like_issues(text: &str) -> Vec { + let runs = extract_printable_runs(text); + if runs.is_empty() { + return Vec::new(); + } + + let mut issues = Vec::new(); + let mut seen: HashSet<(String, String, String)> = HashSet::new(); + + for idx in 0..runs.len() { + let code = runs[idx].trim(); + if !DIAGNOSTIC_CODE_RE.is_match(code) || !is_likely_diagnostic_code(code) { + continue; + } + + let message = (1..=4) + .filter_map(|delta| idx.checked_sub(delta)) + .map(|j| runs[j].trim()) + .find(|candidate| { + !DIAGNOSTIC_CODE_RE.is_match(candidate) + && !SOURCE_FILE_RE.is_match(candidate) + && candidate.chars().any(|c| c.is_ascii_alphabetic()) + && 
candidate.contains(' ') + && !candidate.contains("Copyright") + && !candidate.contains("Compiler version") + }) + .unwrap_or("Build issue") + .to_string(); + + let file = (1..=4) + .filter_map(|delta| runs.get(idx + delta)) + .find_map(|candidate| { + SOURCE_FILE_RE + .captures(candidate) + .and_then(|caps| caps.get(0)) + .map(|m| m.as_str().to_string()) + }) + .unwrap_or_default(); + + if file.is_empty() && message == "Build issue" { + continue; + } + + let key = (code.to_string(), file.clone(), message.clone()); + if !seen.insert(key) { + continue; + } + + issues.push(BinlogIssue { + code: code.to_string(), + file, + line: 0, + column: 0, + message, + }); + } + + issues +} + +fn is_likely_diagnostic_code(code: &str) -> bool { + const ALLOWED_PREFIXES: &[&str] = &[ + "CS", "MSB", "NU", "FS", "BC", "CA", "SA", "IDE", "IL", "VB", "AD", "TS", "C", "LNK", + ]; + + ALLOWED_PREFIXES + .iter() + .any(|prefix| code.starts_with(prefix)) +} + +#[cfg(test)] +mod tests { + use super::*; + use flate2::write::GzEncoder; + use flate2::Compression; + use std::io::Write; + + fn write_7bit_i32(buf: &mut Vec, value: i32) { + let mut v = value as u32; + while v >= 0x80 { + buf.push(((v as u8) & 0x7F) | 0x80); + v >>= 7; + } + buf.push(v as u8); + } + + fn write_dotnet_string(buf: &mut Vec, value: &str) { + write_7bit_i32(buf, value.len() as i32); + buf.extend_from_slice(value.as_bytes()); + } + + fn write_event_record(target: &mut Vec, kind: i32, payload: &[u8]) { + write_7bit_i32(target, kind); + write_7bit_i32(target, payload.len() as i32); + target.extend_from_slice(payload); + } + + fn build_minimal_binlog(records: &[u8]) -> Vec { + let mut plain = Vec::new(); + plain.extend_from_slice(&25_i32.to_le_bytes()); + plain.extend_from_slice(&18_i32.to_le_bytes()); + plain.extend_from_slice(records); + + let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(&plain).expect("write plain payload"); + encoder.finish().expect("finish gzip") + } + + #[test] 
+ fn test_scrub_sensitive_env_vars_masks_values() { + let input = "PATH=/usr/local/bin HOME: /Users/daniel GITHUB_TOKEN=ghp_123"; + let scrubbed = scrub_sensitive_env_vars(input); + + assert!(scrubbed.contains("PATH=[REDACTED]")); + assert!(scrubbed.contains("HOME: [REDACTED]")); + assert!(scrubbed.contains("GITHUB_TOKEN=[REDACTED]")); + assert!(!scrubbed.contains("/usr/local/bin")); + assert!(!scrubbed.contains("ghp_123")); + } + + #[test] + fn test_scrub_sensitive_env_vars_masks_token_and_connection_values() { + let input = "GH_TOKEN=ghs_abc AWS_SESSION_TOKEN=aws_xyz CONNECTION_STRING=Server=localhost"; + let scrubbed = scrub_sensitive_env_vars(input); + + assert!(scrubbed.contains("GH_TOKEN=[REDACTED]")); + assert!(scrubbed.contains("AWS_SESSION_TOKEN=[REDACTED]")); + assert!(scrubbed.contains("CONNECTION_STRING=[REDACTED]")); + assert!(!scrubbed.contains("ghs_abc")); + assert!(!scrubbed.contains("aws_xyz")); + assert!(!scrubbed.contains("Server=localhost")); + } + + #[test] + fn test_parse_build_from_text_extracts_issues() { + let input = r#" +Build FAILED. 
+src/Program.cs(42,15): error CS0103: The name 'foo' does not exist +src/Program.cs(25,10): warning CS0219: Variable 'x' is assigned but never used + 1 Warning(s) + 1 Error(s) +Time Elapsed 00:00:03.45 +"#; + + let summary = parse_build_from_text(input); + assert!(!summary.succeeded); + assert_eq!(summary.errors.len(), 1); + assert_eq!(summary.warnings.len(), 1); + assert_eq!(summary.errors[0].code, "CS0103"); + assert_eq!(summary.warnings[0].code, "CS0219"); + assert_eq!(summary.duration_text.as_deref(), Some("00:00:03.45")); + } + + #[test] + fn test_parse_build_from_text_extracts_warning_without_code() { + let input = r#" +/Users/dev/sdk/Microsoft.TestPlatform.targets(48,5): warning +Build succeeded with 1 warning(s) in 0.5s +"#; + + let summary = parse_build_from_text(input); + assert_eq!(summary.warnings.len(), 1); + assert_eq!( + summary.warnings[0].file, + "/Users/dev/sdk/Microsoft.TestPlatform.targets" + ); + assert_eq!(summary.warnings[0].code, ""); + } + + #[test] + fn test_parse_build_from_text_extracts_inline_warning_counts() { + let input = r#" +Build failed with 1 error(s) and 4 warning(s) in 4.7s +"#; + + let summary = parse_build_from_text(input); + assert_eq!(summary.errors.len(), 1); + assert_eq!(summary.warnings.len(), 4); + } + + #[test] + fn test_parse_test_from_text_extracts_failure_summary() { + let input = r#" +Failed! 
- Failed: 2, Passed: 245, Skipped: 0, Total: 247, Duration: 1 s + Failed MyApp.Tests.UnitTests.CalculatorTests.Add_ShouldReturnSum [5 ms] + Error Message: + Assert.Equal() Failure: Expected 5, Actual 4 + + Failed MyApp.Tests.IntegrationTests.DatabaseTests.CanConnect [20 ms] + Error Message: + System.InvalidOperationException: Connection refused +"#; + + let summary = parse_test_from_text(input); + assert_eq!(summary.passed, 245); + assert_eq!(summary.failed, 2); + assert_eq!(summary.total, 247); + assert_eq!(summary.failed_tests.len(), 2); + assert!(summary.failed_tests[0] + .name + .contains("CalculatorTests.Add_ShouldReturnSum")); + } + + #[test] + fn test_parse_test_from_text_keeps_multiline_failure_details() { + let input = r#" +Failed! - Failed: 1, Passed: 10, Skipped: 0, Total: 11, Duration: 1 s + Failed MyApp.Tests.SampleTests.ShouldFail [5 ms] + Error Message: + Assert.That(messageInstance, Is.Null) + Expected: null + But was: + + Stack Trace: + at MyApp.Tests.SampleTests.ShouldFail() in /repo/SampleTests.cs:line 42 +"#; + + let summary = parse_test_from_text(input); + assert_eq!(summary.failed, 1); + assert_eq!(summary.failed_tests.len(), 1); + let details = summary.failed_tests[0].details.join("\n"); + assert!(details.contains("Expected: null")); + assert!(details.contains("But was:")); + assert!(details.contains("Stack Trace:")); + } + + #[test] + fn test_parse_test_from_text_ignores_non_test_failed_prefix_lines() { + let input = r#" +Passed! - Failed: 0, Passed: 940, Skipped: 7, Total: 947, Duration: 1 s + Failed to load prune package data from PrunePackageData folder, loading from targeting packs instead +"#; + + let summary = parse_test_from_text(input); + assert_eq!(summary.failed, 0); + assert!(summary.failed_tests.is_empty()); + } + + #[test] + fn test_parse_test_from_text_aggregates_multiple_project_summaries() { + let input = r#" +Passed! - Failed: 0, Passed: 914, Skipped: 7, Total: 921, Duration: 00:00:08.20 +Failed! 
- Failed: 1, Passed: 26, Skipped: 0, Total: 27, Duration: 00:00:00.54 +Time Elapsed 00:00:12.34 +"#; + + let summary = parse_test_from_text(input); + assert_eq!(summary.passed, 940); + assert_eq!(summary.failed, 1); + assert_eq!(summary.skipped, 7); + assert_eq!(summary.total, 948); + assert_eq!(summary.duration_text.as_deref(), Some("00:00:12.34")); + } + + #[test] + fn test_parse_test_from_text_prefers_test_summary_duration_and_counts() { + let input = r#" +Failed! - Failed: 1, Passed: 940, Skipped: 7, Total: 948, Duration: 1 s +Test summary: total: 949, failed: 1, succeeded: 940, skipped: 7, duration: 2.7s +Build failed with 1 error(s) and 4 warning(s) in 6.0s +"#; + + let summary = parse_test_from_text(input); + assert_eq!(summary.passed, 940); + assert_eq!(summary.failed, 1); + assert_eq!(summary.skipped, 7); + assert_eq!(summary.total, 949); + assert_eq!(summary.duration_text.as_deref(), Some("2.7s")); + } + + #[test] + fn test_parse_restore_from_text_extracts_project_count() { + let input = r#" + Restored /tmp/App/App.csproj (in 1.1 sec). + Restored /tmp/App.Tests/App.Tests.csproj (in 1.2 sec). +"#; + + let summary = parse_restore_from_text(input); + assert_eq!(summary.restored_projects, 2); + assert_eq!(summary.errors, 0); + } + + #[test] + fn test_parse_restore_from_text_extracts_nuget_error_diagnostic() { + let input = r#" +/Users/dev/src/App/App.csproj : error NU1101: Unable to find package Foo.Bar. 
No packages exist with this id in source(s): nuget.org + +Restore failed with 1 error(s) in 1.0s +"#; + + let summary = parse_restore_from_text(input); + assert_eq!(summary.errors, 1); + assert_eq!(summary.warnings, 0); + } + + #[test] + fn test_parse_restore_issues_ignores_summary_warning_error_counts() { + let input = r#" + 0 Warning(s) + 1 Error(s) + + Time Elapsed 00:00:01.23 +"#; + + let (errors, warnings) = parse_restore_issues_from_text(input); + assert_eq!(errors.len(), 0); + assert_eq!(warnings.len(), 0); + } + + #[test] + fn test_parse_build_fails_when_binlog_is_unparseable() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("build.binlog"); + std::fs::write(&binlog_path, [0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00]) + .expect("write binary file"); + + let err = parse_build(&binlog_path).expect_err("parse should fail"); + assert!( + err.to_string().contains("Failed to parse binlog"), + "unexpected error: {}", + err + ); + } + + #[test] + fn test_parse_build_fails_when_binlog_missing() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("build.binlog"); + + let err = parse_build(&binlog_path).expect_err("parse should fail"); + assert!( + err.to_string().contains("Failed to parse binlog"), + "unexpected error: {}", + err + ); + } + + #[test] + fn test_parse_build_reads_structured_events() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("build.binlog"); + + let mut records = Vec::new(); + + // String records (index starts at 10) + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string(&mut records, "Build started"); // 10 + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string(&mut records, "Build finished"); // 11 + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string(&mut records, "src/App.csproj"); // 12 + write_7bit_i32(&mut records, RECORD_STRING); + 
write_dotnet_string(&mut records, "The name 'foo' does not exist"); // 13 + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string(&mut records, "CS0103"); // 14 + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string(&mut records, "src/Program.cs"); // 15 + + // BuildStarted (message + timestamp) + let mut build_started = Vec::new(); + write_7bit_i32(&mut build_started, FLAG_MESSAGE | FLAG_TIMESTAMP); + write_7bit_i32(&mut build_started, 10); + build_started.extend_from_slice(&1_000_000_000_i64.to_le_bytes()); + write_7bit_i32(&mut build_started, 1); + write_event_record(&mut records, RECORD_BUILD_STARTED, &build_started); + + // ProjectFinished + let mut project_finished = Vec::new(); + write_7bit_i32(&mut project_finished, 0); + write_7bit_i32(&mut project_finished, 12); + project_finished.push(1); + write_event_record(&mut records, RECORD_PROJECT_FINISHED, &project_finished); + + // Error event + let mut error_event = Vec::new(); + write_7bit_i32(&mut error_event, FLAG_MESSAGE); + write_7bit_i32(&mut error_event, 13); + write_7bit_i32(&mut error_event, 0); // subcategory + write_7bit_i32(&mut error_event, 14); // code + write_7bit_i32(&mut error_event, 15); // file + write_7bit_i32(&mut error_event, 0); // project file + write_7bit_i32(&mut error_event, 42); + write_7bit_i32(&mut error_event, 10); + write_7bit_i32(&mut error_event, 42); + write_7bit_i32(&mut error_event, 10); + write_event_record(&mut records, RECORD_ERROR, &error_event); + + // BuildFinished (message + timestamp + succeeded) + let mut build_finished = Vec::new(); + write_7bit_i32(&mut build_finished, FLAG_MESSAGE | FLAG_TIMESTAMP); + write_7bit_i32(&mut build_finished, 11); + build_finished.extend_from_slice(&1_010_000_000_i64.to_le_bytes()); + write_7bit_i32(&mut build_finished, 1); + build_finished.push(1); + write_event_record(&mut records, RECORD_BUILD_FINISHED, &build_finished); + + write_7bit_i32(&mut records, RECORD_END_OF_FILE); + + let binlog_bytes = 
build_minimal_binlog(&records); + std::fs::write(&binlog_path, binlog_bytes).expect("write binlog"); + + let summary = parse_build(&binlog_path).expect("parse should succeed"); + assert!(summary.succeeded); + assert_eq!(summary.project_count, 1); + assert_eq!(summary.errors.len(), 1); + assert_eq!(summary.errors[0].code, "CS0103"); + assert_eq!(summary.duration_text.as_deref(), Some("00:00:01.00")); + } + + #[test] + fn test_parse_test_reads_message_events() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("test.binlog"); + + let mut records = Vec::new(); + write_7bit_i32(&mut records, RECORD_STRING); + write_dotnet_string( + &mut records, + "Failed! - Failed: 1, Passed: 2, Skipped: 0, Total: 3, Duration: 1 s", + ); // 10 + + let mut message_event = Vec::new(); + write_7bit_i32(&mut message_event, FLAG_MESSAGE | FLAG_IMPORTANCE); + write_7bit_i32(&mut message_event, 10); + write_7bit_i32(&mut message_event, 1); + write_event_record(&mut records, RECORD_MESSAGE, &message_event); + + write_7bit_i32(&mut records, RECORD_END_OF_FILE); + let binlog_bytes = build_minimal_binlog(&records); + std::fs::write(&binlog_path, binlog_bytes).expect("write binlog"); + + let summary = parse_test(&binlog_path).expect("parse should succeed"); + assert_eq!(summary.failed, 1); + assert_eq!(summary.passed, 2); + assert_eq!(summary.total, 3); + } + + #[test] + fn test_parse_test_fails_when_binlog_missing() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("test.binlog"); + + let err = parse_test(&binlog_path).expect_err("parse should fail"); + assert!( + err.to_string().contains("Failed to parse binlog"), + "unexpected error: {}", + err + ); + } + + #[test] + fn test_parse_restore_fails_when_binlog_missing() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let binlog_path = temp_dir.path().join("restore.binlog"); + + let err = 
parse_restore(&binlog_path).expect_err("parse should fail"); + assert!( + err.to_string().contains("Failed to parse binlog"), + "unexpected error: {}", + err + ); + } + + #[test] + fn test_parse_build_from_fixture_text() { + let input = include_str!("../tests/fixtures/dotnet/build_failed.txt"); + let summary = parse_build_from_text(input); + + assert_eq!(summary.errors.len(), 1); + assert_eq!(summary.errors[0].code, "CS1525"); + assert_eq!(summary.duration_text.as_deref(), Some("00:00:00.76")); + } + + #[test] + fn test_parse_build_sets_project_count_floor() { + let input = r#" +RtkDotnetSmoke -> /tmp/RtkDotnetSmoke.dll + +Build succeeded. + 0 Warning(s) + 0 Error(s) + +Time Elapsed 00:00:00.12 +"#; + + let summary = parse_build_from_text(input); + assert_eq!(summary.project_count, 1); + assert!(summary.succeeded); + } + + #[test] + fn test_parse_build_does_not_infer_binary_errors_on_successful_build() { + let input = "\x0bInvalid expression term ';'\x18\x06CS1525\x18%/tmp/App/Broken.cs\x09\nBuild succeeded.\n 0 Warning(s)\n 0 Error(s)\n"; + + let summary = parse_build_from_text(input); + assert!(summary.succeeded); + assert!(summary.errors.is_empty()); + } + + #[test] + fn test_parse_test_from_fixture_text() { + let input = include_str!("../tests/fixtures/dotnet/test_failed.txt"); + let summary = parse_test_from_text(input); + + assert_eq!(summary.failed, 1); + assert_eq!(summary.passed, 0); + assert_eq!(summary.total, 1); + assert_eq!(summary.failed_tests.len(), 1); + assert!(summary.failed_tests[0] + .name + .contains("RtkDotnetSmoke.UnitTest1.Test1")); + } + + #[test] + fn test_extract_binary_like_issues_recovers_code_message_and_path() { + let noisy = + "\x0bInvalid expression term ';'\x18\x06CS1525\x18%/tmp/RtkDotnetSmoke/Broken.cs\x09"; + let issues = extract_binary_like_issues(noisy); + + assert_eq!(issues.len(), 1); + assert_eq!(issues[0].code, "CS1525"); + assert_eq!(issues[0].file, "/tmp/RtkDotnetSmoke/Broken.cs"); + 
assert!(issues[0].message.contains("Invalid expression term")); + } + + #[test] + fn test_is_likely_diagnostic_code_filters_framework_monikers() { + assert!(is_likely_diagnostic_code("CS1525")); + assert!(is_likely_diagnostic_code("MSB4018")); + assert!(!is_likely_diagnostic_code("NET451")); + assert!(!is_likely_diagnostic_code("NET10")); + } + + #[test] + fn test_select_best_issues_prefers_fallback_when_primary_loses_context() { + let primary = vec![BinlogIssue { + code: String::new(), + file: "CS1525".to_string(), + line: 51, + column: 1, + message: "Invalid expression term ';'".to_string(), + }]; + + let fallback = vec![BinlogIssue { + code: "CS1525".to_string(), + file: "/Users/dev/project/src/NServiceBus.Core/Class1.cs".to_string(), + line: 1, + column: 9, + message: "Invalid expression term ';'".to_string(), + }]; + + let selected = select_best_issues(primary, fallback.clone()); + assert_eq!(selected, fallback); + } + + #[test] + fn test_select_best_issues_keeps_primary_when_context_is_good() { + let primary = vec![BinlogIssue { + code: "CS0103".to_string(), + file: "src/Program.cs".to_string(), + line: 42, + column: 15, + message: "The name 'foo' does not exist".to_string(), + }]; + + let fallback = vec![BinlogIssue { + code: "CS0103".to_string(), + file: String::new(), + line: 0, + column: 0, + message: "Build error #1 (details omitted)".to_string(), + }]; + + let selected = select_best_issues(primary.clone(), fallback); + assert_eq!(selected, primary); + } +} diff --git a/src/dotnet_cmd.rs b/src/dotnet_cmd.rs new file mode 100644 index 00000000..7e87611c --- /dev/null +++ b/src/dotnet_cmd.rs @@ -0,0 +1,1771 @@ +use crate::binlog; +use crate::dotnet_format_report; +use crate::dotnet_trx; +use crate::tracking; +use crate::utils::truncate; +use anyhow::{Context, Result}; +use std::ffi::OsString; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::time::{SystemTime, UNIX_EPOCH}; + +const 
DOTNET_CLI_UI_LANGUAGE: &str = "DOTNET_CLI_UI_LANGUAGE"; +const DOTNET_CLI_UI_LANGUAGE_VALUE: &str = "en-US"; +static TEMP_PATH_COUNTER: AtomicU64 = AtomicU64::new(0); + +pub fn run_build(args: &[String], verbose: u8) -> Result<()> { + run_dotnet_with_binlog("build", args, verbose) +} + +pub fn run_test(args: &[String], verbose: u8) -> Result<()> { + run_dotnet_with_binlog("test", args, verbose) +} + +pub fn run_restore(args: &[String], verbose: u8) -> Result<()> { + run_dotnet_with_binlog("restore", args, verbose) +} + +pub fn run_format(args: &[String], verbose: u8) -> Result<()> { + let timer = tracking::TimedExecution::start(); + let (report_path, cleanup_report_path) = resolve_format_report_path(args); + let mut cmd = Command::new("dotnet"); + cmd.env(DOTNET_CLI_UI_LANGUAGE, DOTNET_CLI_UI_LANGUAGE_VALUE); + cmd.arg("format"); + + for arg in build_effective_dotnet_format_args(args, report_path.as_deref()) { + cmd.arg(arg); + } + + if verbose > 0 { + eprintln!("Running: dotnet format {}", args.join(" ")); + } + + let command_started_at = SystemTime::now(); + let output = cmd.output().context("Failed to run dotnet format")?; + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + let raw = format!("{}\n{}", stdout, stderr); + + let check_mode = !has_write_mode_override(args); + let filtered = + format_report_summary_or_raw(report_path.as_deref(), check_mode, &raw, command_started_at); + println!("{}", filtered); + + timer.track( + &format!("dotnet format {}", args.join(" ")), + &format!("rtk dotnet format {}", args.join(" ")), + &raw, + &filtered, + ); + + if cleanup_report_path { + if let Some(path) = report_path.as_deref() { + cleanup_temp_file(path); + } + } + + if !output.status.success() { + std::process::exit(output.status.code().unwrap_or(1)); + } + + Ok(()) +} + +pub fn run_passthrough(args: &[OsString], verbose: u8) -> Result<()> { + if args.is_empty() { + anyhow::bail!("dotnet: no subcommand 
specified"); + } + + let timer = tracking::TimedExecution::start(); + let subcommand = args[0].to_string_lossy().to_string(); + + let mut cmd = Command::new("dotnet"); + cmd.env(DOTNET_CLI_UI_LANGUAGE, DOTNET_CLI_UI_LANGUAGE_VALUE); + cmd.arg(&subcommand); + for arg in &args[1..] { + cmd.arg(arg); + } + + if verbose > 0 { + eprintln!("Running: dotnet {} ...", subcommand); + } + + let output = cmd + .output() + .with_context(|| format!("Failed to run dotnet {}", subcommand))?; + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + let raw = format!("{}\n{}", stdout, stderr); + + print!("{}", stdout); + eprint!("{}", stderr); + + timer.track( + &format!("dotnet {}", subcommand), + &format!("rtk dotnet {}", subcommand), + &raw, + &raw, + ); + + if !output.status.success() { + std::process::exit(output.status.code().unwrap_or(1)); + } + + Ok(()) +} + +fn run_dotnet_with_binlog(subcommand: &str, args: &[String], verbose: u8) -> Result<()> { + let timer = tracking::TimedExecution::start(); + let binlog_path = build_binlog_path(subcommand); + let should_expect_binlog = subcommand != "test" || has_binlog_arg(args); + + // For test commands, prefer user-provided results directory; otherwise create isolated one. 
+ let (trx_results_dir, cleanup_trx_results_dir) = resolve_trx_results_dir(subcommand, args); + + let mut cmd = Command::new("dotnet"); + cmd.env(DOTNET_CLI_UI_LANGUAGE, DOTNET_CLI_UI_LANGUAGE_VALUE); + cmd.arg(subcommand); + + for arg in + build_effective_dotnet_args(subcommand, args, &binlog_path, trx_results_dir.as_deref()) + { + cmd.arg(arg); + } + + if verbose > 0 { + eprintln!("Running: dotnet {} {}", subcommand, args.join(" ")); + } + + let command_started_at = SystemTime::now(); + let output = cmd + .output() + .with_context(|| format!("Failed to run dotnet {}", subcommand))?; + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + let raw = format!("{}\n{}", stdout, stderr); + + let filtered = match subcommand { + "build" => { + let binlog_summary = if should_expect_binlog && binlog_path.exists() { + normalize_build_summary( + binlog::parse_build(&binlog_path).unwrap_or_default(), + output.status.success(), + ) + } else { + binlog::BuildSummary::default() + }; + let raw_summary = normalize_build_summary( + binlog::parse_build_from_text(&raw), + output.status.success(), + ); + let summary = merge_build_summaries(binlog_summary, raw_summary); + format_build_output(&summary, &binlog_path) + } + "test" => { + // First try to parse from binlog/console output + let parsed_summary = if should_expect_binlog && binlog_path.exists() { + binlog::parse_test(&binlog_path).unwrap_or_default() + } else { + binlog::TestSummary::default() + }; + let raw_summary = binlog::parse_test_from_text(&raw); + let merged_summary = merge_test_summaries(parsed_summary, raw_summary); + let summary = merge_test_summary_from_trx( + merged_summary, + trx_results_dir.as_deref(), + dotnet_trx::find_recent_trx_in_testresults(), + command_started_at, + ); + + let summary = normalize_test_summary(summary, output.status.success()); + let binlog_diagnostics = if should_expect_binlog && binlog_path.exists() { + normalize_build_summary( + 
binlog::parse_build(&binlog_path).unwrap_or_default(), + output.status.success(), + ) + } else { + binlog::BuildSummary::default() + }; + let raw_diagnostics = normalize_build_summary( + binlog::parse_build_from_text(&raw), + output.status.success(), + ); + let test_build_summary = merge_build_summaries(binlog_diagnostics, raw_diagnostics); + format_test_output( + &summary, + &test_build_summary.errors, + &test_build_summary.warnings, + &binlog_path, + ) + } + "restore" => { + let binlog_summary = if should_expect_binlog && binlog_path.exists() { + normalize_restore_summary( + binlog::parse_restore(&binlog_path).unwrap_or_default(), + output.status.success(), + ) + } else { + binlog::RestoreSummary::default() + }; + let raw_summary = normalize_restore_summary( + binlog::parse_restore_from_text(&raw), + output.status.success(), + ); + let summary = merge_restore_summaries(binlog_summary, raw_summary); + + let (raw_errors, raw_warnings) = binlog::parse_restore_issues_from_text(&raw); + + format_restore_output(&summary, &raw_errors, &raw_warnings, &binlog_path) + } + _ => raw.clone(), + }; + + let output_to_print = if !output.status.success() { + let stdout_trimmed = stdout.trim(); + let stderr_trimmed = stderr.trim(); + if !stdout_trimmed.is_empty() { + format!("{}\n\n{}", stdout_trimmed, filtered) + } else if !stderr_trimmed.is_empty() { + format!("{}\n\n{}", stderr_trimmed, filtered) + } else { + filtered + } + } else { + filtered + }; + + println!("{}", output_to_print); + + timer.track( + &format!("dotnet {} {}", subcommand, args.join(" ")), + &format!("rtk dotnet {} {}", subcommand, args.join(" ")), + &raw, + &output_to_print, + ); + + cleanup_temp_file(&binlog_path); + if cleanup_trx_results_dir { + if let Some(dir) = trx_results_dir.as_deref() { + cleanup_temp_dir(dir); + } + } + + if verbose > 0 { + eprintln!("Binlog cleaned up: {}", binlog_path.display()); + } + + if !output.status.success() { + std::process::exit(output.status.code().unwrap_or(1)); + } + + 
Ok(()) +} + +fn build_binlog_path(subcommand: &str) -> PathBuf { + std::env::temp_dir().join(format!( + "rtk_dotnet_{}_{}.binlog", + subcommand, + unique_temp_suffix() + )) +} + +fn build_trx_results_dir() -> PathBuf { + std::env::temp_dir().join(format!("rtk_dotnet_testresults_{}", unique_temp_suffix())) +} + +fn unique_temp_suffix() -> String { + let ts = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_millis()) + .unwrap_or(0); + let pid = std::process::id(); + let seq = TEMP_PATH_COUNTER.fetch_add(1, Ordering::Relaxed); + + // Keep suffix compact to avoid long temp paths while preserving practical uniqueness. + format!("{:x}{:x}{:x}", ts, pid, seq) +} + +fn resolve_trx_results_dir(subcommand: &str, args: &[String]) -> (Option, bool) { + if subcommand != "test" { + return (None, false); + } + + if let Some(user_dir) = extract_results_directory_arg(args) { + return (Some(user_dir), false); + } + + (Some(build_trx_results_dir()), true) +} + +fn build_format_report_path() -> PathBuf { + std::env::temp_dir().join(format!("rtk_dotnet_format_{}.json", unique_temp_suffix())) +} + +fn resolve_format_report_path(args: &[String]) -> (Option, bool) { + if let Some(user_report_path) = extract_report_arg(args) { + return (Some(user_report_path), false); + } + + (Some(build_format_report_path()), true) +} + +fn build_effective_dotnet_format_args(args: &[String], report_path: Option<&Path>) -> Vec { + let mut effective: Vec = args + .iter() + .filter(|arg| !arg.eq_ignore_ascii_case("--write")) + .cloned() + .collect(); + let force_write_mode = has_write_mode_override(args); + + if !force_write_mode && !has_verify_no_changes_arg(args) { + effective.push("--verify-no-changes".to_string()); + } + + if !has_report_arg(args) { + if let Some(path) = report_path { + effective.push("--report".to_string()); + effective.push(path.display().to_string()); + } + } + + effective +} + +fn format_report_summary_or_raw( + report_path: Option<&Path>, + check_mode: bool, + raw: 
&str, + command_started_at: SystemTime, +) -> String { + let Some(report_path) = report_path else { + return raw.to_string(); + }; + + if !is_fresh_report(report_path, command_started_at) { + return raw.to_string(); + } + + match dotnet_format_report::parse_format_report(report_path) { + Ok(summary) => format_dotnet_format_output(&summary, check_mode), + Err(_) => raw.to_string(), + } +} + +fn is_fresh_report(path: &Path, command_started_at: SystemTime) -> bool { + let Ok(metadata) = std::fs::metadata(path) else { + return false; + }; + + let Ok(modified_at) = metadata.modified() else { + return false; + }; + + modified_at.duration_since(command_started_at).is_ok() +} + +fn format_dotnet_format_output( + summary: &dotnet_format_report::FormatSummary, + check_mode: bool, +) -> String { + let changed_count = summary.files_with_changes.len(); + + if changed_count == 0 { + return format!( + "ok dotnet format: {} files formatted correctly", + summary.total_files + ); + } + + if !check_mode { + return format!( + "ok dotnet format: formatted {} files ({} already formatted)", + changed_count, summary.files_unchanged + ); + } + + let mut output = format!("Format: {} files need formatting", changed_count); + output.push_str("\n---------------------------------------"); + + for (index, file) in summary.files_with_changes.iter().take(20).enumerate() { + let first_change = &file.changes[0]; + let rule = if first_change.diagnostic_id.is_empty() { + first_change.format_description.as_str() + } else { + first_change.diagnostic_id.as_str() + }; + output.push_str(&format!( + "\n{}. {} (line {}, col {}, {})", + index + 1, + file.path, + first_change.line_number, + first_change.char_number, + rule + )); + } + + if changed_count > 20 { + output.push_str(&format!("\n... 
+{} more files", changed_count - 20)); + } + + output.push_str(&format!( + "\n\nok {} files already formatted\nRun `dotnet format` to apply fixes", + summary.files_unchanged + )); + output +} + +fn cleanup_temp_file(path: &Path) { + if path.exists() { + std::fs::remove_file(path).ok(); + } +} + +fn cleanup_temp_dir(path: &Path) { + if path.exists() { + std::fs::remove_dir_all(path).ok(); + } +} + +fn merge_test_summary_from_trx( + mut summary: binlog::TestSummary, + trx_results_dir: Option<&Path>, + fallback_trx_path: Option, + command_started_at: SystemTime, +) -> binlog::TestSummary { + let mut trx_summary = None; + + if let Some(dir) = trx_results_dir.filter(|path| path.exists()) { + trx_summary = dotnet_trx::parse_trx_files_in_dir_since(dir, Some(command_started_at)); + + if trx_summary.is_none() { + trx_summary = dotnet_trx::parse_trx_files_in_dir(dir); + } + } + + if trx_summary.is_none() { + if let Some(trx) = fallback_trx_path { + trx_summary = dotnet_trx::parse_trx_file_since(&trx, command_started_at); + } + } + + let Some(trx_summary) = trx_summary else { + return summary; + }; + + if trx_summary.total > 0 && (summary.total == 0 || trx_summary.total >= summary.total) { + summary.passed = trx_summary.passed; + summary.failed = trx_summary.failed; + summary.skipped = trx_summary.skipped; + summary.total = trx_summary.total; + } + + if summary.failed_tests.is_empty() && !trx_summary.failed_tests.is_empty() { + summary.failed_tests = trx_summary.failed_tests; + } + + if let Some(duration) = trx_summary.duration_text { + summary.duration_text = Some(duration); + } + + if trx_summary.project_count > summary.project_count { + summary.project_count = trx_summary.project_count; + } + + summary +} + +fn build_effective_dotnet_args( + subcommand: &str, + args: &[String], + binlog_path: &Path, + trx_results_dir: Option<&Path>, +) -> Vec { + let mut effective = Vec::new(); + + if subcommand != "test" && !has_binlog_arg(args) { + effective.push(format!("-bl:{}", 
binlog_path.display())); + } + + if subcommand != "test" && !has_verbosity_arg(args) { + effective.push("-v:minimal".to_string()); + } + + if !has_nologo_arg(args) { + effective.push("-nologo".to_string()); + } + + if subcommand == "test" { + if !has_trx_logger_arg(args) { + effective.push("--logger".to_string()); + effective.push("trx".to_string()); + } + + if !has_results_directory_arg(args) { + if let Some(results_dir) = trx_results_dir { + effective.push("--results-directory".to_string()); + effective.push(results_dir.display().to_string()); + } + } + } + + effective.extend(args.iter().cloned()); + effective +} + +fn has_binlog_arg(args: &[String]) -> bool { + args.iter().any(|arg| { + let lower = arg.to_ascii_lowercase(); + lower.starts_with("-bl") || lower.starts_with("/bl") + }) +} + +fn has_verbosity_arg(args: &[String]) -> bool { + args.iter().any(|arg| { + let lower = arg.to_ascii_lowercase(); + lower.starts_with("-v:") + || lower.starts_with("/v:") + || lower == "-v" + || lower == "/v" + || lower == "--verbosity" + || lower.starts_with("--verbosity=") + }) +} + +fn has_nologo_arg(args: &[String]) -> bool { + args.iter() + .any(|arg| matches!(arg.to_ascii_lowercase().as_str(), "-nologo" | "/nologo")) +} + +fn has_trx_logger_arg(args: &[String]) -> bool { + let mut iter = args.iter().peekable(); + while let Some(arg) = iter.next() { + let lower = arg.to_ascii_lowercase(); + if lower == "--logger" || lower == "-l" { + if let Some(next) = iter.peek() { + let next_lower = next.to_ascii_lowercase(); + if next_lower == "trx" || next_lower.starts_with("trx;") { + return true; + } + } + continue; + } + + for prefix in ["--logger:", "--logger=", "-l:", "-l="] { + if let Some(value) = lower.strip_prefix(prefix) { + if value == "trx" || value.starts_with("trx;") { + return true; + } + } + } + } + + false +} + +fn has_results_directory_arg(args: &[String]) -> bool { + args.iter().any(|arg| { + let lower = arg.to_ascii_lowercase(); + lower == "--results-directory" || 
lower.starts_with("--results-directory=") + }) +} + +fn has_report_arg(args: &[String]) -> bool { + args.iter().any(|arg| { + let lower = arg.to_ascii_lowercase(); + lower == "--report" || lower.starts_with("--report=") + }) +} + +fn extract_report_arg(args: &[String]) -> Option { + let mut iter = args.iter().peekable(); + while let Some(arg) = iter.next() { + if arg.eq_ignore_ascii_case("--report") { + if let Some(next) = iter.peek() { + return Some(PathBuf::from(next.as_str())); + } + continue; + } + + if let Some((_, value)) = arg.split_once('=') { + if arg + .split('=') + .next() + .is_some_and(|key| key.eq_ignore_ascii_case("--report")) + { + return Some(PathBuf::from(value)); + } + } + } + + None +} + +fn has_verify_no_changes_arg(args: &[String]) -> bool { + args.iter().any(|arg| { + let lower = arg.to_ascii_lowercase(); + lower == "--verify-no-changes" || lower.starts_with("--verify-no-changes=") + }) +} + +fn has_write_mode_override(args: &[String]) -> bool { + args.iter().any(|arg| arg.eq_ignore_ascii_case("--write")) +} + +fn extract_results_directory_arg(args: &[String]) -> Option { + let mut iter = args.iter().peekable(); + while let Some(arg) = iter.next() { + if arg.eq_ignore_ascii_case("--results-directory") { + if let Some(next) = iter.peek() { + return Some(PathBuf::from(next.as_str())); + } + continue; + } + + if let Some((_, value)) = arg.split_once('=') { + if arg + .split('=') + .next() + .is_some_and(|key| key.eq_ignore_ascii_case("--results-directory")) + { + return Some(PathBuf::from(value)); + } + } + } + + None +} + +fn normalize_build_summary( + mut summary: binlog::BuildSummary, + command_success: bool, +) -> binlog::BuildSummary { + if command_success { + summary.succeeded = true; + if summary.project_count == 0 { + summary.project_count = 1; + } + } + + summary +} + +fn merge_build_summaries( + mut binlog_summary: binlog::BuildSummary, + raw_summary: binlog::BuildSummary, +) -> binlog::BuildSummary { + if 
binlog_summary.errors.is_empty() { + binlog_summary.errors = raw_summary.errors; + } + if binlog_summary.warnings.is_empty() { + binlog_summary.warnings = raw_summary.warnings; + } + + if binlog_summary.project_count == 0 { + binlog_summary.project_count = raw_summary.project_count; + } + if binlog_summary.duration_text.is_none() { + binlog_summary.duration_text = raw_summary.duration_text; + } + + binlog_summary +} + +fn normalize_test_summary( + mut summary: binlog::TestSummary, + command_success: bool, +) -> binlog::TestSummary { + if !command_success && summary.failed == 0 && summary.failed_tests.is_empty() { + summary.failed = 1; + if summary.total == 0 { + summary.total = 1; + } + } + + if command_success && summary.total == 0 && summary.passed == 0 { + summary.project_count = summary.project_count.max(1); + } + + summary +} + +fn merge_test_summaries( + mut binlog_summary: binlog::TestSummary, + raw_summary: binlog::TestSummary, +) -> binlog::TestSummary { + if binlog_summary.total == 0 && raw_summary.total > 0 { + binlog_summary.passed = raw_summary.passed; + binlog_summary.failed = raw_summary.failed; + binlog_summary.skipped = raw_summary.skipped; + binlog_summary.total = raw_summary.total; + } + + if !raw_summary.failed_tests.is_empty() { + binlog_summary.failed_tests = raw_summary.failed_tests; + } + + if binlog_summary.project_count == 0 { + binlog_summary.project_count = raw_summary.project_count; + } + + if binlog_summary.duration_text.is_none() { + binlog_summary.duration_text = raw_summary.duration_text; + } + + binlog_summary +} + +fn normalize_restore_summary( + mut summary: binlog::RestoreSummary, + command_success: bool, +) -> binlog::RestoreSummary { + if !command_success && summary.errors == 0 { + summary.errors = 1; + } + + summary +} + +fn merge_restore_summaries( + mut binlog_summary: binlog::RestoreSummary, + raw_summary: binlog::RestoreSummary, +) -> binlog::RestoreSummary { + if binlog_summary.restored_projects == 0 { + 
binlog_summary.restored_projects = raw_summary.restored_projects; + } + if binlog_summary.errors == 0 { + binlog_summary.errors = raw_summary.errors; + } + if binlog_summary.warnings == 0 { + binlog_summary.warnings = raw_summary.warnings; + } + if binlog_summary.duration_text.is_none() { + binlog_summary.duration_text = raw_summary.duration_text; + } + + binlog_summary +} + +fn format_issue(issue: &binlog::BinlogIssue, kind: &str) -> String { + if issue.file.is_empty() { + return format!(" {} {}", kind, truncate(&issue.message, 180)); + } + if issue.code.is_empty() { + return format!( + " {}({},{}) {}: {}", + issue.file, + issue.line, + issue.column, + kind, + truncate(&issue.message, 180) + ); + } + format!( + " {}({},{}) {} {}: {}", + issue.file, + issue.line, + issue.column, + kind, + issue.code, + truncate(&issue.message, 180) + ) +} + +fn format_build_output(summary: &binlog::BuildSummary, binlog_path: &Path) -> String { + let status_icon = if summary.succeeded { "ok" } else { "fail" }; + let duration = summary.duration_text.as_deref().unwrap_or("unknown"); + + let mut out = format!( + "{} dotnet build: {} projects, {} errors, {} warnings ({})", + status_icon, + summary.project_count, + summary.errors.len(), + summary.warnings.len(), + duration + ); + + if !summary.errors.is_empty() { + out.push_str("\n---------------------------------------\n\nErrors:\n"); + for issue in summary.errors.iter().take(20) { + out.push_str(&format!("{}\n", format_issue(issue, "error"))); + } + if summary.errors.len() > 20 { + out.push_str(&format!( + " ... +{} more errors\n", + summary.errors.len() - 20 + )); + } + } + + if !summary.warnings.is_empty() { + out.push_str("\nWarnings:\n"); + for issue in summary.warnings.iter().take(10) { + out.push_str(&format!("{}\n", format_issue(issue, "warning"))); + } + if summary.warnings.len() > 10 { + out.push_str(&format!( + " ... 
+{} more warnings\n", + summary.warnings.len() - 10 + )); + } + } + + out.push_str(&format!("\nBinlog: {}", binlog_path.display())); + out +} + +fn format_test_output( + summary: &binlog::TestSummary, + errors: &[binlog::BinlogIssue], + warnings: &[binlog::BinlogIssue], + binlog_path: &Path, +) -> String { + let has_failures = summary.failed > 0 || !summary.failed_tests.is_empty(); + let status_icon = if has_failures { "fail" } else { "ok" }; + let duration = summary.duration_text.as_deref().unwrap_or("unknown"); + let warning_count = warnings.len(); + let counts_unavailable = summary.passed == 0 + && summary.failed == 0 + && summary.skipped == 0 + && summary.total == 0 + && summary.failed_tests.is_empty(); + + let mut out = if counts_unavailable { + format!( + "{} dotnet test: completed (binlog-only mode, counts unavailable, {} warnings) ({})", + status_icon, warning_count, duration + ) + } else if has_failures { + format!( + "{} dotnet test: {} passed, {} failed, {} skipped, {} warnings in {} projects ({})", + status_icon, + summary.passed, + summary.failed, + summary.skipped, + warning_count, + summary.project_count, + duration + ) + } else { + format!( + "{} dotnet test: {} tests passed, {} warnings in {} projects ({})", + status_icon, summary.passed, warning_count, summary.project_count, duration + ) + }; + + if has_failures && !summary.failed_tests.is_empty() { + out.push_str("\n---------------------------------------\n\nFailed Tests:\n"); + for failed in summary.failed_tests.iter().take(15) { + out.push_str(&format!(" {}\n", failed.name)); + for detail in &failed.details { + out.push_str(&format!(" {}\n", truncate(detail, 320))); + } + out.push('\n'); + } + if summary.failed_tests.len() > 15 { + out.push_str(&format!( + "... 
+{} more failed tests\n", + summary.failed_tests.len() - 15 + )); + } + } + + if !errors.is_empty() { + out.push_str("\nErrors:\n"); + for issue in errors.iter().take(10) { + out.push_str(&format!("{}\n", format_issue(issue, "error"))); + } + if errors.len() > 10 { + out.push_str(&format!(" ... +{} more errors\n", errors.len() - 10)); + } + } + + if !warnings.is_empty() { + out.push_str("\nWarnings:\n"); + for issue in warnings.iter().take(10) { + out.push_str(&format!("{}\n", format_issue(issue, "warning"))); + } + if warnings.len() > 10 { + out.push_str(&format!(" ... +{} more warnings\n", warnings.len() - 10)); + } + } + + out.push_str(&format!("\nBinlog: {}", binlog_path.display())); + out +} + +fn format_restore_output( + summary: &binlog::RestoreSummary, + errors: &[binlog::BinlogIssue], + warnings: &[binlog::BinlogIssue], + binlog_path: &Path, +) -> String { + let has_errors = summary.errors > 0; + let status_icon = if has_errors { "fail" } else { "ok" }; + let duration = summary.duration_text.as_deref().unwrap_or("unknown"); + + let mut out = format!( + "{} dotnet restore: {} projects, {} errors, {} warnings ({})", + status_icon, summary.restored_projects, summary.errors, summary.warnings, duration + ); + + if !errors.is_empty() { + out.push_str("\n---------------------------------------\n\nErrors:\n"); + for issue in errors.iter().take(20) { + out.push_str(&format!("{}\n", format_issue(issue, "error"))); + } + if errors.len() > 20 { + out.push_str(&format!(" ... +{} more errors\n", errors.len() - 20)); + } + } + + if !warnings.is_empty() { + out.push_str("\nWarnings:\n"); + for issue in warnings.iter().take(10) { + out.push_str(&format!("{}\n", format_issue(issue, "warning"))); + } + if warnings.len() > 10 { + out.push_str(&format!(" ... 
+{} more warnings\n", warnings.len() - 10)); + } + } + + out.push_str(&format!("\nBinlog: {}", binlog_path.display())); + out +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::dotnet_format_report; + use std::fs; + use std::time::Duration; + + fn build_dotnet_args_for_test( + subcommand: &str, + args: &[String], + with_trx: bool, + ) -> Vec { + let binlog_path = Path::new("/tmp/test.binlog"); + let trx_results_dir = if with_trx { + Some(Path::new("/tmp/test results")) + } else { + None + }; + + build_effective_dotnet_args(subcommand, args, binlog_path, trx_results_dir) + } + + fn trx_with_counts(total: usize, passed: usize, failed: usize) -> String { + format!( + r#" + + + + +"#, + total, total, passed, failed + ) + } + + fn format_fixture(name: &str) -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("tests") + .join("fixtures") + .join("dotnet") + .join(name) + } + + #[test] + fn test_has_binlog_arg_detects_variants() { + let args = vec!["-bl:my.binlog".to_string()]; + assert!(has_binlog_arg(&args)); + + let args = vec!["/bl".to_string()]; + assert!(has_binlog_arg(&args)); + + let args = vec!["--configuration".to_string(), "Release".to_string()]; + assert!(!has_binlog_arg(&args)); + } + + #[test] + fn test_format_build_output_includes_errors_and_warnings() { + let summary = binlog::BuildSummary { + succeeded: false, + project_count: 2, + errors: vec![binlog::BinlogIssue { + code: "CS0103".to_string(), + file: "src/Program.cs".to_string(), + line: 42, + column: 15, + message: "The name 'foo' does not exist".to_string(), + }], + warnings: vec![binlog::BinlogIssue { + code: "CS0219".to_string(), + file: "src/Program.cs".to_string(), + line: 25, + column: 10, + message: "Variable 'x' is assigned but never used".to_string(), + }], + duration_text: Some("00:00:04.20".to_string()), + }; + + let output = format_build_output(&summary, Path::new("/tmp/build.binlog")); + assert!(output.contains("dotnet build: 2 projects, 1 errors, 1 warnings")); + 
assert!(output.contains("error CS0103")); + assert!(output.contains("warning CS0219")); + } + + #[test] + fn test_format_test_output_shows_failures() { + let summary = binlog::TestSummary { + passed: 10, + failed: 1, + skipped: 0, + total: 11, + project_count: 1, + failed_tests: vec![binlog::FailedTest { + name: "MyTests.ShouldFail".to_string(), + details: vec!["Assert.Equal failure".to_string()], + }], + duration_text: Some("1 s".to_string()), + }; + + let output = format_test_output(&summary, &[], &[], Path::new("/tmp/test.binlog")); + assert!(output.contains("10 passed, 1 failed")); + assert!(output.contains("MyTests.ShouldFail")); + } + + #[test] + fn test_format_test_output_surfaces_warnings() { + let summary = binlog::TestSummary { + passed: 940, + failed: 0, + skipped: 7, + total: 947, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("1 s".to_string()), + }; + + let warnings = vec![binlog::BinlogIssue { + code: String::new(), + file: "/sdk/Microsoft.TestPlatform.targets".to_string(), + line: 48, + column: 5, + message: "Violators:".to_string(), + }]; + + let output = format_test_output(&summary, &[], &warnings, Path::new("/tmp/test.binlog")); + assert!(output.contains("940 tests passed, 1 warnings")); + assert!(output.contains("Warnings:")); + assert!(output.contains("Microsoft.TestPlatform.targets")); + } + + #[test] + fn test_format_test_output_surfaces_errors() { + let summary = binlog::TestSummary { + passed: 939, + failed: 1, + skipped: 7, + total: 947, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("1 s".to_string()), + }; + + let errors = vec![binlog::BinlogIssue { + code: "TESTERROR".to_string(), + file: "/repo/MessageMapperTests.cs".to_string(), + line: 135, + column: 0, + message: "CreateInstance_should_initialize_interface_message_type_on_demand" + .to_string(), + }]; + + let output = format_test_output(&summary, &errors, &[], Path::new("/tmp/test.binlog")); + assert!(output.contains("Errors:")); + 
assert!(output.contains("error TESTERROR")); + assert!( + output.contains("CreateInstance_should_initialize_interface_message_type_on_demand") + ); + } + + #[test] + fn test_format_restore_output_success() { + let summary = binlog::RestoreSummary { + restored_projects: 3, + warnings: 1, + errors: 0, + duration_text: Some("00:00:01.10".to_string()), + }; + + let output = format_restore_output(&summary, &[], &[], Path::new("/tmp/restore.binlog")); + assert!(output.starts_with("ok dotnet restore")); + assert!(output.contains("3 projects")); + assert!(output.contains("1 warnings")); + } + + #[test] + fn test_format_restore_output_failure() { + let summary = binlog::RestoreSummary { + restored_projects: 2, + warnings: 0, + errors: 1, + duration_text: Some("00:00:01.00".to_string()), + }; + + let output = format_restore_output(&summary, &[], &[], Path::new("/tmp/restore.binlog")); + assert!(output.starts_with("fail dotnet restore")); + assert!(output.contains("1 errors")); + } + + #[test] + fn test_format_restore_output_includes_error_details() { + let summary = binlog::RestoreSummary { + restored_projects: 2, + warnings: 0, + errors: 1, + duration_text: Some("00:00:01.00".to_string()), + }; + + let issues = vec![binlog::BinlogIssue { + code: "NU1101".to_string(), + file: "/repo/src/App/App.csproj".to_string(), + line: 0, + column: 0, + message: "Unable to find package Foo.Bar".to_string(), + }]; + + let output = + format_restore_output(&summary, &issues, &[], Path::new("/tmp/restore.binlog")); + assert!(output.contains("Errors:")); + assert!(output.contains("error NU1101")); + assert!(output.contains("Unable to find package Foo.Bar")); + } + + #[test] + fn test_format_test_output_handles_binlog_only_without_counts() { + let summary = binlog::TestSummary { + passed: 0, + failed: 0, + skipped: 0, + total: 0, + project_count: 0, + failed_tests: Vec::new(), + duration_text: Some("unknown".to_string()), + }; + + let output = format_test_output(&summary, &[], &[], 
Path::new("/tmp/test.binlog")); + assert!(output.contains("counts unavailable")); + } + + #[test] + fn test_normalize_build_summary_sets_success_floor() { + let summary = binlog::BuildSummary { + succeeded: false, + project_count: 0, + errors: Vec::new(), + warnings: Vec::new(), + duration_text: None, + }; + + let normalized = normalize_build_summary(summary, true); + assert!(normalized.succeeded); + assert_eq!(normalized.project_count, 1); + } + + #[test] + fn test_merge_build_summaries_keeps_structured_issues_when_present() { + let binlog_summary = binlog::BuildSummary { + succeeded: false, + project_count: 11, + errors: vec![binlog::BinlogIssue { + code: String::new(), + file: "IDE0055".to_string(), + line: 0, + column: 0, + message: "Fix formatting".to_string(), + }], + warnings: Vec::new(), + duration_text: Some("00:00:03.54".to_string()), + }; + + let raw_summary = binlog::BuildSummary { + succeeded: false, + project_count: 2, + errors: vec![ + binlog::BinlogIssue { + code: "IDE0055".to_string(), + file: "/repo/src/Behavior.cs".to_string(), + line: 13, + column: 32, + message: "Fix formatting".to_string(), + }, + binlog::BinlogIssue { + code: "IDE0055".to_string(), + file: "/repo/src/Behavior.cs".to_string(), + line: 13, + column: 41, + message: "Fix formatting".to_string(), + }, + ], + warnings: Vec::new(), + duration_text: Some("00:00:03.54".to_string()), + }; + + let merged = merge_build_summaries(binlog_summary, raw_summary); + assert_eq!(merged.project_count, 11); + assert_eq!(merged.errors.len(), 1); + assert_eq!(merged.errors[0].file, "IDE0055"); + assert_eq!(merged.errors[0].line, 0); + assert_eq!(merged.errors[0].column, 0); + } + + #[test] + fn test_merge_build_summaries_keeps_binlog_when_context_is_good() { + let binlog_summary = binlog::BuildSummary { + succeeded: false, + project_count: 2, + errors: vec![binlog::BinlogIssue { + code: "CS0103".to_string(), + file: "src/Program.cs".to_string(), + line: 42, + column: 15, + message: "The name 'foo' 
does not exist".to_string(), + }], + warnings: Vec::new(), + duration_text: Some("00:00:01.00".to_string()), + }; + + let raw_summary = binlog::BuildSummary { + succeeded: false, + project_count: 2, + errors: vec![binlog::BinlogIssue { + code: "CS0103".to_string(), + file: String::new(), + line: 0, + column: 0, + message: "Build error #1 (details omitted)".to_string(), + }], + warnings: Vec::new(), + duration_text: None, + }; + + let merged = merge_build_summaries(binlog_summary.clone(), raw_summary); + assert_eq!(merged.errors, binlog_summary.errors); + } + + #[test] + fn test_normalize_test_summary_sets_failure_floor() { + let summary = binlog::TestSummary { + passed: 0, + failed: 0, + skipped: 0, + total: 0, + project_count: 0, + failed_tests: Vec::new(), + duration_text: None, + }; + + let normalized = normalize_test_summary(summary, false); + assert_eq!(normalized.failed, 1); + assert_eq!(normalized.total, 1); + } + + #[test] + fn test_merge_test_summaries_keeps_structured_counts_and_fills_failed_tests() { + let binlog_summary = binlog::TestSummary { + passed: 939, + failed: 1, + skipped: 8, + total: 948, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("unknown".to_string()), + }; + + let raw_summary = binlog::TestSummary { + passed: 939, + failed: 1, + skipped: 7, + total: 947, + project_count: 0, + failed_tests: vec![binlog::FailedTest { + name: "MessageMapperTests.CreateInstance_should_initialize_interface_message_type_on_demand" + .to_string(), + details: vec!["Assert.That(messageInstance, Is.Null)".to_string()], + }], + duration_text: Some("1 s".to_string()), + }; + + let merged = merge_test_summaries(binlog_summary, raw_summary); + assert_eq!(merged.skipped, 8); + assert_eq!(merged.total, 948); + assert_eq!(merged.failed_tests.len(), 1); + assert!(merged.failed_tests[0] + .name + .contains("CreateInstance_should_initialize")); + } + + #[test] + fn test_normalize_restore_summary_sets_error_floor_on_failed_command() { + let summary = 
binlog::RestoreSummary { + restored_projects: 2, + warnings: 0, + errors: 0, + duration_text: None, + }; + + let normalized = normalize_restore_summary(summary, false); + assert_eq!(normalized.errors, 1); + } + + #[test] + fn test_merge_restore_summaries_prefers_raw_error_count() { + let binlog_summary = binlog::RestoreSummary { + restored_projects: 2, + warnings: 0, + errors: 0, + duration_text: Some("unknown".to_string()), + }; + + let raw_summary = binlog::RestoreSummary { + restored_projects: 0, + warnings: 0, + errors: 1, + duration_text: Some("unknown".to_string()), + }; + + let merged = merge_restore_summaries(binlog_summary, raw_summary); + assert_eq!(merged.errors, 1); + assert_eq!(merged.restored_projects, 2); + } + + #[test] + fn test_forwarding_args_with_spaces() { + let args = vec![ + "--filter".to_string(), + "FullyQualifiedName~MyTests.Calculator*".to_string(), + "-c".to_string(), + "Release".to_string(), + ]; + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(injected.contains(&"--filter".to_string())); + assert!(injected.contains(&"FullyQualifiedName~MyTests.Calculator*".to_string())); + assert!(injected.contains(&"-c".to_string())); + assert!(injected.contains(&"Release".to_string())); + } + + #[test] + fn test_forwarding_config_and_framework() { + let args = vec![ + "--configuration".to_string(), + "Release".to_string(), + "--framework".to_string(), + "net8.0".to_string(), + ]; + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(injected.contains(&"--configuration".to_string())); + assert!(injected.contains(&"Release".to_string())); + assert!(injected.contains(&"--framework".to_string())); + assert!(injected.contains(&"net8.0".to_string())); + } + + #[test] + fn test_forwarding_project_file() { + let args = vec![ + "--project".to_string(), + "src/My App.Tests/My App.Tests.csproj".to_string(), + ]; + + let injected = build_dotnet_args_for_test("test", &args, true); + 
assert!(injected.contains(&"--project".to_string())); + assert!(injected.contains(&"src/My App.Tests/My App.Tests.csproj".to_string())); + } + + #[test] + fn test_forwarding_no_build_and_no_restore() { + let args = vec!["--no-build".to_string(), "--no-restore".to_string()]; + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(injected.contains(&"--no-build".to_string())); + assert!(injected.contains(&"--no-restore".to_string())); + } + + #[test] + fn test_user_verbose_override() { + let args = vec!["-v:detailed".to_string()]; + + let injected = build_dotnet_args_for_test("test", &args, true); + let verbose_count = injected.iter().filter(|a| a.starts_with("-v:")).count(); + assert_eq!(verbose_count, 1); + assert!(injected.contains(&"-v:detailed".to_string())); + assert!(!injected.contains(&"-v:minimal".to_string())); + } + + #[test] + fn test_user_long_verbosity_override() { + let args = vec!["--verbosity".to_string(), "detailed".to_string()]; + + let injected = build_dotnet_args_for_test("build", &args, false); + assert!(injected.contains(&"--verbosity".to_string())); + assert!(injected.contains(&"detailed".to_string())); + assert!(!injected.contains(&"-v:minimal".to_string())); + } + + #[test] + fn test_test_subcommand_does_not_inject_minimal_verbosity_by_default() { + let args = Vec::::new(); + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(!injected.contains(&"-v:minimal".to_string())); + } + + #[test] + fn test_user_logger_override() { + let args = vec![ + "--logger".to_string(), + "console;verbosity=detailed".to_string(), + ]; + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(injected.contains(&"--logger".to_string())); + assert!(injected.contains(&"console;verbosity=detailed".to_string())); + assert!(injected.iter().any(|a| a == "trx")); + assert!(injected.iter().any(|a| a == "--results-directory")); + } + + #[test] + fn test_trx_logger_and_results_directory_injected() { + let 
args = Vec::::new(); + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(injected.contains(&"--logger".to_string())); + assert!(injected.contains(&"trx".to_string())); + assert!(injected.contains(&"--results-directory".to_string())); + assert!(injected.contains(&"/tmp/test results".to_string())); + } + + #[test] + fn test_user_trx_logger_does_not_duplicate() { + let args = vec!["--logger".to_string(), "trx".to_string()]; + + let injected = build_dotnet_args_for_test("test", &args, true); + let trx_logger_count = injected.iter().filter(|a| *a == "trx").count(); + assert_eq!(trx_logger_count, 1); + } + + #[test] + fn test_user_results_directory_prevents_extra_injection() { + let args = vec![ + "--results-directory".to_string(), + "/custom/results".to_string(), + ]; + + let injected = build_dotnet_args_for_test("test", &args, true); + assert!(!injected + .windows(2) + .any(|w| w[0] == "--results-directory" && w[1] == "/tmp/test results")); + assert!(injected + .windows(2) + .any(|w| w[0] == "--results-directory" && w[1] == "/custom/results")); + } + + #[test] + fn test_merge_test_summary_from_trx_uses_primary_and_cleans_file() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let primary = temp_dir.path().join("primary.trx"); + fs::write(&primary, trx_with_counts(3, 3, 0)).expect("write primary trx"); + + let filled = merge_test_summary_from_trx( + binlog::TestSummary::default(), + Some(temp_dir.path()), + None, + SystemTime::now(), + ); + + assert_eq!(filled.total, 3); + assert_eq!(filled.passed, 3); + assert!(primary.exists()); + } + + #[test] + fn test_merge_test_summary_from_trx_falls_back_to_testresults() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let fallback = temp_dir.path().join("fallback.trx"); + fs::write(&fallback, trx_with_counts(2, 1, 1)).expect("write fallback trx"); + let missing_primary = temp_dir.path().join("missing.trx"); + + let filled = merge_test_summary_from_trx( + 
binlog::TestSummary::default(), + Some(&missing_primary), + Some(fallback.clone()), + UNIX_EPOCH, + ); + + assert_eq!(filled.total, 2); + assert_eq!(filled.failed, 1); + assert!(fallback.exists()); + } + + #[test] + fn test_merge_test_summary_from_trx_returns_default_when_no_trx() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let missing = temp_dir.path().join("missing.trx"); + + let filled = merge_test_summary_from_trx( + binlog::TestSummary::default(), + Some(&missing), + None, + SystemTime::now(), + ); + assert_eq!(filled.total, 0); + } + + #[test] + fn test_merge_test_summary_from_trx_ignores_stale_fallback_file() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let fallback = temp_dir.path().join("fallback.trx"); + fs::write(&fallback, trx_with_counts(2, 1, 1)).expect("write fallback trx"); + std::thread::sleep(std::time::Duration::from_millis(5)); + let command_started_at = SystemTime::now(); + let missing_primary = temp_dir.path().join("missing.trx"); + + let filled = merge_test_summary_from_trx( + binlog::TestSummary::default(), + Some(&missing_primary), + Some(fallback.clone()), + command_started_at, + ); + + assert_eq!(filled.total, 0); + assert!(fallback.exists()); + } + + #[test] + fn test_merge_test_summary_from_trx_keeps_larger_existing_counts() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let primary = temp_dir.path().join("primary.trx"); + fs::write(&primary, trx_with_counts(5, 4, 1)).expect("write primary trx"); + + let existing = binlog::TestSummary { + passed: 10, + failed: 2, + skipped: 0, + total: 12, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("1 s".to_string()), + }; + + let merged = + merge_test_summary_from_trx(existing, Some(temp_dir.path()), None, SystemTime::now()); + assert_eq!(merged.total, 12); + assert_eq!(merged.passed, 10); + assert_eq!(merged.failed, 2); + } + + #[test] + fn test_merge_test_summary_from_trx_overrides_smaller_existing_counts() 
{ + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let primary = temp_dir.path().join("primary.trx"); + fs::write(&primary, trx_with_counts(12, 10, 2)).expect("write primary trx"); + + let existing = binlog::TestSummary { + passed: 4, + failed: 1, + skipped: 0, + total: 5, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("1 s".to_string()), + }; + + let merged = + merge_test_summary_from_trx(existing, Some(temp_dir.path()), None, SystemTime::now()); + assert_eq!(merged.total, 12); + assert_eq!(merged.passed, 10); + assert_eq!(merged.failed, 2); + } + + #[test] + fn test_merge_test_summary_from_trx_uses_larger_project_count() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let trx_a = temp_dir.path().join("a.trx"); + let trx_b = temp_dir.path().join("b.trx"); + fs::write(&trx_a, trx_with_counts(2, 2, 0)).expect("write first trx"); + fs::write(&trx_b, trx_with_counts(3, 3, 0)).expect("write second trx"); + + let existing = binlog::TestSummary { + passed: 5, + failed: 0, + skipped: 0, + total: 5, + project_count: 1, + failed_tests: Vec::new(), + duration_text: Some("1 s".to_string()), + }; + + let merged = + merge_test_summary_from_trx(existing, Some(temp_dir.path()), None, SystemTime::now()); + assert_eq!(merged.project_count, 2); + } + + #[test] + fn test_has_results_directory_arg_detects_variants() { + let args = vec!["--results-directory".to_string(), "/tmp/trx".to_string()]; + assert!(has_results_directory_arg(&args)); + + let args = vec!["--results-directory=/tmp/trx".to_string()]; + assert!(has_results_directory_arg(&args)); + + let args = vec!["--logger".to_string(), "trx".to_string()]; + assert!(!has_results_directory_arg(&args)); + } + + #[test] + fn test_extract_results_directory_arg_detects_variants() { + let args = vec!["--results-directory".to_string(), "/tmp/r1".to_string()]; + assert_eq!( + extract_results_directory_arg(&args), + Some(PathBuf::from("/tmp/r1")) + ); + + let args = 
vec!["--results-directory=/tmp/r2".to_string()]; + assert_eq!( + extract_results_directory_arg(&args), + Some(PathBuf::from("/tmp/r2")) + ); + } + + #[test] + fn test_resolve_trx_results_dir_user_directory_is_not_marked_for_cleanup() { + let args = vec![ + "--results-directory".to_string(), + "/custom/results".to_string(), + ]; + + let (dir, cleanup) = resolve_trx_results_dir("test", &args); + assert_eq!(dir, Some(PathBuf::from("/custom/results"))); + assert!(!cleanup); + } + + #[test] + fn test_resolve_trx_results_dir_generated_directory_is_marked_for_cleanup() { + let args = Vec::::new(); + + let (dir, cleanup) = resolve_trx_results_dir("test", &args); + assert!(dir.is_some()); + assert!(cleanup); + } + + #[test] + fn test_format_all_formatted() { + let summary = + dotnet_format_report::parse_format_report(&format_fixture("format_success.json")) + .expect("parse format report"); + + let output = format_dotnet_format_output(&summary, true); + assert!(output.contains("ok dotnet format: 2 files formatted correctly")); + } + + #[test] + fn test_format_needs_formatting() { + let summary = + dotnet_format_report::parse_format_report(&format_fixture("format_changes.json")) + .expect("parse format report"); + + let output = format_dotnet_format_output(&summary, true); + assert!(output.contains("Format: 2 files need formatting")); + assert!(output.contains("src/Program.cs (line 42, col 17, WHITESPACE)")); + assert!(output.contains("Run `dotnet format` to apply fixes")); + } + + #[test] + fn test_format_temp_file_cleanup() { + let args = Vec::::new(); + let (report_path, cleanup) = resolve_format_report_path(&args); + let report_path = report_path.expect("report path"); + + assert!(cleanup); + fs::write(&report_path, "[]").expect("write temp report"); + cleanup_temp_file(&report_path); + assert!(!report_path.exists()); + } + + #[test] + fn test_format_user_report_arg_no_cleanup() { + let args = vec![ + "--report".to_string(), + "/tmp/user-format-report.json".to_string(), + 
]; + + let (report_path, cleanup) = resolve_format_report_path(&args); + assert_eq!( + report_path, + Some(PathBuf::from("/tmp/user-format-report.json")) + ); + assert!(!cleanup); + } + + #[test] + fn test_format_preserves_positional_project_argument_order() { + let args = vec!["src/App/App.csproj".to_string()]; + + let effective = + build_effective_dotnet_format_args(&args, Some(Path::new("/tmp/report.json"))); + assert_eq!( + effective.first().map(String::as_str), + Some("src/App/App.csproj") + ); + } + + #[test] + fn test_format_report_summary_ignores_stale_report_file() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let report = temp_dir.path().join("report.json"); + fs::write(&report, "[]").expect("write report"); + + let command_started_at = SystemTime::now() + .checked_add(Duration::from_secs(2)) + .expect("future timestamp"); + let raw = "RAW OUTPUT"; + + let output = format_report_summary_or_raw(Some(&report), true, raw, command_started_at); + assert_eq!(output, raw); + } + + #[test] + fn test_format_report_summary_uses_fresh_report_file() { + let report = format_fixture("format_success.json"); + let raw = "RAW OUTPUT"; + + let output = format_report_summary_or_raw(Some(&report), true, raw, UNIX_EPOCH); + assert!(output.contains("ok dotnet format: 2 files formatted correctly")); + } + + #[test] + fn test_cleanup_temp_file_removes_existing_file() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let temp_file = temp_dir.path().join("temp.binlog"); + fs::write(&temp_file, "content").expect("write temp file"); + + cleanup_temp_file(&temp_file); + + assert!(!temp_file.exists()); + } + + #[test] + fn test_cleanup_temp_file_ignores_missing_file() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let missing_file = temp_dir.path().join("missing.binlog"); + + cleanup_temp_file(&missing_file); + + assert!(!missing_file.exists()); + } +} diff --git a/src/dotnet_format_report.rs b/src/dotnet_format_report.rs 
new file mode 100644 index 00000000..5b8837ff --- /dev/null +++ b/src/dotnet_format_report.rs @@ -0,0 +1,133 @@ +use anyhow::{Context, Result}; +use serde::Deserialize; +use std::fs::File; +use std::io::BufReader; +use std::path::Path; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "PascalCase")] +struct FormatReportEntry { + file_path: String, + #[serde(default)] + file_changes: Vec, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "PascalCase")] +struct FileChange { + line_number: u32, + char_number: u32, + diagnostic_id: String, + format_description: String, +} + +#[derive(Debug)] +pub struct ChangeDetail { + pub line_number: u32, + pub char_number: u32, + pub diagnostic_id: String, + pub format_description: String, +} + +#[derive(Debug)] +pub struct FileWithChanges { + pub path: String, + pub changes: Vec, +} + +#[derive(Debug)] +pub struct FormatSummary { + pub files_with_changes: Vec, + pub files_unchanged: usize, + pub total_files: usize, +} + +pub fn parse_format_report(path: &Path) -> Result { + let file = File::open(path) + .with_context(|| format!("Failed to read dotnet format report at {}", path.display()))?; + let reader = BufReader::new(file); + + let entries: Vec = serde_json::from_reader(reader).with_context(|| { + format!( + "Failed to parse dotnet format report JSON at {}", + path.display() + ) + })?; + + let total_files = entries.len(); + let files_with_changes: Vec = entries + .into_iter() + .filter_map(|entry| { + if entry.file_changes.is_empty() { + return None; + } + + let changes = entry + .file_changes + .into_iter() + .map(|change| ChangeDetail { + line_number: change.line_number, + char_number: change.char_number, + diagnostic_id: change.diagnostic_id, + format_description: change.format_description, + }) + .collect(); + + Some(FileWithChanges { + path: entry.file_path, + changes, + }) + }) + .collect(); + + let files_unchanged = total_files.saturating_sub(files_with_changes.len()); + + Ok(FormatSummary { + 
files_with_changes, + files_unchanged, + total_files, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + fn fixture(name: &str) -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("tests") + .join("fixtures") + .join("dotnet") + .join(name) + } + + #[test] + fn test_parse_format_report_all_formatted() { + let summary = parse_format_report(&fixture("format_success.json")).expect("parse report"); + + assert_eq!(summary.total_files, 2); + assert_eq!(summary.files_unchanged, 2); + assert!(summary.files_with_changes.is_empty()); + } + + #[test] + fn test_parse_format_report_with_changes() { + let summary = parse_format_report(&fixture("format_changes.json")).expect("parse report"); + + assert_eq!(summary.total_files, 3); + assert_eq!(summary.files_unchanged, 1); + assert_eq!(summary.files_with_changes.len(), 2); + assert!(summary.files_with_changes[0].path.contains("Program.cs")); + assert_eq!(summary.files_with_changes[0].changes[0].line_number, 42); + } + + #[test] + fn test_parse_format_report_empty() { + let summary = parse_format_report(&fixture("format_empty.json")).expect("parse report"); + + assert_eq!(summary.total_files, 0); + assert_eq!(summary.files_unchanged, 0); + assert!(summary.files_with_changes.is_empty()); + } +} diff --git a/src/dotnet_trx.rs b/src/dotnet_trx.rs new file mode 100644 index 00000000..8c967666 --- /dev/null +++ b/src/dotnet_trx.rs @@ -0,0 +1,593 @@ +use crate::binlog::{FailedTest, TestSummary}; +use chrono::{DateTime, FixedOffset}; +use quick_xml::events::{BytesStart, Event}; +use quick_xml::Reader; +use std::path::{Path, PathBuf}; +use std::time::SystemTime; + +fn local_name(name: &[u8]) -> &[u8] { + name.rsplit(|b| *b == b':').next().unwrap_or(name) +} + +fn extract_attr_value( + reader: &Reader<&[u8]>, + start: &BytesStart<'_>, + key: &[u8], +) -> Option { + for attr in start.attributes().flatten() { + if local_name(attr.key.as_ref()) != key { + continue; + } + + if let Ok(value) = 
attr.decode_and_unescape_value(reader.decoder()) { + return Some(value.into_owned()); + } + } + + None +} + +fn parse_usize_attr(reader: &Reader<&[u8]>, start: &BytesStart<'_>, key: &[u8]) -> usize { + extract_attr_value(reader, start, key) + .and_then(|v| v.parse::().ok()) + .unwrap_or(0) +} + +fn parse_trx_duration(start: &str, finish: &str) -> Option { + let start_dt = DateTime::parse_from_rfc3339(start).ok()?; + let finish_dt = DateTime::parse_from_rfc3339(finish).ok()?; + format_duration_between(start_dt, finish_dt) +} + +fn format_duration_between( + start_dt: DateTime, + finish_dt: DateTime, +) -> Option { + let diff = finish_dt.signed_duration_since(start_dt); + let millis = diff.num_milliseconds(); + if millis <= 0 { + return None; + } + + if millis >= 1_000 { + let seconds = millis as f64 / 1_000.0; + return Some(format!("{seconds:.1} s")); + } + + Some(format!("{millis} ms")) +} + +fn parse_trx_time_bounds(content: &str) -> Option<(DateTime, DateTime)> { + let mut reader = Reader::from_str(content); + reader.config_mut().trim_text(true); + let mut buf = Vec::new(); + + loop { + match reader.read_event_into(&mut buf) { + Ok(Event::Start(e)) | Ok(Event::Empty(e)) => { + if local_name(e.name().as_ref()) != b"Times" { + buf.clear(); + continue; + } + + let start = extract_attr_value(&reader, &e, b"start")?; + let finish = extract_attr_value(&reader, &e, b"finish")?; + let start_dt = DateTime::parse_from_rfc3339(&start).ok()?; + let finish_dt = DateTime::parse_from_rfc3339(&finish).ok()?; + return Some((start_dt, finish_dt)); + } + Ok(Event::Eof) => break, + Err(_) => return None, + _ => {} + } + + buf.clear(); + } + + None +} + +/// Parse TRX (Visual Studio Test Results) file to extract test summary. +/// Returns None if the file doesn't exist or isn't a valid TRX file. 
+pub fn parse_trx_file(path: &Path) -> Option { + let content = std::fs::read_to_string(path).ok()?; + parse_trx_content(&content) +} + +pub fn parse_trx_file_since(path: &Path, since: SystemTime) -> Option { + let modified = std::fs::metadata(path).ok()?.modified().ok()?; + if modified < since { + return None; + } + + parse_trx_file(path) +} + +pub fn parse_trx_files_in_dir(dir: &Path) -> Option { + parse_trx_files_in_dir_since(dir, None) +} + +pub fn parse_trx_files_in_dir_since(dir: &Path, since: Option) -> Option { + if !dir.exists() || !dir.is_dir() { + return None; + } + + let mut summaries = Vec::new(); + let mut min_start: Option> = None; + let mut max_finish: Option> = None; + let entries = std::fs::read_dir(dir).ok()?; + for entry in entries.flatten() { + let path = entry.path(); + if path + .extension() + .is_none_or(|e| !e.eq_ignore_ascii_case("trx")) + { + continue; + } + + if let Some(since) = since { + let modified = match entry.metadata().ok().and_then(|m| m.modified().ok()) { + Some(modified) => modified, + None => continue, + }; + if modified < since { + continue; + } + } + + let content = match std::fs::read_to_string(&path) { + Ok(content) => content, + Err(_) => continue, + }; + + if let Some((start, finish)) = parse_trx_time_bounds(&content) { + min_start = Some(min_start.map_or(start, |prev| prev.min(start))); + max_finish = Some(max_finish.map_or(finish, |prev| prev.max(finish))); + } + + if let Some(summary) = parse_trx_content(&content) { + summaries.push(summary); + } + } + + if summaries.is_empty() { + return None; + } + + let mut merged = TestSummary::default(); + for summary in summaries { + merged.passed += summary.passed; + merged.failed += summary.failed; + merged.skipped += summary.skipped; + merged.total += summary.total; + merged.failed_tests.extend(summary.failed_tests); + merged.project_count += summary.project_count.max(1); + if merged.duration_text.is_none() { + merged.duration_text = summary.duration_text; + } + } + + if let 
(Some(start), Some(finish)) = (min_start, max_finish) { + merged.duration_text = format_duration_between(start, finish); + } + + Some(merged) +} + +pub fn find_recent_trx_in_testresults() -> Option { + find_recent_trx_in_dir(Path::new("./TestResults")) +} + +fn find_recent_trx_in_dir(dir: &Path) -> Option { + if !dir.exists() { + return None; + } + + std::fs::read_dir(dir) + .ok()? + .filter_map(|entry| entry.ok()) + .filter_map(|entry| { + let path = entry.path(); + let is_trx = path + .extension() + .is_some_and(|ext| ext.eq_ignore_ascii_case("trx")); + if !is_trx { + return None; + } + + let modified = entry.metadata().ok()?.modified().ok()?; + Some((modified, path)) + }) + .max_by_key(|(modified, _)| *modified) + .map(|(_, path)| path) +} + +fn parse_trx_content(content: &str) -> Option { + #[derive(Clone, Copy)] + enum CaptureField { + Message, + StackTrace, + } + + let mut reader = Reader::from_str(content); + reader.config_mut().trim_text(true); + let mut buf = Vec::new(); + let mut summary = TestSummary::default(); + let mut saw_test_run = false; + let mut in_failed_result = false; + let mut in_error_info = false; + let mut failed_test_name = String::new(); + let mut message_buf = String::new(); + let mut stack_buf = String::new(); + let mut capture_field: Option = None; + + loop { + match reader.read_event_into(&mut buf) { + Ok(Event::Start(e)) => match local_name(e.name().as_ref()) { + b"TestRun" => saw_test_run = true, + b"Times" => { + let start = extract_attr_value(&reader, &e, b"start"); + let finish = extract_attr_value(&reader, &e, b"finish"); + if let (Some(start), Some(finish)) = (start, finish) { + summary.duration_text = parse_trx_duration(&start, &finish); + } + } + b"Counters" => { + summary.total = parse_usize_attr(&reader, &e, b"total"); + summary.passed = parse_usize_attr(&reader, &e, b"passed"); + summary.failed = parse_usize_attr(&reader, &e, b"failed"); + } + b"UnitTestResult" => { + let outcome = extract_attr_value(&reader, &e, 
b"outcome") + .unwrap_or_else(|| "Unknown".to_string()); + + if outcome == "Failed" { + in_failed_result = true; + in_error_info = false; + capture_field = None; + message_buf.clear(); + stack_buf.clear(); + failed_test_name = extract_attr_value(&reader, &e, b"testName") + .unwrap_or_else(|| "unknown".to_string()); + } + } + b"ErrorInfo" => { + if in_failed_result { + in_error_info = true; + } + } + b"Message" => { + if in_failed_result && in_error_info { + capture_field = Some(CaptureField::Message); + message_buf.clear(); + } + } + b"StackTrace" => { + if in_failed_result && in_error_info { + capture_field = Some(CaptureField::StackTrace); + stack_buf.clear(); + } + } + _ => {} + }, + Ok(Event::Empty(e)) => match local_name(e.name().as_ref()) { + b"Times" => { + let start = extract_attr_value(&reader, &e, b"start"); + let finish = extract_attr_value(&reader, &e, b"finish"); + if let (Some(start), Some(finish)) = (start, finish) { + summary.duration_text = parse_trx_duration(&start, &finish); + } + } + b"Counters" => { + summary.total = parse_usize_attr(&reader, &e, b"total"); + summary.passed = parse_usize_attr(&reader, &e, b"passed"); + summary.failed = parse_usize_attr(&reader, &e, b"failed"); + } + b"UnitTestResult" => { + let outcome = extract_attr_value(&reader, &e, b"outcome") + .unwrap_or_else(|| "Unknown".to_string()); + if outcome == "Failed" { + let name = extract_attr_value(&reader, &e, b"testName") + .unwrap_or_else(|| "unknown".to_string()); + summary.failed_tests.push(FailedTest { + name, + details: Vec::new(), + }); + } + } + _ => {} + }, + Ok(Event::Text(e)) => { + if !in_failed_result { + buf.clear(); + continue; + } + + let text = String::from_utf8_lossy(e.as_ref()); + match capture_field { + Some(CaptureField::Message) => message_buf.push_str(&text), + Some(CaptureField::StackTrace) => stack_buf.push_str(&text), + None => {} + } + } + Ok(Event::CData(e)) => { + if !in_failed_result { + buf.clear(); + continue; + } + + let text = 
String::from_utf8_lossy(e.as_ref()); + match capture_field { + Some(CaptureField::Message) => message_buf.push_str(&text), + Some(CaptureField::StackTrace) => stack_buf.push_str(&text), + None => {} + } + } + Ok(Event::End(e)) => match local_name(e.name().as_ref()) { + b"Message" | b"StackTrace" => { + capture_field = None; + } + b"ErrorInfo" => { + in_error_info = false; + } + b"UnitTestResult" => { + if in_failed_result { + let mut details = Vec::new(); + + let message = message_buf.trim(); + if !message.is_empty() { + details.push(message.to_string()); + } + + let stack = stack_buf.trim(); + if !stack.is_empty() { + let stack_lines: Vec<&str> = stack.lines().take(3).collect(); + if !stack_lines.is_empty() { + details.push(stack_lines.join("\n")); + } + } + + summary.failed_tests.push(FailedTest { + name: failed_test_name.clone(), + details, + }); + + in_failed_result = false; + in_error_info = false; + capture_field = None; + message_buf.clear(); + stack_buf.clear(); + } + } + _ => {} + }, + Ok(Event::Eof) => break, + Err(_) => return None, + _ => {} + } + + buf.clear(); + } + + if !saw_test_run { + return None; + } + + // Calculate skipped from counters if available + if summary.total > 0 { + summary.skipped = summary + .total + .saturating_sub(summary.passed + summary.failed); + } + + // Set project count to at least 1 if there were any tests + if summary.total > 0 { + summary.project_count = 1; + } + + Some(summary) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::time::Duration; + + #[test] + fn test_parse_trx_content_extracts_passed_counts() { + let trx = r#" + + + + + +"#; + + let summary = parse_trx_content(trx).expect("valid TRX"); + assert_eq!(summary.total, 42); + assert_eq!(summary.passed, 40); + assert_eq!(summary.failed, 2); + assert_eq!(summary.skipped, 0); + assert_eq!(summary.duration_text.as_deref(), Some("2.5 s")); + } + + #[test] + fn test_parse_trx_content_extracts_failed_tests_with_details() { + let trx = r#" + + + + + + Expected: 
5, Actual: 4 + at MyTests.Calculator.Add_ShouldFail()\nat line 42 + + + + + +"#; + + let summary = parse_trx_content(trx).expect("valid TRX"); + assert_eq!(summary.failed_tests.len(), 1); + assert_eq!( + summary.failed_tests[0].name, + "MyTests.Calculator.Add_ShouldFail" + ); + assert!(summary.failed_tests[0].details[0].contains("Expected: 5, Actual: 4")); + } + + #[test] + fn test_parse_trx_content_extracts_counters_when_attribute_order_varies() { + let trx = r#" + + + + +"#; + + let summary = parse_trx_content(trx).expect("valid TRX"); + assert_eq!(summary.total, 10); + assert_eq!(summary.passed, 7); + assert_eq!(summary.failed, 3); + } + + #[test] + fn test_parse_trx_content_extracts_failed_tests_when_attribute_order_varies() { + let trx = r#" + + + + + + Boom + at MyTests.Ordering.ShouldStillParse() + + + + + +"#; + + let summary = parse_trx_content(trx).expect("valid TRX"); + assert_eq!(summary.failed, 1); + assert_eq!(summary.failed_tests.len(), 1); + assert_eq!( + summary.failed_tests[0].name, + "MyTests.Ordering.ShouldStillParse" + ); + } + + #[test] + fn test_parse_trx_content_returns_none_for_invalid_xml() { + let not_trx = "This is not a TRX file"; + assert!(parse_trx_content(not_trx).is_none()); + } + + #[test] + fn test_find_recent_trx_in_dir_returns_none_when_missing() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let missing_dir = temp_dir.path().join("TestResults"); + + let found = find_recent_trx_in_dir(&missing_dir); + assert!(found.is_none()); + } + + #[test] + fn test_find_recent_trx_in_dir_picks_newest_trx() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let testresults_dir = temp_dir.path().join("TestResults"); + std::fs::create_dir_all(&testresults_dir).expect("create TestResults"); + + let old_trx = testresults_dir.join("old.trx"); + let new_trx = testresults_dir.join("new.trx"); + std::fs::write(&old_trx, "old").expect("write old"); + std::thread::sleep(Duration::from_millis(5)); + 
std::fs::write(&new_trx, "new").expect("write new"); + + let found = find_recent_trx_in_dir(&testresults_dir).expect("should find newest trx"); + assert_eq!(found, new_trx); + } + + #[test] + fn test_find_recent_trx_in_dir_ignores_non_trx_files() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let testresults_dir = temp_dir.path().join("TestResults"); + std::fs::create_dir_all(&testresults_dir).expect("create TestResults"); + + let txt = testresults_dir.join("notes.txt"); + std::fs::write(&txt, "noop").expect("write txt"); + + let found = find_recent_trx_in_dir(&testresults_dir); + assert!(found.is_none()); + } + + #[test] + fn test_parse_trx_files_in_dir_aggregates_counts_and_wall_clock_duration() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let trx_dir = temp_dir.path().join("TestResults"); + std::fs::create_dir_all(&trx_dir).expect("create TestResults"); + + let trx_one = r#" + + + + + +"#; + + let trx_two = r#" + + + + + +"#; + + std::fs::write(trx_dir.join("a.trx"), trx_one).expect("write first trx"); + std::fs::write(trx_dir.join("b.trx"), trx_two).expect("write second trx"); + + let summary = parse_trx_files_in_dir(&trx_dir).expect("merged summary"); + assert_eq!(summary.total, 30); + assert_eq!(summary.passed, 29); + assert_eq!(summary.failed, 1); + assert_eq!(summary.duration_text.as_deref(), Some("3.0 s")); + } + + #[test] + fn test_parse_trx_files_in_dir_since_ignores_older_files() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let trx_dir = temp_dir.path().join("TestResults"); + std::fs::create_dir_all(&trx_dir).expect("create TestResults"); + + let trx_old = r#" +"#; + std::fs::write(trx_dir.join("old.trx"), trx_old).expect("write old trx"); + std::thread::sleep(Duration::from_millis(5)); + let since = SystemTime::now(); + std::thread::sleep(Duration::from_millis(5)); + + let trx_new = r#" +"#; + std::fs::write(trx_dir.join("new.trx"), trx_new).expect("write new trx"); + + let summary = 
parse_trx_files_in_dir_since(&trx_dir, Some(since)).expect("merged summary"); + assert_eq!(summary.total, 3); + assert_eq!(summary.failed, 1); + } + + #[test] + fn test_parse_trx_files_in_dir_since_handles_uppercase_extension() { + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let trx_dir = temp_dir.path().join("TestResults"); + std::fs::create_dir_all(&trx_dir).expect("create TestResults"); + + let trx = r#" +"#; + std::fs::write(trx_dir.join("UPPER.TRX"), trx).expect("write trx"); + + let summary = parse_trx_files_in_dir_since(&trx_dir, None).expect("summary"); + assert_eq!(summary.total, 3); + assert_eq!(summary.failed, 1); + } +} diff --git a/src/main.rs b/src/main.rs index cef7f3e3..3e6a66c3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +mod binlog; mod cargo_cmd; mod cc_economics; mod ccusage; @@ -8,6 +9,9 @@ mod deps; mod diff_cmd; mod discover; mod display_helpers; +mod dotnet_cmd; +mod dotnet_format_report; +mod dotnet_trx; mod env_cmd; mod filter; mod find_cmd; @@ -410,6 +414,12 @@ enum Commands { command: CargoCommands, }, + /// .NET CLI commands with compact output + Dotnet { + #[command(subcommand)] + command: DotnetCommands, + }, + /// npm run with filtered output (strip boilerplate) Npm { /// npm run arguments (script name + options) @@ -807,6 +817,37 @@ enum GoCommands { Other(Vec), } +#[derive(Subcommand)] +enum DotnetCommands { + /// Build with compact output (errors/warnings summary) + Build { + /// Additional dotnet build arguments + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + args: Vec, + }, + /// Test with compact output (failed tests only) + Test { + /// Additional dotnet test arguments + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + args: Vec, + }, + /// Restore with compact output + Restore { + /// Additional dotnet restore arguments + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + args: Vec, + }, + /// Format with compact output + Format { + /// Additional 
dotnet format arguments + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + args: Vec, + }, + /// Passthrough: runs any unsupported dotnet subcommand directly + #[command(external_subcommand)] + Other(Vec), +} + fn main() -> Result<()> { let cli = Cli::parse(); @@ -1211,6 +1252,24 @@ fn main() -> Result<()> { } }, + Commands::Dotnet { command } => match command { + DotnetCommands::Build { args } => { + dotnet_cmd::run_build(&args, cli.verbose)?; + } + DotnetCommands::Test { args } => { + dotnet_cmd::run_test(&args, cli.verbose)?; + } + DotnetCommands::Restore { args } => { + dotnet_cmd::run_restore(&args, cli.verbose)?; + } + DotnetCommands::Format { args } => { + dotnet_cmd::run_format(&args, cli.verbose)?; + } + DotnetCommands::Other(args) => { + dotnet_cmd::run_passthrough(&args, cli.verbose)?; + } + }, + Commands::Npm { args } => { npm_cmd::run(&args, cli.verbose, cli.skip_env)?; } diff --git a/tests/fixtures/dotnet/build_failed.txt b/tests/fixtures/dotnet/build_failed.txt new file mode 100644 index 00000000..be4bdec2 --- /dev/null +++ b/tests/fixtures/dotnet/build_failed.txt @@ -0,0 +1,11 @@ + Determining projects to restore... + All projects are up-to-date for restore. +/private/tmp/RtkDotnetSmoke/Broken.cs(7,17): error CS1525: Invalid expression term ';' [/private/tmp/RtkDotnetSmoke/RtkDotnetSmoke.csproj] + +Build FAILED. 
+ +/private/tmp/RtkDotnetSmoke/Broken.cs(7,17): error CS1525: Invalid expression term ';' [/private/tmp/RtkDotnetSmoke/RtkDotnetSmoke.csproj] + 0 Warning(s) + 1 Error(s) + +Time Elapsed 00:00:00.76 diff --git a/tests/fixtures/dotnet/format_changes.json b/tests/fixtures/dotnet/format_changes.json new file mode 100644 index 00000000..93fe1cfa --- /dev/null +++ b/tests/fixtures/dotnet/format_changes.json @@ -0,0 +1,31 @@ +[ + { + "FileName": "Program.cs", + "FilePath": "src/Program.cs", + "FileChanges": [ + { + "LineNumber": 42, + "CharNumber": 17, + "DiagnosticId": "WHITESPACE", + "FormatDescription": "Fix whitespace" + } + ] + }, + { + "FileName": "Utils.cs", + "FilePath": "src/Utils.cs", + "FileChanges": [ + { + "LineNumber": 15, + "CharNumber": 8, + "DiagnosticId": "IDE0055", + "FormatDescription": "Fix formatting" + } + ] + }, + { + "FileName": "Tests.cs", + "FilePath": "tests/Tests.cs", + "FileChanges": [] + } +] diff --git a/tests/fixtures/dotnet/format_empty.json b/tests/fixtures/dotnet/format_empty.json new file mode 100644 index 00000000..fe51488c --- /dev/null +++ b/tests/fixtures/dotnet/format_empty.json @@ -0,0 +1 @@ +[] diff --git a/tests/fixtures/dotnet/format_success.json b/tests/fixtures/dotnet/format_success.json new file mode 100644 index 00000000..bd3c18b2 --- /dev/null +++ b/tests/fixtures/dotnet/format_success.json @@ -0,0 +1,12 @@ +[ + { + "FileName": "Program.cs", + "FilePath": "src/Program.cs", + "FileChanges": [] + }, + { + "FileName": "Utils.cs", + "FilePath": "src/Utils.cs", + "FileChanges": [] + } +] diff --git a/tests/fixtures/dotnet/test_failed.txt b/tests/fixtures/dotnet/test_failed.txt new file mode 100644 index 00000000..7bca9cce --- /dev/null +++ b/tests/fixtures/dotnet/test_failed.txt @@ -0,0 +1,18 @@ + Determining projects to restore... + All projects are up-to-date for restore. 
+ RtkDotnetSmoke -> /private/tmp/RtkDotnetSmoke/bin/Debug/net10.0/RtkDotnetSmoke.dll +Test run for /private/tmp/RtkDotnetSmoke/bin/Debug/net10.0/RtkDotnetSmoke.dll (.NETCoreApp,Version=v10.0) +VSTest version 18.0.1 (arm64) + +Starting test execution, please wait... +A total of 1 test files matched the specified pattern. +[xUnit.net 00:00:00.11] RtkDotnetSmoke.UnitTest1.Test1 [FAIL] + Failed RtkDotnetSmoke.UnitTest1.Test1 [4 ms] + Error Message: + Assert.Equal() Failure: Values differ +Expected: 2 +Actual: 3 + Stack Trace: + at RtkDotnetSmoke.UnitTest1.Test1() in /private/tmp/RtkDotnetSmoke/UnitTest1.cs:line 8 + +Failed! - Failed: 1, Passed: 0, Skipped: 0, Total: 1, Duration: 13 ms - RtkDotnetSmoke.dll (net10.0)