From 886a8f7c6a6b4320a2f0fe285e16b9ba171ed879 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 27 Feb 2026 15:51:38 +0000 Subject: [PATCH 01/15] Initial plan From b5fbe72a07a7ebd7e3b69c9140e4a32b9c28fa81 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:13:18 +0000 Subject: [PATCH 02/15] Add current: true checkout flag to mark logical current repository Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- pkg/parser/schemas/main_workflow_schema.json | 4 + pkg/workflow/checkout_manager.go | 73 ++++++++++-- pkg/workflow/checkout_manager_test.go | 113 +++++++++++++++++++ pkg/workflow/unified_prompt_step.go | 17 +++ 4 files changed, 199 insertions(+), 8 deletions(-) diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index ae338c5e7b..b18826631f 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -7898,6 +7898,10 @@ "type": "string", "description": "GitHub token for authentication. Use ${{ secrets.MY_TOKEN }} to reference a secret. Credentials are always removed after checkout (persist-credentials: false is enforced).", "examples": ["${{ secrets.MY_PAT }}", "${{ secrets.GITHUB_TOKEN }}"] + }, + "current": { + "type": "boolean", + "description": "Marks this checkout as the logical current repository for the workflow. When set to true, the AI agent will treat this repository as its primary working target. Only one checkout may have current set to true. Useful for central-repo workflows targeting a different repository." 
} } } diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index 58e459bdb7..45ed05fbcf 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -55,6 +55,12 @@ type CheckoutConfig struct { // LFS enables checkout of Git LFS objects. LFS bool `json:"lfs,omitempty"` + + // Current marks this checkout as the logical "current" repository for the workflow. + // When set, the AI agent will treat this repository as its primary working target. + // Only one checkout may have Current set to true. + // This is useful for workflows that run from a central repo targeting a different repo. + Current bool `json:"current,omitempty"` } // checkoutKey uniquely identifies a checkout target used for grouping/deduplication. @@ -74,6 +80,7 @@ type resolvedCheckout struct { sparsePatterns []string // merged sparse-checkout patterns submodules string lfs bool + current bool // true if this checkout is the logical current repository } // CheckoutManager collects checkout requests and merges them to minimize @@ -132,6 +139,9 @@ func (cm *CheckoutManager) add(cfg *CheckoutConfig) { if cfg.LFS { entry.lfs = true } + if cfg.Current { + entry.current = true + } if cfg.Submodules != "" && entry.submodules == "" { entry.submodules = cfg.Submodules } @@ -144,6 +154,7 @@ func (cm *CheckoutManager) add(cfg *CheckoutConfig) { fetchDepth: cfg.FetchDepth, submodules: cfg.Submodules, lfs: cfg.LFS, + current: cfg.Current, } if cfg.SparseCheckout != "" { entry.sparsePatterns = mergeSparsePatterns(nil, cfg.SparseCheckout) @@ -169,6 +180,18 @@ func (cm *CheckoutManager) GetDefaultCheckoutOverride() *resolvedCheckout { return nil } +// GetCurrentRepository returns the repository of the checkout marked as current (current: true). +// Returns an empty string if no checkout is marked as current or if the current checkout +// uses the default repository (empty Repository field). 
+func (cm *CheckoutManager) GetCurrentRepository() string { + for _, entry := range cm.ordered { + if entry.current { + return entry.key.repository + } + } + return "" +} + // GenerateAdditionalCheckoutSteps generates YAML step lines for all non-default // (additional) checkouts — those that target a specific path other than the root. // The caller is responsible for emitting the default workspace checkout separately. @@ -369,18 +392,18 @@ func ParseCheckoutConfigs(raw any) ([]*CheckoutConfig, error) { } checkoutManagerLog.Printf("Parsing checkout configuration: type=%T", raw) + var configs []*CheckoutConfig + // Try single object first if singleMap, ok := raw.(map[string]any); ok { cfg, err := checkoutConfigFromMap(singleMap) if err != nil { return nil, fmt.Errorf("invalid checkout configuration: %w", err) } - return []*CheckoutConfig{cfg}, nil - } - - // Try array of objects - if arr, ok := raw.([]any); ok { - configs := make([]*CheckoutConfig, 0, len(arr)) + configs = []*CheckoutConfig{cfg} + } else if arr, ok := raw.([]any); ok { + // Try array of objects + configs = make([]*CheckoutConfig, 0, len(arr)) for i, item := range arr { itemMap, ok := item.(map[string]any) if !ok { @@ -392,10 +415,24 @@ func ParseCheckoutConfigs(raw any) ([]*CheckoutConfig, error) { } configs = append(configs, cfg) } - return configs, nil + } else { + return nil, fmt.Errorf("checkout must be an object or an array of objects, got %T", raw) + } + + // Validate that at most one checkout has current: true. + // Multiple current checkouts are not allowed since only one repo can be + // the logical primary target for the agent at a time. 
+ currentCount := 0 + for _, cfg := range configs { + if cfg.Current { + currentCount++ + } + } + if currentCount > 1 { + return nil, fmt.Errorf("only one checkout may have current: true, found %d", currentCount) } - return nil, fmt.Errorf("checkout must be an object or an array of objects, got %T", raw) + return configs, nil } // checkoutConfigFromMap converts a raw map to a CheckoutConfig. @@ -487,5 +524,25 @@ func checkoutConfigFromMap(m map[string]any) (*CheckoutConfig, error) { cfg.LFS = b } + if v, ok := m["current"]; ok { + b, ok := v.(bool) + if !ok { + return nil, errors.New("checkout.current must be a boolean") + } + cfg.Current = b + } + return cfg, nil } + +// getCurrentCheckoutRepository returns the repository of the checkout marked as current (current: true). +// Returns an empty string if no checkout has current: true or if the current checkout +// uses the default repository (empty Repository field). +func getCurrentCheckoutRepository(checkouts []*CheckoutConfig) string { + for _, cfg := range checkouts { + if cfg != nil && cfg.Current { + return cfg.Repository + } + } + return "" +} diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 11af372dca..20267f8c1c 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -336,3 +336,116 @@ func TestMergeSparsePatterns(t *testing.T) { assert.Equal(t, []string{"src/"}, result, "should preserve existing patterns") }) } + +// TestCheckoutCurrentFlag verifies the current: true checkout flag behavior. 
+func TestCheckoutCurrentFlag(t *testing.T) { + t.Run("parse current: true from single object", func(t *testing.T) { + raw := map[string]any{ + "repository": "owner/target-repo", + "current": true, + } + configs, err := ParseCheckoutConfigs(raw) + require.NoError(t, err, "should parse without error") + require.Len(t, configs, 1, "should produce one config") + assert.True(t, configs[0].Current, "current flag should be true") + assert.Equal(t, "owner/target-repo", configs[0].Repository, "repository should be set") + }) + + t.Run("parse current: false from map", func(t *testing.T) { + raw := map[string]any{"current": false} + configs, err := ParseCheckoutConfigs(raw) + require.NoError(t, err, "should parse without error") + require.Len(t, configs, 1) + assert.False(t, configs[0].Current, "current flag should be false") + }) + + t.Run("invalid current type returns error", func(t *testing.T) { + raw := map[string]any{"current": "yes"} + _, err := ParseCheckoutConfigs(raw) + assert.Error(t, err, "non-boolean current should return error") + }) + + t.Run("multiple current: true in array returns error", func(t *testing.T) { + raw := []any{ + map[string]any{"repository": "owner/repo1", "path": "./r1", "current": true}, + map[string]any{"repository": "owner/repo2", "path": "./r2", "current": true}, + } + _, err := ParseCheckoutConfigs(raw) + require.Error(t, err, "multiple current: true should return error") + assert.Contains(t, err.Error(), "only one checkout may have current: true", "error should mention the constraint") + }) + + t.Run("single current: true in array is valid", func(t *testing.T) { + raw := []any{ + map[string]any{"path": "."}, + map[string]any{"repository": "owner/target", "path": "./target", "current": true}, + } + configs, err := ParseCheckoutConfigs(raw) + require.NoError(t, err, "single current: true in array should be valid") + require.Len(t, configs, 2) + assert.False(t, configs[0].Current, "first checkout should not be current") + assert.True(t, 
configs[1].Current, "second checkout should be current") + }) +} + +// TestGetCurrentRepository verifies CheckoutManager.GetCurrentRepository behavior. +func TestGetCurrentRepository(t *testing.T) { + t.Run("returns empty string when no current checkout", func(t *testing.T) { + cm := NewCheckoutManager([]*CheckoutConfig{ + {Repository: "owner/repo", Path: "./libs"}, + }) + assert.Empty(t, cm.GetCurrentRepository(), "should return empty string without current flag") + }) + + t.Run("returns repository when current: true is set", func(t *testing.T) { + cm := NewCheckoutManager([]*CheckoutConfig{ + {Repository: "owner/target-repo", Path: "./target", Current: true}, + }) + assert.Equal(t, "owner/target-repo", cm.GetCurrentRepository(), "should return current checkout repository") + }) + + t.Run("returns empty string when current: true but no repository", func(t *testing.T) { + cm := NewCheckoutManager([]*CheckoutConfig{ + {Path: ".", Current: true}, + }) + assert.Empty(t, cm.GetCurrentRepository(), "should return empty string when repository is not set") + }) + + t.Run("returns repository from current in multiple checkouts", func(t *testing.T) { + cm := NewCheckoutManager([]*CheckoutConfig{ + {Path: "."}, + {Repository: "owner/central", Path: "./central"}, + {Repository: "owner/target", Path: "./target", Current: true}, + }) + assert.Equal(t, "owner/target", cm.GetCurrentRepository(), "should return the current checkout repository") + }) +} + +// TestGetCurrentCheckoutRepository verifies the standalone helper function. 
+func TestGetCurrentCheckoutRepository(t *testing.T) { + t.Run("nil slice returns empty string", func(t *testing.T) { + assert.Empty(t, getCurrentCheckoutRepository(nil), "nil slice should return empty string") + }) + + t.Run("no current flag returns empty string", func(t *testing.T) { + configs := []*CheckoutConfig{ + {Repository: "owner/repo"}, + } + assert.Empty(t, getCurrentCheckoutRepository(configs), "no current flag should return empty string") + }) + + t.Run("current: true returns repository", func(t *testing.T) { + configs := []*CheckoutConfig{ + {Repository: "owner/other"}, + {Repository: "owner/target", Current: true}, + } + assert.Equal(t, "owner/target", getCurrentCheckoutRepository(configs), "should return current checkout repository") + }) + + t.Run("current: true with no repository returns empty string", func(t *testing.T) { + configs := []*CheckoutConfig{ + {Current: true}, + } + assert.Empty(t, getCurrentCheckoutRepository(configs), "current without repository should return empty string") + }) +} diff --git a/pkg/workflow/unified_prompt_step.go b/pkg/workflow/unified_prompt_step.go index 13c3f32f64..6b9ddc9691 100644 --- a/pkg/workflow/unified_prompt_step.go +++ b/pkg/workflow/unified_prompt_step.go @@ -304,6 +304,23 @@ func (c *Compiler) collectPromptSections(data *WorkflowData) []PromptSection { // Replace expressions with environment variable references modifiedPromptText := extractor.ReplaceExpressionsWithEnvVars(githubContextPromptText) + // If a checkout is marked as current and has a non-default repository, + // inject it into the GitHub context so the agent knows its primary target. 
+ if currentRepo := getCurrentCheckoutRepository(data.CheckoutConfigs); currentRepo != "" { + unifiedPromptLog.Printf("Injecting current-repository into GitHub context: %s", currentRepo) + currentRepoLine := "- **current-repository**: " + currentRepo + + " (this is the repository you are working on; use this as the target for all GitHub operations unless otherwise specified)" + // Append the current-repository line before the closing tag. + // We build the insertion safely by finding the tag boundary. + const closeTag = "" + if idx := strings.LastIndex(modifiedPromptText, closeTag); idx >= 0 { + modifiedPromptText = modifiedPromptText[:idx] + currentRepoLine + "\n" + modifiedPromptText[idx:] + } else { + // Closing tag not found — append at the end as a safe fallback. + modifiedPromptText += "\n" + currentRepoLine + "\n" + } + } + // Build environment variables map envVars := make(map[string]string) for _, mapping := range expressionMappings { From 688ab13be302e5da5976d5526c7f7a09ed9e8828 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 01:59:47 +0000 Subject: [PATCH 03/15] Populate GitHub context prompt with full checkout list Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .github/workflows/smoke-codex.lock.yml | 2 + pkg/workflow/checkout_manager.go | 43 ++++++++++++++++++ pkg/workflow/checkout_manager_test.go | 62 ++++++++++++++++++++++++++ pkg/workflow/unified_prompt_step.go | 40 ++++++++--------- 4 files changed, 126 insertions(+), 21 deletions(-) diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index cb2e70817d..1e1728dc47 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -188,6 +188,8 @@ jobs: {{#if __GH_AW_GITHUB_RUN_ID__ }} - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ {{/if}} + - **checkouts**: The following repositories have been checked out and are available in 
the workspace: + - `workspace root` → `__GH_AW_GITHUB_REPOSITORY__` GH_AW_PROMPT_EOF diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index 45ed05fbcf..a1861819cd 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -546,3 +546,46 @@ func getCurrentCheckoutRepository(checkouts []*CheckoutConfig) string { } return "" } + +// buildCheckoutsPromptContent returns a markdown bullet list describing all user-configured +// checkouts for inclusion in the GitHub context prompt. +// Returns an empty string when no checkouts are configured. +// +// The generated content may include "${{ github.repository }}" for any checkout that does +// not have an explicit repository configured (defaulting to the triggering repository). +// Callers must ensure these expressions are processed by an ExpressionExtractor so the +// placeholder substitution step can resolve them at runtime. +func buildCheckoutsPromptContent(checkouts []*CheckoutConfig) string { + if len(checkouts) == 0 { + return "" + } + + var sb strings.Builder + sb.WriteString("- **checkouts**: The following repositories have been checked out and are available in the workspace:\n") + + for _, cfg := range checkouts { + if cfg == nil { + continue + } + + // Determine human-readable path label + path := cfg.Path + if path == "" { + path = "." 
+ } + + // Determine repo: use configured value or fall back to the triggering repository expression + repo := cfg.Repository + if repo == "" { + repo = "${{ github.repository }}" + } + + line := fmt.Sprintf(" - `%s` → `%s`", path, repo) + if cfg.Current { + line += " (**current** - this is the repository you are working on; use this as the target for all GitHub operations unless otherwise specified)" + } + sb.WriteString(line + "\n") + } + + return sb.String() +} diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 20267f8c1c..a5fb7cf811 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -449,3 +449,65 @@ func TestGetCurrentCheckoutRepository(t *testing.T) { assert.Empty(t, getCurrentCheckoutRepository(configs), "current without repository should return empty string") }) } + +// TestBuildCheckoutsPromptContent verifies the prompt content generation for the checkout list. +func TestBuildCheckoutsPromptContent(t *testing.T) { + t.Run("nil slice returns empty string", func(t *testing.T) { + assert.Empty(t, buildCheckoutsPromptContent(nil), "nil should return empty string") + }) + + t.Run("empty slice returns empty string", func(t *testing.T) { + assert.Empty(t, buildCheckoutsPromptContent([]*CheckoutConfig{}), "empty slice should return empty string") + }) + + t.Run("default checkout with no repo uses github.repository expression", func(t *testing.T) { + content := buildCheckoutsPromptContent([]*CheckoutConfig{ + {}, + }) + assert.Contains(t, content, "`.`", "should show '.' 
path for empty path") + assert.Contains(t, content, "${{ github.repository }}", "should reference github.repository expression for default checkout") + }) + + t.Run("path dot treated same as empty path", func(t *testing.T) { + emptyContent := buildCheckoutsPromptContent([]*CheckoutConfig{{Path: ""}}) + dotContent := buildCheckoutsPromptContent([]*CheckoutConfig{{Path: "."}}) + assert.Equal(t, emptyContent, dotContent, "empty path and '.' should produce identical output") + }) + + t.Run("checkout with explicit repo shows repo", func(t *testing.T) { + content := buildCheckoutsPromptContent([]*CheckoutConfig{ + {Repository: "owner/target", Path: "./target"}, + }) + assert.Contains(t, content, "./target", "should show the configured path") + assert.Contains(t, content, "owner/target", "should show the configured repo") + assert.NotContains(t, content, "github.repository", "should not include github.repository expression for explicit repo") + }) + + t.Run("current checkout is marked", func(t *testing.T) { + content := buildCheckoutsPromptContent([]*CheckoutConfig{ + {Repository: "owner/target", Path: "./target", Current: true}, + }) + assert.Contains(t, content, "**current**", "current checkout should be marked") + assert.Contains(t, content, "this is the repository you are working on", "current checkout should have instructions") + }) + + t.Run("non-current checkout is not marked", func(t *testing.T) { + content := buildCheckoutsPromptContent([]*CheckoutConfig{ + {Repository: "owner/libs", Path: "./libs"}, + }) + assert.NotContains(t, content, "**current**", "non-current checkout should not be marked") + }) + + t.Run("multiple checkouts all listed", func(t *testing.T) { + content := buildCheckoutsPromptContent([]*CheckoutConfig{ + {Path: "."}, + {Repository: "owner/target", Path: "./target", Current: true}, + {Repository: "owner/libs", Path: "./libs"}, + }) + assert.Contains(t, content, "./target", "should include target checkout") + assert.Contains(t, content, 
"owner/target", "should include target repo") + assert.Contains(t, content, "./libs", "should include libs checkout") + assert.Contains(t, content, "owner/libs", "should include libs repo") + assert.Contains(t, content, "**current**", "current checkout should be marked") + }) +} diff --git a/pkg/workflow/unified_prompt_step.go b/pkg/workflow/unified_prompt_step.go index 6b9ddc9691..69b5fa275c 100644 --- a/pkg/workflow/unified_prompt_step.go +++ b/pkg/workflow/unified_prompt_step.go @@ -297,29 +297,27 @@ func (c *Compiler) collectPromptSections(data *WorkflowData) []PromptSection { // 8. GitHub context (if GitHub tool is enabled) if hasGitHubTool(data.ParsedTools) { unifiedPromptLog.Print("Adding GitHub context section") - // Extract expressions from GitHub context prompt + + // Build the combined prompt text: base github context + optional checkout list. + // The checkout list may contain ${{ github.repository }} which must go through + // the expression extractor so the placeholder substitution step can resolve it. + combinedPromptText := githubContextPromptText + if checkoutsContent := buildCheckoutsPromptContent(data.CheckoutConfigs); checkoutsContent != "" { + unifiedPromptLog.Printf("Injecting checkout list into GitHub context (%d checkouts)", len(data.CheckoutConfigs)) + const closeTag = "" + if idx := strings.LastIndex(combinedPromptText, closeTag); idx >= 0 { + combinedPromptText = combinedPromptText[:idx] + checkoutsContent + combinedPromptText[idx:] + } else { + combinedPromptText += "\n" + checkoutsContent + } + } + + // Extract expressions from the combined content (includes any new expressions + // introduced by the checkout list, e.g. ${{ github.repository }}). 
extractor := NewExpressionExtractor() - expressionMappings, err := extractor.ExtractExpressions(githubContextPromptText) + expressionMappings, err := extractor.ExtractExpressions(combinedPromptText) if err == nil && len(expressionMappings) > 0 { - // Replace expressions with environment variable references - modifiedPromptText := extractor.ReplaceExpressionsWithEnvVars(githubContextPromptText) - - // If a checkout is marked as current and has a non-default repository, - // inject it into the GitHub context so the agent knows its primary target. - if currentRepo := getCurrentCheckoutRepository(data.CheckoutConfigs); currentRepo != "" { - unifiedPromptLog.Printf("Injecting current-repository into GitHub context: %s", currentRepo) - currentRepoLine := "- **current-repository**: " + currentRepo + - " (this is the repository you are working on; use this as the target for all GitHub operations unless otherwise specified)" - // Append the current-repository line before the closing tag. - // We build the insertion safely by finding the tag boundary. - const closeTag = "" - if idx := strings.LastIndex(modifiedPromptText, closeTag); idx >= 0 { - modifiedPromptText = modifiedPromptText[:idx] + currentRepoLine + "\n" + modifiedPromptText[idx:] - } else { - // Closing tag not found — append at the end as a safe fallback. 
- modifiedPromptText += "\n" + currentRepoLine + "\n" - } - } + modifiedPromptText := extractor.ReplaceExpressionsWithEnvVars(combinedPromptText) // Build environment variables map envVars := make(map[string]string) From b137b9696e4b68a97b634b4e2024b23031d10e7b Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 02:06:38 +0000 Subject: [PATCH 04/15] docs: document checkout current: true flag in cross-repository reference Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .github/workflows/smoke-codex.lock.yml | 2 +- .../src/content/docs/reference/cross-repository.md | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 1e1728dc47..99603bc2dc 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -189,7 +189,7 @@ jobs: - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ {{/if}} - **checkouts**: The following repositories have been checked out and are available in the workspace: - - `workspace root` → `__GH_AW_GITHUB_REPOSITORY__` + - `.` → `__GH_AW_GITHUB_REPOSITORY__` GH_AW_PROMPT_EOF diff --git a/docs/src/content/docs/reference/cross-repository.md b/docs/src/content/docs/reference/cross-repository.md index 18620ce0bd..caec1e7cee 100644 --- a/docs/src/content/docs/reference/cross-repository.md +++ b/docs/src/content/docs/reference/cross-repository.md @@ -56,10 +56,24 @@ checkout: | `sparse-checkout` | string | Newline-separated patterns for sparse checkout (e.g., `.github/\nsrc/`). | | `submodules` | string/bool | Submodule handling: `"recursive"`, `"true"`, or `"false"`. | | `lfs` | boolean | Download Git LFS objects. | +| `current` | boolean | Marks this checkout as the primary working repository. The agent uses this as the default target for all GitHub operations. 
Only one checkout may set `current: true`; the compiler rejects workflows where multiple checkouts enable it. | > [!TIP] > Credentials are always removed after checkout (`persist-credentials: false` is enforced) to prevent credential exfiltration by agents. +### Marking a Primary Repository (`current: true`) + +When a workflow running from a central repository targets a different repository, use `current: true` to tell the agent which repository to treat as its primary working target. The agent uses this as the default for all GitHub operations (creating issues, opening PRs, reading content) unless the prompt instructs otherwise. When omitted, the agent defaults to the repository where the workflow is running. + +```yaml wrap +checkout: + - path: . # central/control repo + - repository: org/target-repo + path: ./target + github-token: ${{ secrets.CROSS_REPO_PAT }} + current: true # agent's primary target +``` + ### Multiple Checkout Merging When multiple configurations target the same path and repository: From fc7251ab54cd389331ce48253bd03c32475ad72f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 05:30:34 +0000 Subject: [PATCH 05/15] Merge main, fix current validation + normalize path dot, recompile Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .devcontainer/devcontainer.json | 10 + .github/aw/actions-lock.json | 128 +- .../agent-performance-analyzer.lock.yml | 32 +- .../workflows/agent-persona-explorer.lock.yml | 36 +- .github/workflows/agentics-maintenance.yml | 2 +- .github/workflows/ai-moderator.lock.yml | 26 +- .github/workflows/archie.lock.yml | 26 +- .github/workflows/artifacts-summary.lock.yml | 26 +- .github/workflows/audit-workflows.lock.yml | 50 +- .github/workflows/auto-triage-issues.lock.yml | 26 +- .github/workflows/blog-auditor.lock.yml | 24 +- .github/workflows/bot-detection.lock.yml | 24 +- .github/workflows/brave.lock.yml | 26 +- 
.../breaking-change-checker.lock.yml | 26 +- .github/workflows/changeset.lock.yml | 28 +- .../workflows/chroma-issue-indexer.lock.yml | 16 +- .github/workflows/ci-coach.lock.yml | 40 +- .github/workflows/ci-doctor.lock.yml | 34 +- .../claude-code-user-docs-review.lock.yml | 32 +- .../cli-consistency-checker.lock.yml | 26 +- .../workflows/cli-version-checker.lock.yml | 32 +- .github/workflows/cloclo.lock.yml | 36 +- .../workflows/code-scanning-fixer.lock.yml | 40 +- .github/workflows/code-simplifier.lock.yml | 28 +- .../codex-github-remote-mcp-test.lock.yml | 14 +- .../commit-changes-analyzer.lock.yml | 24 +- .github/workflows/contribution-check.lock.yml | 26 +- .../workflows/copilot-agent-analysis.lock.yml | 36 +- .../copilot-cli-deep-research.lock.yml | 30 +- .../copilot-pr-merged-report.lock.yml | 34 +- .../copilot-pr-nlp-analysis.lock.yml | 50 +- .../copilot-pr-prompt-analysis.lock.yml | 38 +- .../copilot-session-insights.lock.yml | 46 +- .github/workflows/craft.lock.yml | 28 +- .../daily-architecture-diagram.lock.yml | 34 +- .../daily-assign-issue-to-user.lock.yml | 26 +- .github/workflows/daily-choice-test.lock.yml | 24 +- .../workflows/daily-cli-performance.lock.yml | 30 +- .../workflows/daily-cli-tools-tester.lock.yml | 28 +- .github/workflows/daily-code-metrics.lock.yml | 48 +- .../workflows/daily-compiler-quality.lock.yml | 34 +- .../daily-copilot-token-report.lock.yml | 54 +- .github/workflows/daily-doc-healer.lock.yml | 34 +- .github/workflows/daily-doc-updater.lock.yml | 34 +- .github/workflows/daily-fact.lock.yml | 26 +- .github/workflows/daily-file-diet.lock.yml | 26 +- .../workflows/daily-firewall-report.lock.yml | 48 +- .../workflows/daily-issues-report.lock.yml | 46 +- .../daily-malicious-code-scan.lock.yml | 24 +- .../daily-mcp-concurrency-analysis.lock.yml | 34 +- .../daily-multi-device-docs-tester.lock.yml | 30 +- .github/workflows/daily-news.lock.yml | 48 +- .../daily-observability-report.lock.yml | 28 +- .../daily-performance-summary.lock.yml 
| 46 +- .github/workflows/daily-regulatory.lock.yml | 26 +- .../daily-rendering-scripts-verifier.lock.yml | 36 +- .../workflows/daily-repo-chronicle.lock.yml | 44 +- .../daily-safe-output-optimizer.lock.yml | 34 +- .../daily-safe-outputs-conformance.lock.yml | 24 +- .../workflows/daily-secrets-analysis.lock.yml | 26 +- .../daily-security-red-team.lock.yml | 24 +- .github/workflows/daily-semgrep-scan.lock.yml | 26 +- .../daily-syntax-error-quality.lock.yml | 30 +- .../workflows/daily-syntax-error-quality.md | 2 +- .../daily-team-evolution-insights.lock.yml | 24 +- .github/workflows/daily-team-status.lock.yml | 26 +- .../daily-testify-uber-super-expert.lock.yml | 30 +- .../workflows/daily-workflow-updater.lock.yml | 28 +- .github/workflows/deep-report.lock.yml | 46 +- .github/workflows/delight.lock.yml | 30 +- .github/workflows/dependabot-burner.lock.yml | 26 +- .../workflows/dependabot-go-checker.lock.yml | 26 +- .github/workflows/dev-hawk.lock.yml | 28 +- .github/workflows/dev.lock.yml | 26 +- .../developer-docs-consolidator.lock.yml | 34 +- .github/workflows/dictation-prompt.lock.yml | 28 +- .../workflows/discussion-task-miner.lock.yml | 30 +- .github/workflows/docs-noob-tester.lock.yml | 32 +- .github/workflows/draft-pr-cleanup.lock.yml | 26 +- .../duplicate-code-detector.lock.yml | 26 +- .../example-custom-error-patterns.lock.yml | 14 +- .../example-permissions-warning.lock.yml | 14 +- .../example-workflow-analyzer.lock.yml | 26 +- .github/workflows/firewall-escape.lock.yml | 38 +- .github/workflows/firewall.lock.yml | 14 +- .../workflows/functional-pragmatist.lock.yml | 28 +- .../github-mcp-structural-analysis.lock.yml | 44 +- .../github-mcp-tools-report.lock.yml | 34 +- .../github-remote-mcp-auth-test.lock.yml | 26 +- .../workflows/glossary-maintainer.lock.yml | 36 +- .github/workflows/go-fan.lock.yml | 32 +- .github/workflows/go-logger.lock.yml | 38 +- .github/workflows/go-logger.md | 4 +- .../workflows/go-pattern-detector.lock.yml | 28 +- 
.github/workflows/go-pattern-detector.md | 2 +- .github/workflows/gpclean.lock.yml | 34 +- .github/workflows/grumpy-reviewer.lock.yml | 34 +- .github/workflows/hourly-ci-cleaner.lock.yml | 32 +- .github/workflows/hourly-ci-cleaner.md | 4 +- .../workflows/instructions-janitor.lock.yml | 34 +- .github/workflows/issue-arborist.lock.yml | 26 +- .github/workflows/issue-monster.lock.yml | 26 +- .github/workflows/issue-triage-agent.lock.yml | 34 +- .github/workflows/jsweep.lock.yml | 36 +- .../workflows/layout-spec-maintainer.lock.yml | 30 +- .github/workflows/lockfile-stats.lock.yml | 32 +- .github/workflows/mcp-inspector.lock.yml | 44 +- .github/workflows/mergefest.lock.yml | 28 +- .github/workflows/metrics-collector.lock.yml | 20 +- .../workflows/notion-issue-summary.lock.yml | 26 +- .github/workflows/org-health-report.lock.yml | 46 +- .github/workflows/pdf-summary.lock.yml | 34 +- .github/workflows/plan.lock.yml | 26 +- .github/workflows/poem-bot.lock.yml | 40 +- .github/workflows/portfolio-analyst.lock.yml | 48 +- .../workflows/pr-nitpick-reviewer.lock.yml | 34 +- .github/workflows/pr-triage-agent.lock.yml | 30 +- .../prompt-clustering-analysis.lock.yml | 42 +- .github/workflows/python-data-charts.lock.yml | 46 +- .github/workflows/q.lock.yml | 38 +- .github/workflows/refiner.lock.yml | 28 +- .github/workflows/release.lock.yml | 52 +- .github/workflows/release.md | 24 +- .../workflows/repo-audit-analyzer.lock.yml | 34 +- .github/workflows/repo-tree-map.lock.yml | 26 +- .../repository-quality-improver.lock.yml | 34 +- .github/workflows/research.lock.yml | 26 +- .github/workflows/safe-output-health.lock.yml | 34 +- .../schema-consistency-checker.lock.yml | 32 +- .github/workflows/scout.lock.yml | 32 +- ...ecurity-alert-burndown.campaign.g.lock.yml | 1592 +++++++++++++++++ .../security-alert-burndown.campaign.g.md | 653 +++++++ .../workflows/security-compliance.lock.yml | 30 +- .github/workflows/security-review.lock.yml | 36 +- .../semantic-function-refactor.lock.yml | 
24 +- .github/workflows/sergo.lock.yml | 32 +- .github/workflows/shared/ci-data-analysis.md | 4 +- .github/workflows/shared/mcp-debug.md | 4 +- .github/workflows/shared/mcp/drain3.md | 2 +- .../workflows/shared/ollama-threat-scan.md | 2 +- .github/workflows/shared/python-dataviz.md | 4 +- .../shared/trending-charts-simple.md | 4 +- .../workflows/slide-deck-maintainer.lock.yml | 40 +- .github/workflows/slide-deck-maintainer.md | 2 +- .github/workflows/smoke-agent.lock.yml | 26 +- .github/workflows/smoke-claude.lock.yml | 38 +- .github/workflows/smoke-codex.lock.yml | 36 +- .github/workflows/smoke-copilot-arm.lock.yml | 40 +- .github/workflows/smoke-copilot.lock.yml | 40 +- .github/workflows/smoke-gemini.lock.yml | 34 +- .github/workflows/smoke-multi-pr.lock.yml | 28 +- .github/workflows/smoke-project.lock.yml | 28 +- .github/workflows/smoke-temporary-id.lock.yml | 26 +- .github/workflows/smoke-test-tools.lock.yml | 34 +- .../workflows/smoke-workflow-call.lock.yml | 26 +- .../workflows/stale-repo-identifier.lock.yml | 52 +- .github/workflows/stale-repo-identifier.md | 2 +- .../workflows/static-analysis-report.lock.yml | 34 +- .../workflows/step-name-alignment.lock.yml | 32 +- .github/workflows/sub-issue-closer.lock.yml | 26 +- .github/workflows/super-linter.lock.yml | 42 +- .github/workflows/super-linter.md | 6 +- .../workflows/technical-doc-writer.lock.yml | 44 +- .github/workflows/technical-doc-writer.md | 2 +- .github/workflows/terminal-stylist.lock.yml | 26 +- .../test-create-pr-error-handling.lock.yml | 34 +- .github/workflows/test-dispatcher.lock.yml | 26 +- .../test-project-url-default.lock.yml | 26 +- .github/workflows/test-workflow.lock.yml | 14 +- .github/workflows/tidy.lock.yml | 34 +- .github/workflows/tidy.md | 4 +- .github/workflows/typist.lock.yml | 24 +- .../workflows/ubuntu-image-analyzer.lock.yml | 28 +- .github/workflows/unbloat-docs.lock.yml | 44 +- .github/workflows/unbloat-docs.md | 4 +- .github/workflows/video-analyzer.lock.yml | 26 +- 
.../weekly-editors-health-check.lock.yml | 34 +- .../workflows/weekly-issue-summary.lock.yml | 52 +- .../weekly-safe-outputs-spec-review.lock.yml | 28 +- .github/workflows/workflow-generator.lock.yml | 26 +- .../workflow-health-manager.lock.yml | 30 +- .../workflows/workflow-normalizer.lock.yml | 28 +- .../workflow-skill-extractor.lock.yml | 26 +- DEADCODE.md | 61 + Makefile | 6 +- .../src/content/docs/agent-factory-status.mdx | 2 + .../guides/deterministic-agentic-patterns.md | 6 +- .../docs/guides/network-configuration.md | 3 +- .../content/docs/patterns/multi-repo-ops.md | 4 + .../docs/reference/cross-repository.md | 166 +- .../frontmatter-hash-specification.md | 2 +- .../src/content/docs/reference/frontmatter.md | 2 +- .../content/docs/reference/github-tools.md | 24 + docs/src/content/docs/reference/glossary.md | 4 +- .../src/content/docs/reference/mcp-gateway.md | 66 + docs/src/content/docs/reference/network.md | 3 +- docs/src/content/docs/reference/playwright.md | 209 +++ docs/src/content/docs/reference/tools.md | 69 +- docs/src/content/docs/reference/triggers.md | 45 + docs/src/content/docs/setup/cli.md | 7 +- pkg/cli/copilot_agent_test.go | 45 - pkg/cli/exec.go | 139 -- pkg/cli/exec_test.go | 229 --- pkg/cli/logs_display.go | 220 --- pkg/cli/logs_overview_test.go | 239 --- pkg/cli/mcp_inspect_safe_inputs_inspector.go | 134 -- pkg/cli/mcp_inspect_safe_inputs_test.go | 264 --- pkg/cli/update_actions.go | 44 +- pkg/cli/update_actions_test.go | 54 +- pkg/cli/update_command.go | 18 +- pkg/cli/update_command_test.go | 10 +- pkg/cli/upgrade_command.go | 2 +- pkg/cli/validation_output.go | 54 - pkg/cli/validation_output_test.go | 234 --- pkg/console/form.go | 122 -- pkg/console/form_test.go | 169 -- pkg/console/golden_test.go | 118 -- pkg/console/layout.go | 162 -- pkg/console/layout_test.go | 383 ---- pkg/console/select.go | 91 - pkg/console/select_test.go | 87 - pkg/constants/constants.go | 7 + pkg/logger/error_formatting.go | 47 - 
pkg/logger/error_formatting_test.go | 177 -- pkg/parser/ansi_strip.go | 12 - pkg/parser/frontmatter_merge_test.go | 2 - pkg/parser/frontmatter_utils_test.go | 213 --- pkg/parser/virtual_fs_test_helpers.go | 12 - pkg/stringutil/paths.go | 42 - pkg/stringutil/paths_test.go | 129 -- pkg/workflow/action_pins_test.go | 6 +- pkg/workflow/add_labels.go | 32 - pkg/workflow/bundler.go | 589 ------ pkg/workflow/bundler_deduplicate_test.go | 44 - .../bundler_duplicate_modules_test.go | 65 - pkg/workflow/bundler_file_mode.go | 529 ------ pkg/workflow/bundler_file_mode_test.go | 255 --- pkg/workflow/bundler_fs_undefined_test.go | 13 - pkg/workflow/bundler_function_scope_test.go | 13 - pkg/workflow/bundler_indentation_test.go | 58 - pkg/workflow/bundler_inline_test.go | 59 - pkg/workflow/bundler_integration_test.go | 55 - pkg/workflow/bundler_quotes_test.go | 103 -- pkg/workflow/bundler_runtime_mode_test.go | 79 - pkg/workflow/bundler_runtime_validation.go | 176 -- pkg/workflow/bundler_safety_validation.go | 223 --- pkg/workflow/bundler_scope_mixing_test.go | 13 - pkg/workflow/bundler_scope_narrowing_test.go | 13 - pkg/workflow/bundler_script_validation.go | 149 -- .../bundler_script_validation_test.go | 244 --- pkg/workflow/bundler_test.go | 79 - pkg/workflow/cache_memory_integration_test.go | 2 +- .../cache_memory_restore_only_test.go | 32 +- .../cache_memory_threat_detection_test.go | 32 +- pkg/workflow/checkout_manager.go | 28 +- pkg/workflow/checkout_manager_test.go | 6 +- pkg/workflow/codex_engine_test.go | 3 +- pkg/workflow/compile_outputs_pr_test.go | 2 +- pkg/workflow/compiler.go | 2 +- pkg/workflow/compiler_action_mode_test.go | 187 -- pkg/workflow/compiler_activation_jobs.go | 10 +- pkg/workflow/compiler_artifacts_test.go | 6 +- pkg/workflow/compiler_cache_test.go | 45 +- pkg/workflow/compiler_custom_actions_test.go | 192 -- pkg/workflow/compiler_customsteps_test.go | 6 +- .../compiler_orchestrator_workflow.go | 5 + pkg/workflow/compiler_string_api.go | 5 + 
pkg/workflow/compiler_types.go | 37 - pkg/workflow/copilot_participant_steps.go | 153 -- .../copilot_participant_steps_test.go | 49 - pkg/workflow/create_issue.go | 149 -- pkg/workflow/create_pull_request.go | 207 --- .../custom_action_copilot_token_test.go | 51 - pkg/workflow/data/action_pins.json | 128 +- pkg/workflow/dependency_tracker.go | 121 -- pkg/workflow/dependency_tracker_test.go | 185 -- pkg/workflow/env_mirror.go | 137 -- pkg/workflow/env_mirror_test.go | 221 --- pkg/workflow/git_patch_test.go | 2 +- pkg/workflow/inline_imports_test.go | 254 --- pkg/workflow/js.go | 96 - pkg/workflow/markdown_security_scanner.go | 32 + .../markdown_security_scanner_test.go | 65 +- pkg/workflow/markdown_unfencing.go | 141 -- pkg/workflow/markdown_unfencing_test.go | 277 --- pkg/workflow/mcp_gateway_config.go | 16 +- pkg/workflow/mcp_gateway_config_test.go | 113 +- pkg/workflow/mcp_github_config.go | 23 + pkg/workflow/mcp_logs_upload_test.go | 8 +- pkg/workflow/mcp_renderer.go | 63 +- pkg/workflow/mcp_setup_generator.go | 17 + pkg/workflow/metrics_test.go | 90 - pkg/workflow/multiline_test.go | 10 +- pkg/workflow/prompt_constants.go | 28 + pkg/workflow/prompt_step.go | 64 - pkg/workflow/prompt_step_helper_test.go | 138 -- pkg/workflow/prompt_step_test.go | 146 -- pkg/workflow/runtime_import_checkout_test.go | 88 + pkg/workflow/runtime_integration_test.go | 4 +- .../runtime_setup_integration_test.go | 32 +- pkg/workflow/runtime_setup_test.go | 32 +- pkg/workflow/safe_output_builder.go | 202 --- pkg/workflow/safe_output_helpers_test.go | 2 +- pkg/workflow/safe_outputs_app_import_test.go | 70 - pkg/workflow/safe_outputs_app_test.go | 149 -- .../safe_outputs_env_integration_test.go | 296 --- pkg/workflow/safe_outputs_env_test.go | 196 -- pkg/workflow/safe_outputs_integration_test.go | 423 ----- pkg/workflow/safe_outputs_messages_test.go | 52 - .../schemas/mcp-gateway-config.schema.json | 10 + pkg/workflow/script_registry.go | 323 +--- pkg/workflow/script_registry_test.go | 
298 --- pkg/workflow/setup_action_paths.go | 5 + pkg/workflow/sh.go | 152 -- pkg/workflow/sh_integration_test.go | 371 ---- pkg/workflow/sh_test.go | 309 ---- pkg/workflow/staged_add_issue_labels_test.go | 73 - pkg/workflow/staged_create_issue_test.go | 88 - pkg/workflow/staged_pull_request_test.go | 88 - pkg/workflow/template_rendering_test.go | 2 +- .../basic-copilot.golden | 14 +- .../smoke-copilot.golden | 18 +- .../with-imports.golden | 14 +- pkg/workflow/threat_detection_test.go | 2 +- pkg/workflow/tools_parser.go | 8 + pkg/workflow/tools_types.go | 60 +- pkg/workflow/tools_validation.go | 165 ++ pkg/workflow/tools_validation_test.go | 203 +++ pkg/workflow/unified_prompt_step.go | 39 - scratchpad/guard-policies-specification.md | 307 ++++ 330 files changed, 7182 insertions(+), 15035 deletions(-) create mode 100644 .github/workflows/security-alert-burndown.campaign.g.lock.yml create mode 100644 .github/workflows/security-alert-burndown.campaign.g.md create mode 100644 DEADCODE.md create mode 100644 docs/src/content/docs/reference/playwright.md delete mode 100644 pkg/cli/exec.go delete mode 100644 pkg/cli/exec_test.go delete mode 100644 pkg/cli/logs_display.go delete mode 100644 pkg/cli/mcp_inspect_safe_inputs_inspector.go delete mode 100644 pkg/cli/mcp_inspect_safe_inputs_test.go delete mode 100644 pkg/cli/validation_output.go delete mode 100644 pkg/cli/validation_output_test.go delete mode 100644 pkg/console/form.go delete mode 100644 pkg/console/form_test.go delete mode 100644 pkg/console/layout.go delete mode 100644 pkg/console/layout_test.go delete mode 100644 pkg/console/select.go delete mode 100644 pkg/console/select_test.go delete mode 100644 pkg/logger/error_formatting.go delete mode 100644 pkg/logger/error_formatting_test.go delete mode 100644 pkg/parser/ansi_strip.go delete mode 100644 pkg/parser/virtual_fs_test_helpers.go delete mode 100644 pkg/stringutil/paths.go delete mode 100644 pkg/stringutil/paths_test.go delete mode 100644 
pkg/workflow/bundler.go delete mode 100644 pkg/workflow/bundler_deduplicate_test.go delete mode 100644 pkg/workflow/bundler_duplicate_modules_test.go delete mode 100644 pkg/workflow/bundler_file_mode.go delete mode 100644 pkg/workflow/bundler_file_mode_test.go delete mode 100644 pkg/workflow/bundler_fs_undefined_test.go delete mode 100644 pkg/workflow/bundler_function_scope_test.go delete mode 100644 pkg/workflow/bundler_indentation_test.go delete mode 100644 pkg/workflow/bundler_inline_test.go delete mode 100644 pkg/workflow/bundler_integration_test.go delete mode 100644 pkg/workflow/bundler_quotes_test.go delete mode 100644 pkg/workflow/bundler_runtime_mode_test.go delete mode 100644 pkg/workflow/bundler_runtime_validation.go delete mode 100644 pkg/workflow/bundler_safety_validation.go delete mode 100644 pkg/workflow/bundler_scope_mixing_test.go delete mode 100644 pkg/workflow/bundler_scope_narrowing_test.go delete mode 100644 pkg/workflow/bundler_script_validation.go delete mode 100644 pkg/workflow/bundler_script_validation_test.go delete mode 100644 pkg/workflow/bundler_test.go delete mode 100644 pkg/workflow/copilot_participant_steps.go delete mode 100644 pkg/workflow/copilot_participant_steps_test.go delete mode 100644 pkg/workflow/custom_action_copilot_token_test.go delete mode 100644 pkg/workflow/dependency_tracker.go delete mode 100644 pkg/workflow/dependency_tracker_test.go delete mode 100644 pkg/workflow/env_mirror.go delete mode 100644 pkg/workflow/env_mirror_test.go delete mode 100644 pkg/workflow/markdown_unfencing.go delete mode 100644 pkg/workflow/markdown_unfencing_test.go create mode 100644 pkg/workflow/prompt_constants.go delete mode 100644 pkg/workflow/prompt_step.go delete mode 100644 pkg/workflow/prompt_step_helper_test.go delete mode 100644 pkg/workflow/prompt_step_test.go delete mode 100644 pkg/workflow/safe_output_builder.go delete mode 100644 pkg/workflow/safe_outputs_env_integration_test.go delete mode 100644 
pkg/workflow/safe_outputs_env_test.go delete mode 100644 pkg/workflow/script_registry_test.go create mode 100644 pkg/workflow/setup_action_paths.go delete mode 100644 pkg/workflow/sh.go delete mode 100644 pkg/workflow/sh_integration_test.go delete mode 100644 pkg/workflow/sh_test.go delete mode 100644 pkg/workflow/staged_add_issue_labels_test.go delete mode 100644 pkg/workflow/staged_create_issue_test.go delete mode 100644 pkg/workflow/staged_pull_request_test.go create mode 100644 scratchpad/guard-policies-specification.md diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 82e0cfa4a9..14c6bce0f2 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -25,6 +25,16 @@ "pull-requests": "write", "workflows": "write" } + }, + "githubnext/gh-aw": { + "permissions": { + "actions": "write", + "contents": "write", + "discussions": "read", + "issues": "read", + "pull-requests": "write", + "workflows": "write" + } } } } diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index 80f6d33962..103020bbd7 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -1,34 +1,29 @@ { "entries": { - "actions/ai-inference@v2.0.6": { + "actions/ai-inference@v2.0.7": { "repo": "actions/ai-inference", - "version": "v2.0.6", - "sha": "a380166897b5408b8fb7dddd148142794cb5624a" + "version": "v2.0.7", + "sha": "e09e65981758de8b2fdab13c2bfb7c7d5493b0b6" }, - "actions/attest-build-provenance@v2.4.0": { + "actions/attest-build-provenance@v4.1.0": { "repo": "actions/attest-build-provenance", - "version": "v2.4.0", - "sha": "e8998f949152b193b063cb0ec769d69d929409be" + "version": "v4.1.0", + "sha": "a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32" }, - "actions/cache/restore@v4.3.0": { + "actions/cache/restore@v5.0.3": { "repo": "actions/cache/restore", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" + "version": "v5.0.3", + "sha": "cdf6c1fa76f9f475f3d7449005a359c84ca0f306" 
}, - "actions/cache/save@v4.3.0": { + "actions/cache/save@v5.0.3": { "repo": "actions/cache/save", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" + "version": "v5.0.3", + "sha": "cdf6c1fa76f9f475f3d7449005a359c84ca0f306" }, - "actions/cache@v4.3.0": { + "actions/cache@v5.0.3": { "repo": "actions/cache", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" - }, - "actions/checkout@v4.3.1": { - "repo": "actions/checkout", - "version": "v4.3.1", - "sha": "34e114876b0b11c390a56381ad16ebd13914f8d5" + "version": "v5.0.3", + "sha": "cdf6c1fa76f9f475f3d7449005a359c84ca0f306" }, "actions/checkout@v5.0.1": { "repo": "actions/checkout", @@ -40,20 +35,15 @@ "version": "v6.0.2", "sha": "de0fac2e4500dabe0009e67214ff5f5447ce83dd" }, - "actions/create-github-app-token@v2.2.1": { + "actions/create-github-app-token@v3.0.0-beta.2": { "repo": "actions/create-github-app-token", - "version": "v2.2.1", - "sha": "29824e69f54612133e76f7eaac726eef6c875baf" + "version": "v3.0.0-beta.2", + "sha": "bf559f85448f9380bcfa2899dbdc01eb5b37be3a" }, - "actions/download-artifact@v6": { + "actions/download-artifact@v8.0.0": { "repo": "actions/download-artifact", - "version": "v6", - "sha": "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53" - }, - "actions/github-script@v7.1.0": { - "repo": "actions/github-script", - "version": "v7.1.0", - "sha": "f28e40c7f34bde8b3046d885e986cb6290c5673b" + "version": "v8.0.0", + "sha": "70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3" }, "actions/github-script@v8": { "repo": "actions/github-script", @@ -65,70 +55,50 @@ "version": "v4.3.1", "sha": "67a3573c9a986a3f9c594539f4ab511d57bb3ce9" }, - "actions/setup-go@v5.6.0": { - "repo": "actions/setup-go", - "version": "v5.6.0", - "sha": "40f1582b2485089dde7abd97c1529aa768e1baff" + "actions/setup-dotnet@v5.1.0": { + "repo": "actions/setup-dotnet", + "version": "v5.1.0", + "sha": "baa11fbfe1d6520db94683bd5c7a3818018e4309" }, - "actions/setup-go@v6": { + "actions/setup-go@v6.3.0": { "repo": 
"actions/setup-go", - "version": "v6", + "version": "v6.3.0", "sha": "4b73464bb391d4059bd26b0524d20df3927bd417" }, - "actions/setup-go@v6.2.0": { - "repo": "actions/setup-go", - "version": "v6.2.0", - "sha": "7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5" - }, - "actions/setup-java@v4.8.0": { + "actions/setup-java@v5.2.0": { "repo": "actions/setup-java", - "version": "v4.8.0", - "sha": "c1e323688fd81a25caa38c78aa6df2d33d3e20d9" - }, - "actions/setup-node@v4.4.0": { - "repo": "actions/setup-node", - "version": "v4.4.0", - "sha": "49933ea5288caeca8642d1e84afbd3f7d6820020" + "version": "v5.2.0", + "sha": "be666c2fcd27ec809703dec50e508c2fdc7f6654" }, "actions/setup-node@v6.2.0": { "repo": "actions/setup-node", "version": "v6.2.0", "sha": "6044e13b5dc448c55e2357c09f80417699197238" }, - "actions/setup-python@v5.6.0": { + "actions/setup-python@v6.2.0": { "repo": "actions/setup-python", - "version": "v5.6.0", - "sha": "a26af69be951a213d495a4c3e4e4022e16d87065" - }, - "actions/upload-artifact@v4.6.2": { - "repo": "actions/upload-artifact", - "version": "v4.6.2", - "sha": "ea165f8d65b6e75b540449e92b4886f43607fa02" + "version": "v6.2.0", + "sha": "a309ff8b426b58ec0e2a45f0f869d46889d02405" }, - "actions/upload-artifact@v5": { + "actions/upload-artifact@v5.0.0": { "repo": "actions/upload-artifact", - "version": "v5", + "version": "v5.0.0", "sha": "330a01c490aca151604b8cf639adc76d48f6c5d4" }, - "actions/upload-artifact@v6": { + "actions/upload-artifact@v7.0.0": { "repo": "actions/upload-artifact", - "version": "v6", - "sha": "b7c566a772e6b6bfb58ed0dc250532a479d7789f" + "version": "v7.0.0", + "sha": "bbbca2ddaa5d8feaa63e36b76fdaad77386f024f" }, - "anchore/sbom-action@v0": { + "anchore/sbom-action@v0.23.0": { "repo": "anchore/sbom-action", - "version": "v0", + "version": "v0.23.0", "sha": "17ae1740179002c89186b61233e0f892c3118b11" }, - "anchore/sbom-action@v0.22.2": { - "repo": "anchore/sbom-action", - "version": "v0.22.2", - "sha": "28d71544de8eaf1b958d335707167c5f783590ad" - }, - 
"astral-sh/setup-uv@v5.4.2": { + "astral-sh/setup-uv@v7.3.0": { "repo": "astral-sh/setup-uv", - "version": "v5.4.2", - "sha": "d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86" + "version": "v7.3.0", + "sha": "eac588ad8def6316056a12d4907a9d4d84ff7a3b" }, "cli/gh-extension-precompile@v2.1.0": { "repo": "cli/gh-extension-precompile", @@ -165,15 +135,15 @@ "version": "v1.20.4", "sha": "dff508cca8ce57162e7aa6c4769a4f97c2fed638" }, - "github/codeql-action/upload-sarif@v3.32.4": { + "github/codeql-action/upload-sarif@v4.32.4": { "repo": "github/codeql-action/upload-sarif", - "version": "v3.32.4", - "sha": "85b88275909735f5bc23196090e03d2eb148b3de" + "version": "v4.32.4", + "sha": "e34fc2711fb7964ca6850c8a8382121f34745f3b" }, - "github/stale-repos@v3.0.2": { + "github/stale-repos@v8.0.4": { "repo": "github/stale-repos", - "version": "v3.0.2", - "sha": "a21e55567b83cf3c3f3f9085d3038dc6cee02598" + "version": "v8.0.4", + "sha": "6084a41431c4ce8842a7e879b1a15082b88742ae" }, "haskell-actions/setup@v2.10.3": { "repo": "haskell-actions/setup", diff --git a/.github/workflows/agent-performance-analyzer.lock.yml b/.github/workflows/agent-performance-analyzer.lock.yml index f020f03ff7..82d70f8344 100644 --- a/.github/workflows/agent-performance-analyzer.lock.yml +++ b/.github/workflows/agent-performance-analyzer.lock.yml @@ -221,7 +221,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -273,7 +273,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -789,10 +789,11 @@ jobs: export MCP_GATEWAY_API_KEY 
export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -832,7 +833,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -843,7 +845,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -928,7 +930,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -950,13 +952,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1001,7 +1003,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -1010,7 +1012,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1113,7 +1115,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1169,7 +1171,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1314,7 +1316,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1377,7 +1379,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1404,7 +1406,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/agent-persona-explorer.lock.yml b/.github/workflows/agent-persona-explorer.lock.yml index de07c4b9ba..6dc915117a 100644 --- a/.github/workflows/agent-persona-explorer.lock.yml +++ b/.github/workflows/agent-persona-explorer.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -274,7 +274,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -309,7 +309,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 
# v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -677,10 +677,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -720,7 +721,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -731,7 +733,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -816,7 +818,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -838,13 +840,13 @@ 
jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -887,7 +889,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -895,7 +897,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -997,7 +999,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1052,7 +1054,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1191,7 +1193,7 @@ jobs: destination: /opt/gh-aw/actions - name: 
Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1218,7 +1220,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1245,7 +1247,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1261,7 +1263,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/agentics-maintenance.yml b/.github/workflows/agentics-maintenance.yml index 900e2c6b0e..bcb0a5549f 100644 --- a/.github/workflows/agentics-maintenance.yml +++ b/.github/workflows/agentics-maintenance.yml @@ -199,7 +199,7 @@ jobs: - name: Upload secret validation report if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: secret-validation-report path: secret-validation-report.md diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml index 
7d21694df4..73155c020c 100644 --- a/.github/workflows/ai-moderator.lock.yml +++ b/.github/workflows/ai-moderator.lock.yml @@ -269,7 +269,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -319,7 +319,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Cache cache-memory file share data - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: spam-tracking-${{ github.repository_owner }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -707,10 +707,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e 
GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -761,7 +762,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -772,7 +774,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await 
generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -834,7 +836,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -856,13 +858,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -920,7 +922,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -962,7 +964,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1132,7 +1134,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1159,7 +1161,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/archie.lock.yml b/.github/workflows/archie.lock.yml index 2edd186389..b0b69150eb 100644 --- a/.github/workflows/archie.lock.yml +++ b/.github/workflows/archie.lock.yml @@ -261,7 +261,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -635,10 +635,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e 
GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -674,7 +675,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -685,7 +687,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -770,7 +772,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -793,13 +795,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -844,7 +846,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -947,7 +949,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1002,7 +1004,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1174,7 +1176,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1201,7 +1203,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/artifacts-summary.lock.yml b/.github/workflows/artifacts-summary.lock.yml index 9826ca5e99..ad9ddf1bd8 100644 --- a/.github/workflows/artifacts-summary.lock.yml +++ b/.github/workflows/artifacts-summary.lock.yml @@ -211,7 +211,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -598,10 +598,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - 
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e 
GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -629,7 +630,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -640,7 +642,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -725,7 +727,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -747,13 +749,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -798,7 +800,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -901,7 +903,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -955,7 +957,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1062,7 +1064,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1089,7 +1091,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml index dbb55a4a07..69f0015abd 100644 --- a/.github/workflows/audit-workflows.lock.yml +++ 
b/.github/workflows/audit-workflows.lock.yml @@ -29,7 +29,7 @@ # - shared/reporting.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"c12ee1d68dbf447087d51abd3be3bfc9418a2143d48e2326070b55197028b828"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"e687ed6ea9c097f90e8e0c3181f674b2bf92734fd253c3408f6775da1b76e745"} name: "Agentic Workflow Audit Agent" "on": @@ -241,7 +241,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -291,7 +291,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -330,7 +330,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -338,7 +338,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -351,7 +351,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: 
actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -763,10 +763,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e 
GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -803,7 +804,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -814,7 +816,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -956,7 +958,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -978,7 +980,7 @@ jobs: await main(); - name: Upload sanitized agent output if: 
always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -1021,14 +1023,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1036,7 +1038,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1045,7 +1047,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1157,7 +1159,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1214,7 +1216,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1333,7 +1335,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1396,7 +1398,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1423,7 +1425,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1450,7 +1452,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1466,7 +1468,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + 
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1511,7 +1513,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1522,7 +1524,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/auto-triage-issues.lock.yml b/.github/workflows/auto-triage-issues.lock.yml index c75acb8b61..461289b45f 100644 --- a/.github/workflows/auto-triage-issues.lock.yml +++ b/.github/workflows/auto-triage-issues.lock.yml @@ -226,7 +226,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -651,10 +651,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e 
GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -682,7 +683,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -693,7 +695,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -794,7 +796,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -816,13 +818,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -867,7 +869,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -970,7 +972,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1025,7 +1027,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1180,7 +1182,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1207,7 +1209,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/blog-auditor.lock.yml b/.github/workflows/blog-auditor.lock.yml index 10eb158b7d..520f4e6019 100644 --- a/.github/workflows/blog-auditor.lock.yml +++ b/.github/workflows/blog-auditor.lock.yml @@ -219,7 +219,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -611,10 +611,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -657,7 +658,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -668,7 +670,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -842,7 +844,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -864,7 +866,7 @@ jobs: await main(); - name: Upload 
sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -907,7 +909,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1019,7 +1021,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1073,7 +1075,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1186,7 +1188,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1213,7 +1215,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git 
a/.github/workflows/bot-detection.lock.yml b/.github/workflows/bot-detection.lock.yml index d7225de33d..2336d08fba 100644 --- a/.github/workflows/bot-detection.lock.yml +++ b/.github/workflows/bot-detection.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -752,10 +752,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export 
MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -783,7 +784,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -794,7 +796,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -879,7 
+881,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -901,13 +903,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -952,7 +954,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -991,7 +993,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1894,7 +1896,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1921,7 +1923,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/brave.lock.yml b/.github/workflows/brave.lock.yml index e7127d3425..ccb2bc9f8c 100644 --- a/.github/workflows/brave.lock.yml +++ b/.github/workflows/brave.lock.yml @@ -246,7 +246,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -620,10 +620,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e 
GH_AW_SAFE_OUTPUTS_API_KEY -e BRAVE_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e BRAVE_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -661,7 +662,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -672,7 +674,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: 
Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -758,7 +760,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -781,13 +783,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -832,7 +834,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -935,7 +937,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -990,7 +992,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1159,7 +1161,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1186,7 +1188,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/breaking-change-checker.lock.yml b/.github/workflows/breaking-change-checker.lock.yml index a46c7249bd..1f1616b806 100644 --- a/.github/workflows/breaking-change-checker.lock.yml +++ b/.github/workflows/breaking-change-checker.lock.yml @@ -213,7 +213,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -622,10 +622,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e 
GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v 
/opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -653,7 +654,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -664,7 +666,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -769,7 +771,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -791,13 +793,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -842,7 +844,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f 
# v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -945,7 +947,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -998,7 +1000,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1153,7 +1155,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1193,7 +1195,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/changeset.lock.yml b/.github/workflows/changeset.lock.yml index 9f9c79e377..5905cdc523 100644 --- a/.github/workflows/changeset.lock.yml +++ b/.github/workflows/changeset.lock.yml @@ -261,7 +261,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -722,10 +722,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e 
GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -776,7 +777,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -787,7 +789,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -849,7 +851,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -871,13 +873,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -922,7 +924,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -997,7 +999,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1050,7 +1052,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1211,7 +1213,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1222,7 +1224,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - 
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1267,7 +1269,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/chroma-issue-indexer.lock.yml b/.github/workflows/chroma-issue-indexer.lock.yml index 8769c1396c..b69dbe4575 100644 --- a/.github/workflows/chroma-issue-indexer.lock.yml +++ b/.github/workflows/chroma-issue-indexer.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -253,7 +253,7 @@ jobs: run: | mkdir -p /tmp/gh-aw/cache-memory-chroma - name: Cache cache-memory file share data (chroma) - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-chroma-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory-chroma @@ -361,10 +361,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e 
GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -409,7 +410,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -420,7 +422,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -503,7 +505,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -548,7 +550,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/.github/workflows/ci-coach.lock.yml b/.github/workflows/ci-coach.lock.yml index 9fef18f8f6..1eb3d514da 100644 --- a/.github/workflows/ci-coach.lock.yml +++ b/.github/workflows/ci-coach.lock.yml @@ -30,7 +30,7 @@ # - shared/ci-data-analysis.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"f2882ab9d1bbc3ea124c205e3783d6f6a7edae6a21e9efade7cc92014a0f5dd9"} +# gh-aw-metadata: 
{"schema_version":"v1","frontmatter_hash":"5191e6c822b25fd810bcb75bbe5eaa6062a26a820ff6c01145e7e2b2507f0426"} name: "CI Optimization Coach" "on": @@ -232,7 +232,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -296,7 +296,7 @@ jobs: name: Download CI workflow runs from last 7 days run: "# Download workflow runs for the ci workflow\ngh run list --repo ${{ github.repository }} --workflow=ci.yml --limit 100 --json databaseId,status,conclusion,createdAt,updatedAt,displayTitle,headBranch,event,url,workflowDatabaseId,number > /tmp/ci-runs.json\n\n# Create directory for artifacts\nmkdir -p /tmp/ci-artifacts\n\n# Download artifacts from recent runs (last 5 successful runs)\necho \"Downloading artifacts from recent CI runs...\"\ngh run list --repo ${{ github.repository }} --workflow=ci.yml --status success --limit 5 --json databaseId | jq -r '.[].databaseId' | while read -r run_id; do\n echo \"Processing run $run_id\"\n gh run download \"$run_id\" --repo ${{ github.repository }} --dir \"/tmp/ci-artifacts/$run_id\" 2>/dev/null || echo \"No artifacts for run $run_id\"\ndone\n\necho \"CI runs data saved to /tmp/ci-runs.json\"\necho \"Artifacts saved to /tmp/ci-artifacts/\"\n\n# Summarize downloaded artifacts\necho \"## Downloaded Artifacts\" >> \"$GITHUB_STEP_SUMMARY\"\nfind /tmp/ci-artifacts -type f -name \"*.txt\" -o -name \"*.html\" -o -name \"*.json\" | head -20 | while read -r f; do\n echo \"- $(basename \"$f\")\" >> \"$GITHUB_STEP_SUMMARY\"\ndone\n" - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: true go-version-file: go.mod @@ -327,7 +327,7 @@ jobs: - name: Create 
cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -691,10 +691,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -722,7 +723,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -733,7 +735,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -818,7 +820,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f 
# v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -840,13 +842,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -889,7 +891,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -897,7 +899,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1001,7 +1003,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1056,7 +1058,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1185,7 +1187,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1196,7 +1198,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1241,7 +1243,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1268,7 +1270,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1284,7 +1286,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/ci-doctor.lock.yml 
b/.github/workflows/ci-doctor.lock.yml index 01ddd29b10..7d07b0756f 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -253,7 +253,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -315,7 +315,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -837,10 +837,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e 
GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -868,7 +869,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -879,7 +881,7 @@ jobs: const { 
generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -964,7 +966,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -986,13 +988,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1035,7 +1037,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1043,7 +1045,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1145,7 +1147,7 @@ jobs: await main(); - name: Upload threat 
detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1201,7 +1203,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1365,7 +1367,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1392,7 +1394,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1419,7 +1421,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1435,7 +1437,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ 
env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/claude-code-user-docs-review.lock.yml b/.github/workflows/claude-code-user-docs-review.lock.yml index 69dd9b9e6a..7e3d34e0dd 100644 --- a/.github/workflows/claude-code-user-docs-review.lock.yml +++ b/.github/workflows/claude-code-user-docs-review.lock.yml @@ -215,7 +215,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -271,7 +271,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -619,10 +619,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -648,7 +649,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -659,7 +661,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -797,7 +799,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -819,7 +821,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -860,7 +862,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -868,7 +870,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -980,7 +982,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' 
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1035,7 +1037,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1148,7 +1150,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1175,7 +1177,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1202,7 +1204,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1218,7 +1220,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff 
--git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml index 023108a6b8..d30c4578d4 100644 --- a/.github/workflows/cli-consistency-checker.lock.yml +++ b/.github/workflows/cli-consistency-checker.lock.yml @@ -200,7 +200,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -611,10 +611,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -642,7 +643,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -653,7 +655,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: 
prompt path: /tmp/gh-aw/aw-prompts @@ -738,7 +740,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -760,13 +762,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -811,7 +813,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -914,7 +916,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -967,7 +969,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1071,7 +1073,7 
@@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1098,7 +1100,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/cli-version-checker.lock.yml b/.github/workflows/cli-version-checker.lock.yml index 00bb9b06bf..d7f84c8873 100644 --- a/.github/workflows/cli-version-checker.lock.yml +++ b/.github/workflows/cli-version-checker.lock.yml @@ -225,7 +225,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -282,7 +282,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -652,10 +652,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF 
-e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -681,7 +682,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -692,7 +694,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -831,7 +833,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -853,7 +855,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -894,7 +896,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -902,7 +904,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1014,7 +1016,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1068,7 +1070,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1173,7 +1175,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1200,7 +1202,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1227,7 +1229,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1243,7 +1245,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/cloclo.lock.yml b/.github/workflows/cloclo.lock.yml index 1c03f4143e..32c1ab68d3 100644 --- a/.github/workflows/cloclo.lock.yml +++ b/.github/workflows/cloclo.lock.yml @@ -318,7 +318,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -367,7 +367,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -405,7 +405,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: cloclo-memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -838,10 +838,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" 
+ export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -911,7 +912,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -922,7 +924,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1104,7 +1106,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1127,7 +1129,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -1168,7 +1170,7 @@ jobs: echo 'AWF binary not installed, skipping firewall 
log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1176,7 +1178,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1289,7 +1291,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1345,7 +1347,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1546,7 +1548,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1557,7 +1559,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: 
/tmp/gh-aw/ @@ -1602,7 +1604,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1629,7 +1631,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1645,7 +1647,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: cloclo-memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/code-scanning-fixer.lock.yml b/.github/workflows/code-scanning-fixer.lock.yml index 6909620a8b..5575202d02 100644 --- a/.github/workflows/code-scanning-fixer.lock.yml +++ b/.github/workflows/code-scanning-fixer.lock.yml @@ -216,7 +216,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -269,7 +269,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: 
key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -684,10 +684,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e 
DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -715,7 +716,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -726,7 +728,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -811,7 +813,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -833,13 +835,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -884,14 +886,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (campaigns) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-campaigns path: /tmp/gh-aw/repo-memory/campaigns retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -899,7 +901,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1003,7 +1005,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1059,7 +1061,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 
# v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1231,7 +1233,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (campaigns) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-campaigns @@ -1295,7 +1297,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1306,7 +1308,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1351,7 +1353,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1378,7 +1380,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: 
cache-memory @@ -1394,7 +1396,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/code-simplifier.lock.yml b/.github/workflows/code-simplifier.lock.yml index d01333e96f..6db2f6f004 100644 --- a/.github/workflows/code-simplifier.lock.yml +++ b/.github/workflows/code-simplifier.lock.yml @@ -222,7 +222,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -630,10 +630,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e 
GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -661,7 +662,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF 
@@ -672,7 +674,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -757,7 +759,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -779,13 +781,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -830,7 +832,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -933,7 +935,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: 
/tmp/gh-aw/threat-detection/detection.log @@ -987,7 +989,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1161,7 +1163,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1172,7 +1174,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1217,7 +1219,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/codex-github-remote-mcp-test.lock.yml b/.github/workflows/codex-github-remote-mcp-test.lock.yml index 47e09e1e91..d5d110da67 100644 --- a/.github/workflows/codex-github-remote-mcp-test.lock.yml +++ b/.github/workflows/codex-github-remote-mcp-test.lock.yml @@ -199,7 +199,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 
with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -340,10 +340,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL 
-e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -380,7 +381,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -391,7 +393,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -451,7 +453,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -496,7 +498,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git 
a/.github/workflows/commit-changes-analyzer.lock.yml b/.github/workflows/commit-changes-analyzer.lock.yml index 325f55f02d..11117f4b15 100644 --- a/.github/workflows/commit-changes-analyzer.lock.yml +++ b/.github/workflows/commit-changes-analyzer.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -610,10 +610,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -639,7 +640,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -650,7 +652,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts 
@@ -785,7 +787,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -807,7 +809,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -850,7 +852,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -962,7 +964,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1016,7 +1018,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1124,7 +1126,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: 
agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1151,7 +1153,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/contribution-check.lock.yml b/.github/workflows/contribution-check.lock.yml index 445d16f24b..4f16cf255a 100644 --- a/.github/workflows/contribution-check.lock.yml +++ b/.github/workflows/contribution-check.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -689,10 +689,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e 
GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -719,7 +720,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -730,7 +732,7 @@ jobs: 
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -815,7 +817,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -837,13 +839,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -888,7 +890,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -990,7 +992,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1045,7 
+1047,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1152,7 +1154,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1179,7 +1181,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/copilot-agent-analysis.lock.yml b/.github/workflows/copilot-agent-analysis.lock.yml index fdd88269af..5bb91c02c9 100644 --- a/.github/workflows/copilot-agent-analysis.lock.yml +++ b/.github/workflows/copilot-agent-analysis.lock.yml @@ -243,7 +243,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -306,7 +306,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: 
/tmp/gh-aw/cache-memory @@ -664,10 +664,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -693,7 +694,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -704,7 +706,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -842,7 +844,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -864,7 +866,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -907,14 +909,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -922,7 +924,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1034,7 +1036,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1090,7 +1092,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1205,7 +1207,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions 
identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1267,7 +1269,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1294,7 +1296,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1321,7 +1323,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1337,7 +1339,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/copilot-cli-deep-research.lock.yml b/.github/workflows/copilot-cli-deep-research.lock.yml index 59e0d5f699..4a0fb05325 100644 --- a/.github/workflows/copilot-cli-deep-research.lock.yml +++ b/.github/workflows/copilot-cli-deep-research.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash 
/opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -620,10 +620,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -651,7 +652,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -662,7 +664,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -769,7 +771,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -791,13 +793,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -842,7 +844,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -851,7 +853,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -954,7 +956,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1009,7 +1011,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1123,7 +1125,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1185,7 +1187,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1212,7 +1214,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/copilot-pr-merged-report.lock.yml b/.github/workflows/copilot-pr-merged-report.lock.yml index 52f24a50c3..fe8ebd728c 100644 --- a/.github/workflows/copilot-pr-merged-report.lock.yml +++ b/.github/workflows/copilot-pr-merged-report.lock.yml @@ -230,7 +230,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -294,7 +294,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - 
name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -742,10 +742,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e 
MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -780,7 +781,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -791,7 +793,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -878,7 +880,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -900,13 +902,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -958,7 +960,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -966,7 +968,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1070,7 +1072,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1125,7 +1127,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1232,7 +1234,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1259,7 +1261,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1286,7 +1288,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1302,7 +1304,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml index a2f8ab2889..6b2a9177fc 100644 --- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml +++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml @@ -31,7 +31,7 @@ # - shared/reporting.md # - shared/copilot-pr-analysis-base.md # -# gh-aw-metadata: 
{"schema_version":"v1","frontmatter_hash":"201f0114992833aa24ee486a62232b397500ce82339ede729039c07855182118"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"4bbea2b61e80620b1e1f893a467daafe3523322361acb781532709b8e7e2be1d"} name: "Copilot PR Conversation NLP Analysis" "on": @@ -243,7 +243,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -301,7 +301,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -309,7 +309,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -336,7 +336,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: 
actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -718,10 +718,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -749,7 +750,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -760,7 +762,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -848,7 +850,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ 
env.GH_AW_SAFE_OUTPUTS }} @@ -870,13 +872,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -921,14 +923,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -936,7 +938,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -945,7 +947,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1048,7 +1050,7 @@ jobs: await main(); - name: Upload threat detection log if: 
always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1105,7 +1107,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1219,7 +1221,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1281,7 +1283,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1308,7 +1310,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1335,7 +1337,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + 
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1351,7 +1353,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1396,7 +1398,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1407,7 +1409,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/copilot-pr-prompt-analysis.lock.yml b/.github/workflows/copilot-pr-prompt-analysis.lock.yml index 60ed8cc88a..0c09391e4c 100644 --- a/.github/workflows/copilot-pr-prompt-analysis.lock.yml +++ b/.github/workflows/copilot-pr-prompt-analysis.lock.yml @@ -238,7 +238,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -302,7 +302,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory 
file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -655,10 +655,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -686,7 +687,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -697,7 +699,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -782,7 +784,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ 
env.GH_AW_SAFE_OUTPUTS }} @@ -804,13 +806,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -855,14 +857,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -870,7 +872,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -973,7 +975,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1029,7 +1031,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1143,7 +1145,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1205,7 +1207,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1232,7 +1234,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1259,7 +1261,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1275,7 +1277,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + 
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: copilot-pr-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml index e76d10f34b..596f391d55 100644 --- a/.github/workflows/copilot-session-insights.lock.yml +++ b/.github/workflows/copilot-session-insights.lock.yml @@ -258,7 +258,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -322,7 +322,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -330,7 +330,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -343,7 +343,7 @@ jobs: - name: Create cache-memory directory run: bash 
/opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -730,10 +730,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e 
MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -759,7 +760,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -770,7 +772,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -911,7 +913,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -933,7 +935,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -976,14 +978,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -991,7 +993,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1000,7 +1002,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1112,7 +1114,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + 
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1169,7 +1171,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1284,7 +1286,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1346,7 +1348,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1373,7 +1375,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1400,7 +1402,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1416,7 
+1418,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1461,7 +1463,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1472,7 +1474,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/craft.lock.yml b/.github/workflows/craft.lock.yml index 07c11e3470..e25c19c26c 100644 --- a/.github/workflows/craft.lock.yml +++ b/.github/workflows/craft.lock.yml @@ -239,7 +239,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -665,10 +665,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF 
-e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -696,7 +697,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -707,7 +709,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -792,7 +794,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -815,13 +817,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent_outputs path: | @@ -866,7 +868,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -970,7 +972,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1025,7 +1027,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1197,7 +1199,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1208,7 +1210,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1253,7 +1255,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-architecture-diagram.lock.yml b/.github/workflows/daily-architecture-diagram.lock.yml index 494828f93a..861aae4bba 100644 --- a/.github/workflows/daily-architecture-diagram.lock.yml +++ b/.github/workflows/daily-architecture-diagram.lock.yml @@ -213,7 +213,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -267,7 +267,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -632,10 +632,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e 
GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -663,7 +664,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -674,7 +676,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -759,7 +761,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -781,13 +783,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -830,7 +832,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -838,7 +840,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -941,7 +943,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -995,7 +997,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1099,7 +1101,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1126,7 +1128,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1153,7 +1155,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1169,7 +1171,7 @@ jobs: fi - name: Save cache-memory to cache 
(default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-assign-issue-to-user.lock.yml b/.github/workflows/daily-assign-issue-to-user.lock.yml index 94acad64a8..b37655d585 100644 --- a/.github/workflows/daily-assign-issue-to-user.lock.yml +++ b/.github/workflows/daily-assign-issue-to-user.lock.yml @@ -198,7 +198,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -626,10 +626,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e 
GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -657,7 +658,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -668,7 +670,7 @@ jobs: 
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -753,7 +755,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -775,13 +777,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -826,7 +828,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -929,7 +931,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -984,7 
+986,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1091,7 +1093,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1118,7 +1120,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-choice-test.lock.yml b/.github/workflows/daily-choice-test.lock.yml index d3824ef615..ddccfb378e 100644 --- a/.github/workflows/daily-choice-test.lock.yml +++ b/.github/workflows/daily-choice-test.lock.yml @@ -204,7 +204,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -575,10 +575,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e 
MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e 
GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -604,7 +605,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -615,7 +617,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -750,7 +752,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -772,7 +774,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -815,7 +817,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -927,7 +929,7 @@ jobs: await main(); - name: 
Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -978,7 +980,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1086,7 +1088,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1121,7 +1123,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ diff --git a/.github/workflows/daily-cli-performance.lock.yml b/.github/workflows/daily-cli-performance.lock.yml index df5fd939bb..20f2e77078 100644 --- a/.github/workflows/daily-cli-performance.lock.yml +++ b/.github/workflows/daily-cli-performance.lock.yml @@ -227,7 +227,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -813,10 +813,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e 
GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -851,7 +852,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -862,7 +864,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -947,7 +949,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -969,13 +971,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 
v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1029,7 +1031,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -1038,7 +1040,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1142,7 +1144,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1198,7 +1200,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1314,7 +1316,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: 
Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1378,7 +1380,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1405,7 +1407,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-cli-tools-tester.lock.yml b/.github/workflows/daily-cli-tools-tester.lock.yml index d3fb9c41f1..e2a353a799 100644 --- a/.github/workflows/daily-cli-tools-tester.lock.yml +++ b/.github/workflows/daily-cli-tools-tester.lock.yml @@ -207,7 +207,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -257,7 +257,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -672,10 +672,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export 
MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -715,7 +716,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -726,7 +728,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -811,7 +813,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -833,13 +835,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine 
output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -884,7 +886,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -986,7 +988,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1039,7 +1041,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1144,7 +1146,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1171,7 +1173,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml 
index 2f34099f31..0a00af3082 100644 --- a/.github/workflows/daily-code-metrics.lock.yml +++ b/.github/workflows/daily-code-metrics.lock.yml @@ -29,7 +29,7 @@ # - shared/reporting.md # - shared/trends.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"97b80db74305ae7fd8cdf3defc0a6536b1b594b5768b64771ac5288324e89f91"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"1f34dc5e9f03f58bb2bcd2573b3aa3e091890178b2ac1a80d0fb0ff7da0589f1"} name: "Daily Code Metrics and Trend Tracking Agent" "on": @@ -240,7 +240,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -296,7 +296,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -304,7 +304,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: 
python-source-and-data @@ -317,7 +317,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -704,10 +704,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e 
MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -733,7 +734,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -744,7 +746,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -885,7 +887,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -907,7 +909,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -950,14 +952,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -965,7 +967,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -974,7 +976,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1086,7 +1088,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1143,7 +1145,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1262,7 +1264,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1325,7 +1327,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1352,7 +1354,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1379,7 +1381,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # 
v8.0.0 continue-on-error: true with: name: cache-memory @@ -1395,7 +1397,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1440,7 +1442,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1451,7 +1453,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-compiler-quality.lock.yml b/.github/workflows/daily-compiler-quality.lock.yml index aad353d3ec..721ef96eb6 100644 --- a/.github/workflows/daily-compiler-quality.lock.yml +++ b/.github/workflows/daily-compiler-quality.lock.yml @@ -219,7 +219,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -274,7 +274,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: 
actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -617,10 +617,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e 
GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -656,7 +657,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -667,7 +669,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -772,7 +774,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: 
${{ env.GH_AW_SAFE_OUTPUTS }} @@ -794,13 +796,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -843,7 +845,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -851,7 +853,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -954,7 +956,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1009,7 +1011,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1121,7 +1123,7 @@ 
jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1148,7 +1150,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1175,7 +1177,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1191,7 +1193,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml index 522084d037..731fce7ded 100644 --- a/.github/workflows/daily-copilot-token-report.lock.yml +++ b/.github/workflows/daily-copilot-token-report.lock.yml @@ -29,7 +29,7 @@ # - shared/python-dataviz.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"ade2714854660b3ce9d5ede334dee1383c8b7d26416b77c0dbe08a57488c65b2"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"62ed9373b2a71bd045f90201268707bb8c2044f67b98fb93de8ac325d21d3613"} 
name: "Daily Copilot Token Consumption Report" "on": @@ -231,7 +231,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -282,7 +282,7 @@ jobs: - name: Checkout code uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -307,7 +307,7 @@ jobs: name: Recompile workflows run: make recompile || true - name: Install uv - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # eac588ad8def6316056a12d4907a9d4d84ff7a3b + uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0 - name: Install Go language server (gopls) run: go install golang.org/x/tools/gopls@latest - name: Install TypeScript language server @@ -318,7 +318,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: 
data-charts @@ -326,7 +326,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -343,7 +343,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -725,10 +725,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e 
GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -756,7 +757,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -767,7 +769,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -855,7 +857,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -877,13 +879,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -928,14 +930,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -943,7 +945,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -952,7 +954,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1055,7 +1057,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1112,7 +1114,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1230,7 +1232,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1293,7 +1295,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1320,7 +1322,7 @@ 
jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1347,7 +1349,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1363,7 +1365,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1408,7 +1410,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1419,7 +1421,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-doc-healer.lock.yml b/.github/workflows/daily-doc-healer.lock.yml index 5b8e34e7e9..1eec16a031 100644 --- a/.github/workflows/daily-doc-healer.lock.yml +++ 
b/.github/workflows/daily-doc-healer.lock.yml @@ -216,7 +216,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -270,7 +270,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -713,10 +713,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e 
GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -742,7 +743,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -753,7 +755,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt 
artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -916,7 +918,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -938,7 +940,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -979,7 +981,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -987,7 +989,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1100,7 +1102,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1155,7 +1157,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output 
artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1285,7 +1287,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1296,7 +1298,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1354,7 +1356,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1381,7 +1383,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1397,7 +1399,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ 
github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-doc-updater.lock.yml b/.github/workflows/daily-doc-updater.lock.yml index 4a8588c716..54a5b71b0c 100644 --- a/.github/workflows/daily-doc-updater.lock.yml +++ b/.github/workflows/daily-doc-updater.lock.yml @@ -216,7 +216,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -270,7 +270,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -639,10 +639,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -668,7 +669,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + 
"payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -679,7 +681,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -843,7 +845,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -865,7 +867,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -906,7 +908,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -914,7 +916,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1027,7 +1029,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1082,7 +1084,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1212,7 +1214,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1223,7 +1225,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1268,7 +1270,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1295,7 +1297,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1311,7 +1313,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: 
steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-fact.lock.yml b/.github/workflows/daily-fact.lock.yml index 41a743f605..b642dcc344 100644 --- a/.github/workflows/daily-fact.lock.yml +++ b/.github/workflows/daily-fact.lock.yml @@ -192,7 +192,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -567,10 +567,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e 
GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -621,7 +622,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -632,7 +634,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await 
generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -694,7 +696,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -716,13 +718,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -767,7 +769,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -857,7 +859,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -906,7 +908,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact 
continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1015,7 +1017,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1042,7 +1044,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml index 09a4c73853..1c98b9f5ee 100644 --- a/.github/workflows/daily-file-diet.lock.yml +++ b/.github/workflows/daily-file-diet.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -630,10 +630,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e 
GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v 
/opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -669,7 +670,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -680,7 +682,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -786,7 +788,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -808,13 +810,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -859,7 +861,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f 
# v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -962,7 +964,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1015,7 +1017,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1168,7 +1170,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1195,7 +1197,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml index 8b8d49c821..58f64763ab 100644 --- a/.github/workflows/daily-firewall-report.lock.yml +++ b/.github/workflows/daily-firewall-report.lock.yml @@ -28,7 +28,7 @@ # - shared/reporting.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"242cb4c2a2510d7ee8406006878fc34be8be05f271cba1379a98ffdaf2cb0fe0"} +# 
gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"575da188e33143c8c5b5e07660a4f4d3302695314638124a21ac50fec6fd4b3a"} name: "Daily Firewall Logs Collector and Reporter" "on": @@ -225,7 +225,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -277,7 +277,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -314,7 +314,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -322,7 +322,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -335,7 +335,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -732,10 +732,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" 
mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e 
GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -775,7 +776,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -786,7 +788,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -874,7 +876,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -896,13 +898,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: 
warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -945,7 +947,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -953,7 +955,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -962,7 +964,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1064,7 +1066,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1120,7 +1122,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1233,7 +1235,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1260,7 +1262,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1287,7 +1289,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1303,7 +1305,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1348,7 +1350,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1359,7 +1361,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml index af49fd12ae..247eddc88e 100644 --- a/.github/workflows/daily-issues-report.lock.yml +++ b/.github/workflows/daily-issues-report.lock.yml @@ -31,7 +31,7 @@ # - shared/reporting.md # - shared/trends.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"d99235242f1be4abb37e155b52d66ec477e66287c4c057da9603e4151007b97d"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"1c3e2673c7420e3ab56f0a356331fc4c0b883e4a048fd9ae5501d7fa30dac9a8"} name: "Daily Issues Report Generator" "on": @@ -244,7 +244,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -309,7 +309,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -317,7 +317,7 @@ jobs: retention-days: 30 - if: always() name: Upload 
source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -330,7 +330,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -768,10 +768,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v 
/tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -822,7 +823,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -833,7 +835,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -898,7 +900,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -920,13 +922,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -969,7 +971,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -977,7 +979,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -986,7 +988,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 
with: name: agent-artifacts path: | @@ -1076,7 +1078,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1132,7 +1134,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1276,7 +1278,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1303,7 +1305,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1330,7 +1332,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1346,7 +1348,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + 
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1391,7 +1393,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1402,7 +1404,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-malicious-code-scan.lock.yml b/.github/workflows/daily-malicious-code-scan.lock.yml index 7bc4bf3b29..65fa01fece 100644 --- a/.github/workflows/daily-malicious-code-scan.lock.yml +++ b/.github/workflows/daily-malicious-code-scan.lock.yml @@ -207,7 +207,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -634,10 +634,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG 
-e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -665,7 +666,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -676,7 +678,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -761,7 +763,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -783,13 +785,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -834,7 +836,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -873,7 +875,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -982,7 +984,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1009,7 +1011,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml index a05abaf5a3..2742244a42 100644 --- a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml +++ b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml @@ -218,7 +218,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -273,7 +273,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: 
actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -670,10 +670,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e 
GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -709,7 +710,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -720,7 +722,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -826,7 +828,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: 
${{ env.GH_AW_SAFE_OUTPUTS }} @@ -848,13 +850,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -897,7 +899,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -905,7 +907,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1008,7 +1010,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1062,7 +1064,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1173,7 +1175,7 
@@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1212,7 +1214,7 @@ jobs: const { main } = require('/opt/gh-aw/actions/create_agent_session.cjs'); await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1239,7 +1241,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1255,7 +1257,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml index 247469cad2..2dd98e38ef 100644 --- a/.github/workflows/daily-multi-device-docs-tester.lock.yml +++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml @@ -232,7 +232,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -675,10 +675,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e 
GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -721,7 +722,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -732,7 +734,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -912,7 +914,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -934,7 +936,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -977,7 +979,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -986,7 +988,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1098,7 +1100,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1152,7 +1154,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1262,7 +1264,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1289,7 +1291,7 @@ jobs: await main(); - name: Upload safe output 
items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1335,7 +1337,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1346,7 +1348,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml index d11337af71..32a88d5898 100644 --- a/.github/workflows/daily-news.lock.yml +++ b/.github/workflows/daily-news.lock.yml @@ -241,7 +241,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -302,7 +302,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} 
installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -310,7 +310,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -389,7 +389,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -772,10 +772,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID 
-e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -816,7 +817,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -827,7 +829,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -917,7 +919,7 @@ jobs: SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -939,13 +941,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -990,14 +992,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as 
artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1005,7 +1007,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1014,7 +1016,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1117,7 +1119,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1174,7 +1176,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1292,7 +1294,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 
continue-on-error: true with: name: repo-memory-default @@ -1355,7 +1357,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1382,7 +1384,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1409,7 +1411,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1425,7 +1427,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1470,7 +1472,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1481,7 +1483,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-observability-report.lock.yml b/.github/workflows/daily-observability-report.lock.yml index a3a80a40c7..4d68bb8954 100644 --- a/.github/workflows/daily-observability-report.lock.yml +++ b/.github/workflows/daily-observability-report.lock.yml @@ -219,7 +219,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -270,7 +270,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -729,10 +729,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e 
GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -799,7 +800,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -810,7 +812,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -872,7 +874,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -894,13 +896,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -945,7 +947,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1035,7 +1037,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1089,7 +1091,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1233,7 +1235,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1260,7 +1262,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml index b5e6a0aa28..455e7b0202 100644 --- a/.github/workflows/daily-performance-summary.lock.yml +++ b/.github/workflows/daily-performance-summary.lock.yml @@ -29,7 +29,7 @@ # - shared/reporting.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bafa109221dd7788395c3712ded55f1c20366eaef13dd3987c5ca7015799350d"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"f20bb2cf4c1486e5487a38568f5ca68848b5398c2d33608bf051731c12e1a580"} name: "Daily Project Performance Summary Generator (Using Safe Inputs)" "on": @@ -232,7 +232,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -290,7 +290,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -298,7 +298,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -311,7 +311,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1208,10 +1208,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e 
GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -1274,7 +1275,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1285,7 +1287,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1351,7 +1353,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1373,13 +1375,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1431,7 +1433,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + 
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1439,7 +1441,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1448,7 +1450,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1539,7 +1541,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1595,7 +1597,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1708,7 +1710,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1735,7 +1737,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1762,7 +1764,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1778,7 +1780,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1823,7 +1825,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1834,7 +1836,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-regulatory.lock.yml b/.github/workflows/daily-regulatory.lock.yml index 9332ded11f..8d776f3eee 100644 --- a/.github/workflows/daily-regulatory.lock.yml +++ b/.github/workflows/daily-regulatory.lock.yml @@ -218,7 +218,7 @@ jobs: run: bash 
/opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -1127,10 +1127,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1165,7 +1166,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1176,7 +1178,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1262,7 +1264,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() 
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1284,13 +1286,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1344,7 +1346,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1447,7 +1449,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1501,7 +1503,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1614,7 +1616,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1641,7 +1643,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-rendering-scripts-verifier.lock.yml b/.github/workflows/daily-rendering-scripts-verifier.lock.yml index 96acd924d7..d3039d3418 100644 --- a/.github/workflows/daily-rendering-scripts-verifier.lock.yml +++ b/.github/workflows/daily-rendering-scripts-verifier.lock.yml @@ -229,7 +229,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -279,7 +279,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -314,7 +314,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -708,10 +708,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -748,7 +749,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -759,7 +761,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -927,7 +929,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -949,7 +951,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ 
env.GH_AW_AGENT_OUTPUT }} @@ -990,7 +992,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -998,7 +1000,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1111,7 +1113,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1166,7 +1168,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1340,7 +1342,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1351,7 +1353,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1396,7 +1398,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1423,7 +1425,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1439,7 +1441,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml index af0fe95db7..4b6b811fe4 100644 --- a/.github/workflows/daily-repo-chronicle.lock.yml +++ b/.github/workflows/daily-repo-chronicle.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -281,7 +281,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c 
\"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -289,7 +289,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -302,7 +302,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -674,10 +674,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e 
GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -705,7 +706,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -716,7 +718,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -804,7 +806,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -826,13 +828,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -875,7 +877,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -883,7 +885,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -892,7 +894,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -995,7 +997,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1051,7 +1053,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1163,7 +1165,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1190,7 +1192,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 
v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1217,7 +1219,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1233,7 +1235,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1278,7 +1280,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1289,7 +1291,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/daily-safe-output-optimizer.lock.yml b/.github/workflows/daily-safe-output-optimizer.lock.yml index c153508320..ae23fe5086 100644 --- a/.github/workflows/daily-safe-output-optimizer.lock.yml +++ b/.github/workflows/daily-safe-output-optimizer.lock.yml @@ -229,7 +229,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: 
Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -279,7 +279,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -321,7 +321,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -716,10 +716,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e 
GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -756,7 +757,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -767,7 +769,7 @@ jobs: 
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -920,7 +922,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -942,7 +944,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -983,7 +985,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -991,7 +993,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1103,7 +1105,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1157,7 +1159,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1306,7 +1308,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1333,7 +1335,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1360,7 +1362,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1376,7 +1378,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/daily-safe-outputs-conformance.lock.yml b/.github/workflows/daily-safe-outputs-conformance.lock.yml index 83770d35bc..84e51e68f1 100644 --- 
a/.github/workflows/daily-safe-outputs-conformance.lock.yml +++ b/.github/workflows/daily-safe-outputs-conformance.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -627,10 +627,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -656,7 +657,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -667,7 +669,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -801,7 +803,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: 
always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -823,7 +825,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -866,7 +868,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -978,7 +980,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1031,7 +1033,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1141,7 +1143,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1168,7 +1170,7 @@ jobs: await main(); - name: Upload safe output 
items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-secrets-analysis.lock.yml b/.github/workflows/daily-secrets-analysis.lock.yml index 371ecdb988..06cf1d9f8a 100644 --- a/.github/workflows/daily-secrets-analysis.lock.yml +++ b/.github/workflows/daily-secrets-analysis.lock.yml @@ -208,7 +208,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -658,10 +658,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e 
GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -689,7 +690,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -700,7 +702,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); 
await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -785,7 +787,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -807,13 +809,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -858,7 +860,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -961,7 +963,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1015,7 +1017,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact 
continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1127,7 +1129,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1154,7 +1156,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-security-red-team.lock.yml b/.github/workflows/daily-security-red-team.lock.yml index 5dc01e8a2f..21bd8828a3 100644 --- a/.github/workflows/daily-security-red-team.lock.yml +++ b/.github/workflows/daily-security-red-team.lock.yml @@ -218,7 +218,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -631,10 +631,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -660,7 +661,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -671,7 +673,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -805,7 +807,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -827,7 +829,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -870,7 +872,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -982,7 +984,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && 
steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1035,7 +1037,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1145,7 +1147,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1172,7 +1174,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-semgrep-scan.lock.yml b/.github/workflows/daily-semgrep-scan.lock.yml index d3f623dd26..3dc37bcfd6 100644 --- a/.github/workflows/daily-semgrep-scan.lock.yml +++ b/.github/workflows/daily-semgrep-scan.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -641,10 +641,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -685,7 +686,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -696,7 +698,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -781,7 +783,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -803,13 +805,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -854,7 +856,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -956,7 +958,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1009,7 +1011,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1114,7 +1116,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1141,7 +1143,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git 
a/.github/workflows/daily-syntax-error-quality.lock.yml b/.github/workflows/daily-syntax-error-quality.lock.yml index 671e1c5485..684f2e920c 100644 --- a/.github/workflows/daily-syntax-error-quality.lock.yml +++ b/.github/workflows/daily-syntax-error-quality.lock.yml @@ -27,7 +27,7 @@ # Imports: # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6bc4b8a34f82800434c13be2769dfce0c06d8ec3269c8cca14fd951af01797d9"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"c4b7c52a6c58b8c054b75b4e9240efd1b9435789fa1f35652462876451aed8b2"} name: "Daily Syntax Error Quality Check" "on": @@ -208,7 +208,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -260,7 +260,7 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: true go-version-file: go.mod @@ -620,10 +620,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e 
GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash 
/opt/gh-aw/actions/start_mcp_gateway.sh @@ -650,7 +651,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -661,7 +663,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -767,7 +769,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -789,13 +791,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -840,7 +842,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -943,7 +945,7 @@ jobs: await main(); - name: Upload 
threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -996,7 +998,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1105,7 +1107,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1132,7 +1134,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-syntax-error-quality.md b/.github/workflows/daily-syntax-error-quality.md index 2017b07790..ec24d931de 100644 --- a/.github/workflows/daily-syntax-error-quality.md +++ b/.github/workflows/daily-syntax-error-quality.md @@ -33,7 +33,7 @@ timeout-minutes: 20 strict: true steps: - name: Setup Go - uses: actions/setup-go@v6 + uses: actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true diff --git a/.github/workflows/daily-team-evolution-insights.lock.yml b/.github/workflows/daily-team-evolution-insights.lock.yml index 723855542a..c75ccf901d 100644 --- a/.github/workflows/daily-team-evolution-insights.lock.yml +++ 
b/.github/workflows/daily-team-evolution-insights.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -608,10 +608,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e 
MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -637,7 +638,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -648,7 +650,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -782,7 +784,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -804,7 +806,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -847,7 +849,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -959,7 +961,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1013,7 +1015,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1126,7 +1128,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1153,7 +1155,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-team-status.lock.yml b/.github/workflows/daily-team-status.lock.yml index c1940ecdb7..26e069ceea 100644 --- a/.github/workflows/daily-team-status.lock.yml +++ b/.github/workflows/daily-team-status.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -632,10 +632,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e 
GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -663,7 +664,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -674,7 +676,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download 
prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -759,7 +761,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -781,13 +783,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -832,7 +834,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -934,7 +936,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -987,7 +989,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1138,7 +1140,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1165,7 +1167,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-testify-uber-super-expert.lock.yml b/.github/workflows/daily-testify-uber-super-expert.lock.yml index 7ede94f6db..a2b6140719 100644 --- a/.github/workflows/daily-testify-uber-super-expert.lock.yml +++ b/.github/workflows/daily-testify-uber-super-expert.lock.yml @@ -232,7 +232,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -652,10 +652,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -691,7 +692,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -702,7 +704,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -807,7 +809,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -829,13 +831,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -880,7 +882,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact 
(default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -889,7 +891,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -992,7 +994,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1046,7 +1048,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1206,7 +1208,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1268,7 +1270,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1295,7 +1297,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/daily-workflow-updater.lock.yml b/.github/workflows/daily-workflow-updater.lock.yml index 988a97ad9f..5f4f074b99 100644 --- a/.github/workflows/daily-workflow-updater.lock.yml +++ b/.github/workflows/daily-workflow-updater.lock.yml @@ -202,7 +202,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -611,10 +611,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -642,7 +643,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -653,7 +655,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -738,7 +740,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -760,13 +762,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -811,7 +813,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -915,7 +917,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -969,7 +971,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1098,7 +1100,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1109,7 +1111,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1154,7 +1156,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml index c26c689a32..a14e296dcb 100644 --- a/.github/workflows/deep-report.lock.yml +++ b/.github/workflows/deep-report.lock.yml @@ -240,7 +240,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -292,7 +292,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -335,7 +335,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: weekly-issues-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -821,10 +821,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e 
GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -891,7 +892,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -902,7 +904,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact 
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -967,7 +969,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -989,13 +991,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1040,14 +1042,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1055,7 +1057,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 
v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1064,7 +1066,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1154,7 +1156,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1211,7 +1213,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1330,7 +1332,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1393,7 +1395,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1420,7 
+1422,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1447,7 +1449,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1463,7 +1465,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: weekly-issues-data-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1508,7 +1510,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1519,7 +1521,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/delight.lock.yml b/.github/workflows/delight.lock.yml index d1b812d849..ce8003b13c 100644 --- a/.github/workflows/delight.lock.yml +++ b/.github/workflows/delight.lock.yml @@ -223,7 +223,7 @@ jobs: run: 
bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -699,10 +699,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -730,7 +731,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -741,7 +743,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -849,7 +851,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -871,13 +873,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -922,7 +924,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -931,7 +933,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1034,7 +1036,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1089,7 +1091,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1208,7 +1210,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1272,7 +1274,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1299,7 +1301,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/dependabot-burner.lock.yml b/.github/workflows/dependabot-burner.lock.yml index cb2346aba0..4abcc0f902 100644 --- a/.github/workflows/dependabot-burner.lock.yml +++ b/.github/workflows/dependabot-burner.lock.yml @@ -208,7 +208,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -617,10 +617,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + 
export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -648,7 +649,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -659,7 +661,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -744,7 +746,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -766,13 +768,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine 
output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -817,7 +819,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -919,7 +921,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -972,7 +974,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1108,7 +1110,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1135,7 +1137,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/dependabot-go-checker.lock.yml 
b/.github/workflows/dependabot-go-checker.lock.yml index 9523db384b..c1fe52eb85 100644 --- a/.github/workflows/dependabot-go-checker.lock.yml +++ b/.github/workflows/dependabot-go-checker.lock.yml @@ -205,7 +205,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -658,10 +658,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export 
MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -689,7 +690,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -700,7 +702,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -785,7 
+787,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -807,13 +809,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -858,7 +860,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -960,7 +962,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1013,7 +1015,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1118,7 +1120,7 @@ jobs: destination: /opt/gh-aw/actions - 
name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1145,7 +1147,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml index cb067e629b..07e84605b3 100644 --- a/.github/workflows/dev-hawk.lock.yml +++ b/.github/workflows/dev-hawk.lock.yml @@ -236,7 +236,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -286,7 +286,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -666,10 +666,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH 
-e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -709,7 +710,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -720,7 +722,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -821,7 +823,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -843,13 +845,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -894,7 +896,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -997,7 +999,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1052,7 +1054,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1192,7 +1194,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1219,7 +1221,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml index 2eea0056cb..f27282bbb4 100644 --- a/.github/workflows/dev.lock.yml +++ b/.github/workflows/dev.lock.yml @@ -198,7 +198,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: 
/tmp/gh-aw/aw-prompts/prompt.txt @@ -608,10 +608,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -639,7 +640,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -650,7 +652,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -735,7 +737,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -757,13 +759,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -808,7 +810,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -911,7 +913,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -964,7 +966,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1068,7 +1070,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1095,7 +1097,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # 
v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/developer-docs-consolidator.lock.yml b/.github/workflows/developer-docs-consolidator.lock.yml index 6ad82d9f9a..272a7c9997 100644 --- a/.github/workflows/developer-docs-consolidator.lock.yml +++ b/.github/workflows/developer-docs-consolidator.lock.yml @@ -228,7 +228,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -283,7 +283,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: developer-docs-cache-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -704,10 +704,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e 
GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -749,7 +750,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -760,7 +762,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -923,7 +925,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -945,7 +947,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -986,7 +988,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -994,7 +996,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1107,7 +1109,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1163,7 +1165,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1290,7 +1292,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1301,7 +1303,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1346,7 +1348,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1373,7 +1375,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1389,7 +1391,7 @@ jobs: fi - 
name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: developer-docs-cache-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/dictation-prompt.lock.yml b/.github/workflows/dictation-prompt.lock.yml index dbcef82109..637cec40ec 100644 --- a/.github/workflows/dictation-prompt.lock.yml +++ b/.github/workflows/dictation-prompt.lock.yml @@ -208,7 +208,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -617,10 +617,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF 
-e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -648,7 +649,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -659,7 +661,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -744,7 +746,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -766,13 +768,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -817,7 +819,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -921,7 +923,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -975,7 +977,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1098,7 +1100,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1109,7 +1111,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1154,7 +1156,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/discussion-task-miner.lock.yml b/.github/workflows/discussion-task-miner.lock.yml index 5c821425ea..690d382522 100644 --- a/.github/workflows/discussion-task-miner.lock.yml +++ b/.github/workflows/discussion-task-miner.lock.yml @@ -221,7 +221,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -684,10 +684,11 @@ jobs: export MCP_GATEWAY_API_KEY 
export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -715,7 +716,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -726,7 +728,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -832,7 +834,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -854,13 +856,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -905,7 +907,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -914,7 +916,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1017,7 +1019,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1073,7 +1075,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1190,7 +1192,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1255,7 +1257,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1282,7 +1284,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml index 1264626b83..45a604941c 100644 --- a/.github/workflows/docs-noob-tester.lock.yml +++ b/.github/workflows/docs-noob-tester.lock.yml @@ -211,7 +211,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -628,10 +628,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v 
/tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -666,7 +667,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -677,7 +679,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -765,7 +767,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -787,13 +789,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -838,7 +840,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -847,7 +849,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -950,7 +952,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1005,7 +1007,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1112,7 +1114,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1139,7 +1141,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1185,7 +1187,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: 
Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1196,7 +1198,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/draft-pr-cleanup.lock.yml b/.github/workflows/draft-pr-cleanup.lock.yml index c08a29d2cc..d11481d09f 100644 --- a/.github/workflows/draft-pr-cleanup.lock.yml +++ b/.github/workflows/draft-pr-cleanup.lock.yml @@ -201,7 +201,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -639,10 +639,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e 
GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -670,7 +671,8 @@ jobs: "port": 
$MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -681,7 +683,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -782,7 +784,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -804,13 +806,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -855,7 +857,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -958,7 +960,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && 
steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1013,7 +1015,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1121,7 +1123,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1148,7 +1150,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/duplicate-code-detector.lock.yml b/.github/workflows/duplicate-code-detector.lock.yml index d70120de30..e1abd5713c 100644 --- a/.github/workflows/duplicate-code-detector.lock.yml +++ b/.github/workflows/duplicate-code-detector.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -634,10 +634,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -720,7 +721,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -731,7 +733,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -793,7 +795,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -815,13 +817,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ 
env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -866,7 +868,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -956,7 +958,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1009,7 +1011,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1114,7 +1116,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1154,7 +1156,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git 
a/.github/workflows/example-custom-error-patterns.lock.yml b/.github/workflows/example-custom-error-patterns.lock.yml index a4c15a89e4..409753768d 100644 --- a/.github/workflows/example-custom-error-patterns.lock.yml +++ b/.github/workflows/example-custom-error-patterns.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -351,10 +351,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export 
MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -375,7 +376,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -386,7 +388,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -469,7 +471,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ 
secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -514,7 +516,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/.github/workflows/example-permissions-warning.lock.yml b/.github/workflows/example-permissions-warning.lock.yml index 116ccd9d9b..366dd8aa44 100644 --- a/.github/workflows/example-permissions-warning.lock.yml +++ b/.github/workflows/example-permissions-warning.lock.yml @@ -197,7 +197,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -334,10 +334,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e 
GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -357,7 +358,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -368,7 +370,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -451,7 +453,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -496,7 +498,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/.github/workflows/example-workflow-analyzer.lock.yml b/.github/workflows/example-workflow-analyzer.lock.yml index 96699e8ce6..8d49c7225b 100644 --- a/.github/workflows/example-workflow-analyzer.lock.yml +++ b/.github/workflows/example-workflow-analyzer.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -262,7 +262,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: 
actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -660,10 +660,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e 
DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -700,7 +701,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -711,7 +713,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -845,7 +847,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -867,7 +869,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -910,7 +912,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1022,7 +1024,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1076,7 +1078,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1184,7 +1186,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1211,7 +1213,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml 
index 37e4cc5418..3ed3c61553 100644 --- a/.github/workflows/firewall-escape.lock.yml +++ b/.github/workflows/firewall-escape.lock.yml @@ -242,7 +242,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -297,7 +297,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -650,10 +650,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e 
GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -681,7 +682,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -692,7 +694,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -777,7 +779,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -799,13 +801,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -850,14 +852,14 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default retention-days: 1 if-no-files-found: ignore - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -865,7 +867,7 @@ jobs: - name: Upload agent artifacts if: always() 
continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -968,7 +970,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1024,7 +1026,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1205,7 +1207,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1267,7 +1269,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1294,7 +1296,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1321,7 +1323,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1337,7 +1339,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml index a4874afe9b..18d0a5eb7c 100644 --- a/.github/workflows/firewall.lock.yml +++ b/.github/workflows/firewall.lock.yml @@ -199,7 +199,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -336,10 +336,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p 
/home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -360,7 +361,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -371,7 +373,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -454,7 +456,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -499,7 +501,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/.github/workflows/functional-pragmatist.lock.yml b/.github/workflows/functional-pragmatist.lock.yml index ae3eb62b3c..0bca5c39dd 100644 --- a/.github/workflows/functional-pragmatist.lock.yml +++ b/.github/workflows/functional-pragmatist.lock.yml @@ -216,7 +216,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: 
/tmp/gh-aw/aw-prompts/prompt.txt @@ -624,10 +624,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -655,7 +656,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -666,7 +668,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -751,7 +753,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -773,13 +775,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -824,7 +826,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -927,7 +929,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -981,7 +983,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1111,7 +1113,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1122,7 +1124,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + 
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1167,7 +1169,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml index 21475aac33..f339bb53ed 100644 --- a/.github/workflows/github-mcp-structural-analysis.lock.yml +++ b/.github/workflows/github-mcp-structural-analysis.lock.yml @@ -28,7 +28,7 @@ # - shared/python-dataviz.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"58f21d0c94b655d86e891f0c3cb2d788fb5497bb379b3652f1c75cfac89261bc"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bae501e108ec9d6346e7e1686373396ef4828d5e532005db44bde88081d0ea79"} name: "GitHub MCP Structural Analysis" "on": @@ -226,7 +226,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -285,7 +285,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho 
\"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -293,7 +293,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -306,7 +306,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -683,10 +683,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e 
GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -712,7 +713,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 
} } GH_AW_MCP_CONFIG_EOF @@ -723,7 +725,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -864,7 +866,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -886,7 +888,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -927,7 +929,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -935,7 +937,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -944,7 +946,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1056,7 +1058,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1112,7 +1114,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1220,7 +1222,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1247,7 +1249,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1274,7 +1276,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1290,7 +1292,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: 
actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1335,7 +1337,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1346,7 +1348,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/github-mcp-tools-report.lock.yml b/.github/workflows/github-mcp-tools-report.lock.yml index 31ea144ce6..db5169f77e 100644 --- a/.github/workflows/github-mcp-tools-report.lock.yml +++ b/.github/workflows/github-mcp-tools-report.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} 
path: /tmp/gh-aw/cache-memory @@ -701,10 +701,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -731,7 +732,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -742,7 +744,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -880,7 +882,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -902,7 +904,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -943,7 +945,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -951,7 +953,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1064,7 +1066,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1120,7 +1122,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1247,7 +1249,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1258,7 +1260,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: 
true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1303,7 +1305,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1330,7 +1332,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1346,7 +1348,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/github-remote-mcp-auth-test.lock.yml b/.github/workflows/github-remote-mcp-auth-test.lock.yml index cdac9211ef..ed04fd2294 100644 --- a/.github/workflows/github-remote-mcp-auth-test.lock.yml +++ b/.github/workflows/github-remote-mcp-auth-test.lock.yml @@ -211,7 +211,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -598,11 +598,12 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" export GITHUB_PERSONAL_ACCESS_TOKEN="$GITHUB_MCP_SERVER_TOKEN" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_PERSONAL_ACCESS_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_PERSONAL_ACCESS_TOKEN 
-e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -638,7 +639,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -649,7 +651,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -734,7 +736,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -756,13 +758,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -807,7 +809,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -909,7 +911,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -963,7 +965,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1072,7 +1074,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1099,7 +1101,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/glossary-maintainer.lock.yml b/.github/workflows/glossary-maintainer.lock.yml index a9ca7543fc..9f3331ebb2 100644 --- a/.github/workflows/glossary-maintainer.lock.yml +++ b/.github/workflows/glossary-maintainer.lock.yml @@ -230,7 +230,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -296,7 +296,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -660,10 +660,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e 
GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -699,7 +700,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", 
"apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -710,7 +712,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -822,7 +824,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -844,13 +846,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -893,7 +895,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -901,7 +903,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1004,7 +1006,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1059,7 +1061,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1183,7 +1185,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1194,7 +1196,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1239,7 +1241,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1266,7 +1268,7 @@ jobs: 
destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1282,7 +1284,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/go-fan.lock.yml b/.github/workflows/go-fan.lock.yml index 04f483052f..bede913819 100644 --- a/.github/workflows/go-fan.lock.yml +++ b/.github/workflows/go-fan.lock.yml @@ -225,7 +225,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -628,10 +628,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm 
--network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e 
GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -673,7 +674,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -684,7 +686,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -840,7 +842,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -862,7 +864,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -903,7 +905,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -911,7 +913,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1023,7 +1025,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1078,7 +1080,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1191,7 +1193,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1218,7 +1220,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1245,7 +1247,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1261,7 +1263,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/go-logger.lock.yml b/.github/workflows/go-logger.lock.yml index 0002ba3403..59becb5d34 100644 --- a/.github/workflows/go-logger.lock.yml +++ b/.github/workflows/go-logger.lock.yml @@ -27,7 +27,7 @@ # Imports: # - shared/go-make.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6160c9a01f19aa6c63aa169965824e7f8ce09444b8da855ce3124b6e3dd766a0"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"f25ce343847e9c88cf0b683033fae1614998f81e330da7bf355bcf7a2e50e199"} name: "Go Logger Enhancement" "on": @@ -222,7 +222,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: true go-version-file: go.mod @@ -292,7 +292,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: 
actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -788,10 +788,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -824,7 +825,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -835,7 +837,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1001,7 +1003,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1023,7 +1025,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -1073,7 +1075,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1081,7 +1083,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1195,7 +1197,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1250,7 +1252,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1374,7 +1376,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1385,7 +1387,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1430,7 +1432,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1457,7 +1459,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1473,7 +1475,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/go-logger.md b/.github/workflows/go-logger.md index 91b98aa17f..b3a43c5614 100644 --- a/.github/workflows/go-logger.md +++ b/.github/workflows/go-logger.md @@ -21,13 +21,13 @@ safe-outputs: steps: - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: "24" 
cache: npm cache-dependency-path: actions/setup/js/package-lock.json - name: Setup Go - uses: actions/setup-go@v6 + uses: actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml index db17919d6f..bab1013725 100644 --- a/.github/workflows/go-pattern-detector.lock.yml +++ b/.github/workflows/go-pattern-detector.lock.yml @@ -27,7 +27,7 @@ # Imports: # - shared/mcp/ast-grep.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"b97f1fea9e98decc6a42563d2e324c065d494f9a2cf336f4743dc4818be0c610"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"d0376cb6b3eae45827ed7c21ea355b6564f20c636546a5b3a584cf8f880edfba"} name: "Go Pattern Detector" "on": @@ -217,7 +217,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -634,10 +634,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e 
GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -667,7 +668,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -678,7 +680,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -813,7 +815,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -835,7 +837,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -878,7 +880,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -990,7 +992,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1023,7 +1025,7 @@ jobs: found_patterns: ${{ steps.detect.outputs.found_patterns }} steps: - name: 
Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - name: Install ast-grep @@ -1077,7 +1079,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1182,7 +1184,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1209,7 +1211,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/go-pattern-detector.md b/.github/workflows/go-pattern-detector.md index dff90320d5..021bfc5eb0 100644 --- a/.github/workflows/go-pattern-detector.md +++ b/.github/workflows/go-pattern-detector.md @@ -17,7 +17,7 @@ jobs: found_patterns: ${{ steps.detect.outputs.found_patterns }} steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 with: persist-credentials: false - name: Install ast-grep diff --git a/.github/workflows/gpclean.lock.yml b/.github/workflows/gpclean.lock.yml index 6d390c8885..210ff687be 100644 --- a/.github/workflows/gpclean.lock.yml +++ b/.github/workflows/gpclean.lock.yml @@ -213,7 +213,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt 
artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -273,7 +273,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -638,10 +638,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v 
/tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -669,7 +670,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -680,7 +682,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -765,7 +767,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -787,13 +789,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -836,7 +838,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -844,7 +846,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -946,7 +948,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1000,7 +1002,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1105,7 +1107,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1132,7 +1134,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1159,7 +1161,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1175,7 +1177,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/grumpy-reviewer.lock.yml b/.github/workflows/grumpy-reviewer.lock.yml index 8aca558e60..735e62d46e 100644 --- 
a/.github/workflows/grumpy-reviewer.lock.yml +++ b/.github/workflows/grumpy-reviewer.lock.yml @@ -253,7 +253,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -304,7 +304,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -715,10 +715,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e 
GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -746,7 +747,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -757,7 +759,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -842,7 +844,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -865,13 +867,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -914,7 +916,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -922,7 +924,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1024,7 +1026,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && 
steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1078,7 +1080,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1247,7 +1249,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1274,7 +1276,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1301,7 +1303,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1317,7 +1319,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED 
}}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/hourly-ci-cleaner.lock.yml b/.github/workflows/hourly-ci-cleaner.lock.yml index c2db066500..383aa7aabc 100644 --- a/.github/workflows/hourly-ci-cleaner.lock.yml +++ b/.github/workflows/hourly-ci-cleaner.lock.yml @@ -27,7 +27,7 @@ # Imports: # - ../agents/ci-cleaner.agent.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"671a14d4b1a43ceb00db004fb0c5c4ba09d7ae14da6fd2703f3218a72d3d841f"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"f715c77536157d46d383a993c4e231ed9f29a36b211b1b17df979aae0699f650"} name: "CI Cleaner" "on": @@ -229,7 +229,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -306,7 +306,7 @@ jobs: sudo apt-get update sudo apt-get install -y make - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: true go-version-file: go.mod @@ -674,10 +674,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e 
GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash 
/opt/gh-aw/actions/start_mcp_gateway.sh @@ -705,7 +706,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -716,7 +718,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -801,7 +803,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -823,13 +825,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -874,7 +876,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -977,7 +979,7 @@ jobs: await main(); - name: Upload 
threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1078,7 +1080,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1210,7 +1212,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1221,7 +1223,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1266,7 +1268,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/hourly-ci-cleaner.md b/.github/workflows/hourly-ci-cleaner.md index 6dac36d2a6..ea9b2ef65a 100644 --- a/.github/workflows/hourly-ci-cleaner.md +++ b/.github/workflows/hourly-ci-cleaner.md @@ -90,12 +90,12 @@ steps: sudo apt-get update sudo apt-get 
install -y make - name: Setup Go - uses: actions/setup-go@v6 + uses: actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: "24" cache: npm diff --git a/.github/workflows/instructions-janitor.lock.yml b/.github/workflows/instructions-janitor.lock.yml index 359ddf4b17..12bd43cb47 100644 --- a/.github/workflows/instructions-janitor.lock.yml +++ b/.github/workflows/instructions-janitor.lock.yml @@ -215,7 +215,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -269,7 +269,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -638,10 +638,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e 
GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash 
/opt/gh-aw/actions/start_mcp_gateway.sh { @@ -667,7 +668,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -678,7 +680,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -839,7 +841,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -861,7 +863,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -902,7 +904,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -910,7 +912,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-artifacts path: | @@ -1023,7 +1025,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1078,7 +1080,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1202,7 +1204,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1213,7 +1215,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1258,7 +1260,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1285,7 +1287,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1301,7 +1303,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/issue-arborist.lock.yml b/.github/workflows/issue-arborist.lock.yml index 87b2a9bed0..3af88247e6 100644 --- a/.github/workflows/issue-arborist.lock.yml +++ b/.github/workflows/issue-arborist.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -733,10 +733,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE 
-e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -787,7 +788,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -798,7 +800,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -860,7 +862,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -882,13 +884,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -933,7 +935,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1023,7 +1025,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1077,7 +1079,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1185,7 +1187,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1212,7 +1214,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml index c63ab13fd2..4a0c6dfa7d 100644 --- a/.github/workflows/issue-monster.lock.yml +++ b/.github/workflows/issue-monster.lock.yml @@ -228,7 +228,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -661,10 +661,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export 
MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -692,7 +693,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -703,7 +705,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -788,7 +790,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -810,13 +812,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine 
output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -861,7 +863,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -963,7 +965,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1018,7 +1020,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1190,7 +1192,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1233,7 +1235,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/issue-triage-agent.lock.yml b/.github/workflows/issue-triage-agent.lock.yml 
index fc44488ba9..92cff153eb 100644 --- a/.github/workflows/issue-triage-agent.lock.yml +++ b/.github/workflows/issue-triage-agent.lock.yml @@ -74,6 +74,14 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); await main(); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false - name: Check workflow file timestamps uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: @@ -203,7 +211,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -601,10 +609,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e 
GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -632,7 +641,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF 
@@ -643,7 +653,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -728,7 +738,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -750,13 +760,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -801,7 +811,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -903,7 +913,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: 
/tmp/gh-aw/threat-detection/detection.log @@ -958,7 +968,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1064,7 +1074,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1091,7 +1101,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/jsweep.lock.yml b/.github/workflows/jsweep.lock.yml index a7c96c34e2..178fc48474 100644 --- a/.github/workflows/jsweep.lock.yml +++ b/.github/workflows/jsweep.lock.yml @@ -217,7 +217,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -281,7 +281,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: 
/tmp/gh-aw/cache-memory @@ -645,10 +645,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -684,7 +685,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -695,7 +697,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -780,7 +782,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -802,13 +804,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -851,7 +853,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -859,7 +861,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -962,7 +964,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1017,7 +1019,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1147,7 +1149,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1158,7 +1160,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1203,7 +1205,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1230,7 +1232,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1246,7 +1248,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/layout-spec-maintainer.lock.yml b/.github/workflows/layout-spec-maintainer.lock.yml index 1512b5dde9..6f2d4b9d14 100644 --- a/.github/workflows/layout-spec-maintainer.lock.yml +++ b/.github/workflows/layout-spec-maintainer.lock.yml @@ -209,7 +209,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -261,7 +261,7 @@ jobs: run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh # Cache configuration from frontmatter processed below - name: Cache layout spec data - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: layout-spec-cache-${{ github.run_id }} path: /tmp/gh-aw/layout-cache @@ -624,10 +624,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -655,7 +656,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -666,7 +668,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: 
prompt path: /tmp/gh-aw/aw-prompts @@ -780,7 +782,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -802,13 +804,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -853,7 +855,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -956,7 +958,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1010,7 +1012,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1140,7 
+1142,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1151,7 +1153,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1196,7 +1198,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/lockfile-stats.lock.yml b/.github/workflows/lockfile-stats.lock.yml index 752e62066c..051abc5352 100644 --- a/.github/workflows/lockfile-stats.lock.yml +++ b/.github/workflows/lockfile-stats.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -274,7 +274,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ 
github.run_id }} path: /tmp/gh-aw/cache-memory @@ -622,10 +622,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e 
GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -651,7 +652,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -662,7 +664,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -800,7 +802,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -822,7 +824,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -863,7 +865,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -871,7 +873,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -983,7 +985,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1038,7 +1040,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1146,7 +1148,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1173,7 +1175,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1200,7 +1202,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1216,7 +1218,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml index 3f61b51445..bf76db17ac 100644 --- a/.github/workflows/mcp-inspector.lock.yml +++ b/.github/workflows/mcp-inspector.lock.yml @@ -279,7 +279,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -329,7 +329,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -364,18 +364,18 @@ jobs: node-version: '24' package-manager-cache: false - name: Setup Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + uses: 
actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: '3.12' - name: Setup uv - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2 + uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0 - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh # Cache memory file share configuration from frontmatter processed below - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -789,10 +789,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e 
GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e AZURE_CLIENT_ID -e AZURE_CLIENT_SECRET -e AZURE_TENANT_ID -e BRAVE_API_KEY -e CONTEXT7_API_KEY -e DD_API_KEY -e DD_APPLICATION_KEY -e DD_SITE -e NOTION_API_TOKEN -e SENTRY_ACCESS_TOKEN -e SENTRY_OPENAI_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e AZURE_CLIENT_ID -e AZURE_CLIENT_SECRET -e AZURE_TENANT_ID -e BRAVE_API_KEY -e CONTEXT7_API_KEY -e DD_API_KEY -e DD_APPLICATION_KEY -e DD_SITE -e NOTION_API_TOKEN -e SENTRY_ACCESS_TOKEN -e SENTRY_OPENAI_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | 
bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1019,7 +1020,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1030,7 +1032,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1131,7 +1133,7 @@ jobs: SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1153,13 +1155,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1202,7 +1204,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: 
cache-memory @@ -1210,7 +1212,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1312,7 +1314,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1369,7 +1371,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1454,7 +1456,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ @@ -1582,7 +1584,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ @@ -1747,7 +1749,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: 
/tmp/gh-aw/safeoutputs/ @@ -1774,7 +1776,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1801,7 +1803,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1817,7 +1819,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/mergefest.lock.yml b/.github/workflows/mergefest.lock.yml index f9d252399d..961d740462 100644 --- a/.github/workflows/mergefest.lock.yml +++ b/.github/workflows/mergefest.lock.yml @@ -241,7 +241,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -625,10 +625,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF 
-e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -656,7 +657,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -667,7 +669,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -791,7 +793,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -814,13 +816,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent_outputs path: | @@ -865,7 +867,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -968,7 +970,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1021,7 +1023,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1191,7 +1193,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1202,7 +1204,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1247,7 +1249,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/metrics-collector.lock.yml b/.github/workflows/metrics-collector.lock.yml index df54d54ae1..0b9fcbae34 100644 --- a/.github/workflows/metrics-collector.lock.yml +++ b/.github/workflows/metrics-collector.lock.yml @@ -216,7 +216,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -254,7 +254,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -422,10 +422,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e 
GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -458,7 +459,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -469,7 +471,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -552,7 +554,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -597,7 +599,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -606,7 +608,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -687,7 +689,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml index 0e1c0c298f..b89e2825d4 100644 --- 
a/.github/workflows/notion-issue-summary.lock.yml +++ b/.github/workflows/notion-issue-summary.lock.yml @@ -218,7 +218,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -567,10 +567,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e NOTION_API_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e NOTION_API_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -611,7 +612,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -622,7 +624,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -708,7 +710,7 @@ jobs: SECRET_NOTION_API_TOKEN: ${{ 
secrets.NOTION_API_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -730,13 +732,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -781,7 +783,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -818,7 +820,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -901,7 +903,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ @@ -1048,7 +1050,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1075,7 +1077,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml index e2a2837fe6..99b23aa2b8 100644 --- a/.github/workflows/org-health-report.lock.yml +++ b/.github/workflows/org-health-report.lock.yml @@ -29,7 +29,7 @@ # - shared/python-dataviz.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"96fd9a0074cc690873b004a8a2f47366af9de05c96c06e7cf8e96c75dbd67818"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"5592b8e9dacf789c8872327217ee33c8044526e0751cc44151ba4adbe9751084"} name: "Organization Health Report" "on": @@ -229,7 +229,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -287,7 +287,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy 
{scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -295,7 +295,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -310,7 +310,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -682,10 +682,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e 
GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -713,7 +714,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -724,7 +726,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -812,7 +814,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -834,13 +836,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -883,7 +885,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -891,7 +893,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -900,7 +902,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1002,7 +1004,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1058,7 +1060,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1166,7 +1168,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1193,7 +1195,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1220,7 +1222,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory 
artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1236,7 +1238,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1281,7 +1283,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1292,7 +1294,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/pdf-summary.lock.yml b/.github/workflows/pdf-summary.lock.yml index 63a4026c2e..3b83af20b7 100644 --- a/.github/workflows/pdf-summary.lock.yml +++ b/.github/workflows/pdf-summary.lock.yml @@ -286,7 +286,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -339,7 +339,7 @@ jobs: - name: Create 
cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -721,10 +721,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -759,7 +760,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -770,7 +772,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -855,7 +857,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f 
# v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -878,13 +880,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -927,7 +929,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -935,7 +937,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1037,7 +1039,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1093,7 +1095,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1268,7 +1270,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1295,7 +1297,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1322,7 +1324,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1338,7 +1340,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/plan.lock.yml b/.github/workflows/plan.lock.yml index d313d88a6e..339c06f377 100644 --- a/.github/workflows/plan.lock.yml +++ b/.github/workflows/plan.lock.yml @@ -247,7 +247,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -705,10 +705,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e 
GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -735,7 +736,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -746,7 +748,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -831,7 +833,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -854,13 +856,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && 
env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -905,7 +907,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1007,7 +1009,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1061,7 +1063,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1229,7 +1231,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1256,7 +1258,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml index e127bca0c4..dc51b0c8ef 100644 --- a/.github/workflows/poem-bot.lock.yml +++ b/.github/workflows/poem-bot.lock.yml @@ -276,7 +276,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -328,7 +328,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: poem-memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1273,10 +1273,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e 
GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1304,7 +1305,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1315,7 +1317,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1428,7 +1430,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1451,13 +1453,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1500,7 +1502,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1509,7 +1511,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs 
assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1518,7 +1520,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1621,7 +1623,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1678,7 +1680,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1874,7 +1876,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1885,7 +1887,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ 
-1964,7 +1966,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1980,7 +1982,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: poem-memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -2025,7 +2027,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -2036,7 +2038,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml index f32b3b4d98..7e5e58227c 100644 --- a/.github/workflows/portfolio-analyst.lock.yml +++ b/.github/workflows/portfolio-analyst.lock.yml @@ -29,7 +29,7 @@ # - shared/reporting.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"045ffd4aeee621421ca02cf32bd6dc85b3937b753dfcac93150cd9b0ca37eece"} +# gh-aw-metadata: 
{"schema_version":"v1","frontmatter_hash":"e7935e168ae0e6f7257077dd33ddb274861679adccbf323280cef1efa3759fee"} name: "Automated Portfolio Analyst" "on": @@ -230,7 +230,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -319,7 +319,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -327,7 +327,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -346,7 +346,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -743,10 +743,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p 
"${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e 
GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -786,7 +787,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -797,7 +799,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -885,7 +887,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -907,13 +909,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: 
warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -956,7 +958,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -964,7 +966,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -973,7 +975,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1075,7 +1077,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1131,7 +1133,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1244,7 +1246,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1271,7 +1273,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1298,7 +1300,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1314,7 +1316,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1359,7 +1361,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1370,7 +1372,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/pr-nitpick-reviewer.lock.yml b/.github/workflows/pr-nitpick-reviewer.lock.yml index 476442f64a..595d052251 100644 --- a/.github/workflows/pr-nitpick-reviewer.lock.yml +++ b/.github/workflows/pr-nitpick-reviewer.lock.yml @@ -285,7 +285,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -337,7 +337,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -800,10 +800,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -831,7 +832,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -842,7 +844,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -927,7 +929,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -950,13 +952,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -999,7 +1001,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1007,7 +1009,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1109,7 +1111,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1165,7 +1167,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1344,7 +1346,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1371,7 +1373,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1398,7 +1400,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1414,7 +1416,7 @@ jobs: fi - name: Save cache-memory to cache 
(default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/pr-triage-agent.lock.yml b/.github/workflows/pr-triage-agent.lock.yml index cab19eb182..24ee348bf3 100644 --- a/.github/workflows/pr-triage-agent.lock.yml +++ b/.github/workflows/pr-triage-agent.lock.yml @@ -217,7 +217,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -715,10 +715,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e 
GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -746,7 +747,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -757,7 +759,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -842,7 +844,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -864,13 +866,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -915,7 +917,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -924,7 +926,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1026,7 +1028,7 @@ jobs: await main(); - name: Upload threat detection 
log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1082,7 +1084,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1196,7 +1198,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1260,7 +1262,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1287,7 +1289,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml index a37349d819..4ace4b96d9 100644 --- 
a/.github/workflows/prompt-clustering-analysis.lock.yml +++ b/.github/workflows/prompt-clustering-analysis.lock.yml @@ -30,7 +30,7 @@ # - shared/reporting.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"c4de13d6035c4be451ed44a8a1679b53fd3126eb44d3c20a52166258c3310a94"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"333c982fb324ddf949b1860ce52c84135950bd4bc02494c21e88e8aaacc0e360"} name: "Copilot Agent Prompt Clustering Analysis" "on": @@ -234,7 +234,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -284,7 +284,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -328,7 +328,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -336,7 +336,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -360,7 +360,7 @@ jobs: # Cache configuration from frontmatter processed below - name: Cache prompt clustering data - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: 
actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: prompt-clustering-cache-${{ github.run_id }} path: /tmp/gh-aw/prompt-cache @@ -369,7 +369,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -742,10 +742,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + 
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -782,7 +783,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -793,7 +795,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -931,7 +933,7 @@ jobs: 
SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -953,7 +955,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -994,7 +996,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1002,7 +1004,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1114,7 +1116,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1169,7 +1171,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: 
/tmp/gh-aw/safeoutputs/ @@ -1277,7 +1279,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1304,7 +1306,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1331,7 +1333,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1347,7 +1349,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml index 63325f7dd9..079fbbf8da 100644 --- a/.github/workflows/python-data-charts.lock.yml +++ b/.github/workflows/python-data-charts.lock.yml @@ -228,7 +228,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: 
/tmp/gh-aw/aw-prompts/prompt.txt @@ -276,7 +276,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -313,7 +313,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -321,7 +321,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -334,7 +334,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -731,10 +731,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -774,7 +775,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -785,7 +787,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -873,7 +875,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -895,13 +897,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -944,7 +946,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -952,7 +954,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -961,7 +963,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1063,7 +1065,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1119,7 +1121,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: 
/tmp/gh-aw/safeoutputs/ @@ -1227,7 +1229,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1254,7 +1256,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1281,7 +1283,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1297,7 +1299,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1342,7 +1344,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1353,7 +1355,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/q.lock.yml b/.github/workflows/q.lock.yml index e9043da616..ea9c148b7d 100644 --- a/.github/workflows/q.lock.yml +++ b/.github/workflows/q.lock.yml @@ -302,7 +302,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -351,7 +351,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -386,7 +386,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -814,10 +814,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v 
/tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -865,7 +866,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -876,7 +878,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -961,7 +963,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -984,13 +986,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1033,7 +1035,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1041,7 +1043,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1144,7 +1146,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1200,7 +1202,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1401,7 +1403,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1412,7 +1414,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1457,7 +1459,7 @@ jobs: await main(); - name: Upload safe 
output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1484,7 +1486,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1500,7 +1502,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/refiner.lock.yml b/.github/workflows/refiner.lock.yml index 35b9f30a9d..e493a99a03 100644 --- a/.github/workflows/refiner.lock.yml +++ b/.github/workflows/refiner.lock.yml @@ -241,7 +241,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -686,10 +686,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e 
GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -717,7 +718,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -728,7 +730,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -813,7 +815,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -835,13 +837,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -886,7 +888,7 @@ jobs: - name: Upload agent artifacts if: always() 
continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -989,7 +991,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1044,7 +1046,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1205,7 +1207,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1216,7 +1218,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1261,7 +1263,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff 
--git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml index 8876cd85e0..b994626962 100644 --- a/.github/workflows/release.lock.yml +++ b/.github/workflows/release.lock.yml @@ -23,7 +23,7 @@ # # Build, test, and release gh-aw extension, then generate and prepend release highlights # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6e1c4327b76ef6c7191012d95530b88b4c3d8d8d18ebbdb75a5e8630f0713d96"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"e3397ad5bd389e6aafb3dc65af1f61179419caa966a25715f12fd46e88f4fa48"} name: "Release" "on": @@ -219,7 +219,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -620,10 +620,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e 
GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -651,7 +652,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -662,7 +664,7 @@ jobs: const { 
generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -747,7 +749,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -769,13 +771,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -820,7 +822,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -922,7 +924,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -974,7 +976,7 @@ 
jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1056,7 +1058,7 @@ jobs: release_tag: ${{ steps.compute_config.outputs.release_tag }} steps: - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: false @@ -1202,7 +1204,7 @@ jobs: release_id: ${{ steps.get_release.outputs.release_id }} steps: - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: true @@ -1217,7 +1219,7 @@ jobs: env: RELEASE_TAG: ${{ needs.config.outputs.release_tag }} - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: false go-version-file: go.mod @@ -1229,9 +1231,9 @@ jobs: env: RELEASE_TAG: ${{ needs.config.outputs.release_tag }} - name: Setup Docker Buildx (pre-validation) - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Build Docker image (validation only) - uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: build-args: | BINARY=dist/linux-amd64 @@ -1262,13 +1264,13 @@ jobs: - name: Download Go modules run: go mod download - name: Generate SBOM (SPDX format) - uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0 + uses: 
anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0 with: artifact-name: sbom.spdx.json format: spdx-json output-file: sbom.spdx.json - name: Generate SBOM (CycloneDX format) - uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0 + uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0 with: artifact-name: sbom.cdx.json format: cyclonedx-json @@ -1282,7 +1284,7 @@ jobs: fi echo "✓ No secrets detected in SBOM files" - name: Upload SBOM artifacts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: sbom-artifacts path: | @@ -1300,16 +1302,16 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} RELEASE_TAG: ${{ needs.config.outputs.release_tag }} - name: Setup Docker Buildx - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Log in to GitHub Container Registry - uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0 with: password: ${{ secrets.GITHUB_TOKEN }} registry: ghcr.io username: ${{ github.actor }} - name: Extract metadata for Docker id: meta - uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5 + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0 with: images: ghcr.io/${{ github.repository }} tags: | @@ -1320,7 +1322,7 @@ jobs: type=raw,value=latest,enable={{is_default_branch}} - name: Build and push Docker image (amd64) id: build - uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 with: build-args: | BINARY=dist/linux-amd64 @@ -1365,7 +1367,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output 
artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1392,7 +1394,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/release.md b/.github/workflows/release.md index 962bbfe001..7f11b7ff9d 100644 --- a/.github/workflows/release.md +++ b/.github/workflows/release.md @@ -44,7 +44,7 @@ jobs: release_tag: ${{ steps.compute_config.outputs.release_tag }} steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 with: fetch-depth: 0 persist-credentials: false @@ -155,7 +155,7 @@ jobs: release_id: ${{ steps.get_release.outputs.release_id }} steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 with: fetch-depth: 0 persist-credentials: true @@ -172,7 +172,7 @@ jobs: echo "✓ Tag created: $RELEASE_TAG" - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: false # Disabled for release security - prevent cache poisoning attacks @@ -186,10 +186,10 @@ jobs: echo "✓ Binaries built successfully" - name: Setup Docker Buildx (pre-validation) - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v3.12.0 - name: Build Docker image (validation only) - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v6.19.2 with: context: . 
platforms: linux/amd64 @@ -223,14 +223,14 @@ jobs: run: go mod download - name: Generate SBOM (SPDX format) - uses: anchore/sbom-action@v0 + uses: anchore/sbom-action@v0.23.0 with: artifact-name: sbom.spdx.json output-file: sbom.spdx.json format: spdx-json - name: Generate SBOM (CycloneDX format) - uses: anchore/sbom-action@v0 + uses: anchore/sbom-action@v0.23.0 with: artifact-name: sbom.cdx.json output-file: sbom.cdx.json @@ -246,7 +246,7 @@ jobs: echo "✓ No secrets detected in SBOM files" - name: Upload SBOM artifacts - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: sbom-artifacts path: | @@ -266,10 +266,10 @@ jobs: echo "✓ SBOM files uploaded to release" - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v3.12.0 - name: Log in to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@v3.7.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -277,7 +277,7 @@ jobs: - name: Extract metadata for Docker id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@v5.10.0 with: images: ghcr.io/${{ github.repository }} tags: | @@ -289,7 +289,7 @@ jobs: - name: Build and push Docker image (amd64) id: build - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v6.19.2 with: context: . 
platforms: linux/amd64 diff --git a/.github/workflows/repo-audit-analyzer.lock.yml b/.github/workflows/repo-audit-analyzer.lock.yml index 68d52d332f..f55a9793b6 100644 --- a/.github/workflows/repo-audit-analyzer.lock.yml +++ b/.github/workflows/repo-audit-analyzer.lock.yml @@ -226,7 +226,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: run: | mkdir -p /tmp/gh-aw/cache-memory-repo-audits - name: Restore cache-memory file share data (repo-audits) - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: repo-audits-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory-repo-audits @@ -623,10 +623,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e 
GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -654,7 +655,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + 
"payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -665,7 +667,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -750,7 +752,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -772,13 +774,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -821,7 +823,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact (repo-audits) - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory-repo-audits @@ -829,7 +831,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # 
v7.0.0 with: name: agent-artifacts path: | @@ -931,7 +933,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -986,7 +988,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1097,7 +1099,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1124,7 +1126,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1151,7 +1153,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (repo-audits) id: download_cache_repo_audits - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory-repo-audits @@ -1167,7 +1169,7 @@ jobs: fi - name: Save cache-memory to cache (repo-audits) if: steps.check_cache_repo_audits.outputs.has_content == 'true' - uses: 
actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: repo-audits-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory-repo-audits diff --git a/.github/workflows/repo-tree-map.lock.yml b/.github/workflows/repo-tree-map.lock.yml index 4516e938e9..676e003842 100644 --- a/.github/workflows/repo-tree-map.lock.yml +++ b/.github/workflows/repo-tree-map.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -599,10 +599,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e 
GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -630,7 +631,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -641,7 +643,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - 
name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -726,7 +728,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -748,13 +750,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -799,7 +801,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -901,7 +903,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -955,7 +957,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1063,7 +1065,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1090,7 +1092,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/repository-quality-improver.lock.yml b/.github/workflows/repository-quality-improver.lock.yml index 7fae5ad1b5..4c8446c145 100644 --- a/.github/workflows/repository-quality-improver.lock.yml +++ b/.github/workflows/repository-quality-improver.lock.yml @@ -223,7 +223,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -279,7 +279,7 @@ jobs: run: | mkdir -p /tmp/gh-aw/cache-memory-focus-areas - name: Restore cache-memory file share data (focus-areas) - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: quality-focus-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory-focus-areas @@ -622,10 +622,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -661,7 +662,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -672,7 +674,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -757,7 +759,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -779,13 +781,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -828,7 +830,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact (focus-areas) - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory-focus-areas @@ -836,7 +838,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -938,7 +940,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -993,7 +995,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1101,7 +1103,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: 
/tmp/gh-aw/safeoutputs/ @@ -1128,7 +1130,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1155,7 +1157,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (focus-areas) id: download_cache_focus_areas - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory-focus-areas @@ -1171,7 +1173,7 @@ jobs: fi - name: Save cache-memory to cache (focus-areas) if: steps.check_cache_focus_areas.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: quality-focus-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory-focus-areas diff --git a/.github/workflows/research.lock.yml b/.github/workflows/research.lock.yml index b19d24dd9f..85bddc9568 100644 --- a/.github/workflows/research.lock.yml +++ b/.github/workflows/research.lock.yml @@ -224,7 +224,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -610,10 +610,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e 
GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -654,7 +655,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -665,7 +667,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -752,7 +754,7 @@ jobs: SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -774,13 +776,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -825,7 +827,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -927,7 +929,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -981,7 +983,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1089,7 +1091,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1116,7 +1118,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml index 7291ec2eba..c4def1f563 100644 --- a/.github/workflows/safe-output-health.lock.yml +++ 
b/.github/workflows/safe-output-health.lock.yml @@ -224,7 +224,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -274,7 +274,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -316,7 +316,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -689,10 +689,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e 
GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -729,7 +730,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -740,7 +742,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -893,7 +895,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -915,7 +917,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -956,7 +958,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -964,7 +966,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1076,7 +1078,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1131,7 +1133,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1239,7 +1241,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1266,7 +1268,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1293,7 +1295,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1309,7 +1311,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git 
a/.github/workflows/schema-consistency-checker.lock.yml b/.github/workflows/schema-consistency-checker.lock.yml index 50b0bc930f..0da8826067 100644 --- a/.github/workflows/schema-consistency-checker.lock.yml +++ b/.github/workflows/schema-consistency-checker.lock.yml @@ -219,7 +219,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -274,7 +274,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: schema-consistency-cache-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -622,10 +622,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e 
GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -652,7 +653,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 
} } GH_AW_MCP_CONFIG_EOF @@ -663,7 +665,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -801,7 +803,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -823,7 +825,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -864,7 +866,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -872,7 +874,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -984,7 +986,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1039,7 +1041,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1147,7 +1149,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1174,7 +1176,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1201,7 +1203,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1217,7 +1219,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: schema-consistency-cache-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/scout.lock.yml b/.github/workflows/scout.lock.yml index 
6344696d28..2081f3b7ca 100644 --- a/.github/workflows/scout.lock.yml +++ b/.github/workflows/scout.lock.yml @@ -341,7 +341,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -396,7 +396,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -761,10 +761,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e 
GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -812,7 +813,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -823,7 +825,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -985,7 +987,7 @@ jobs: SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1008,7 +1010,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -1049,7 +1051,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1057,7 +1059,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1169,7 +1171,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: 
/tmp/gh-aw/threat-detection/detection.log @@ -1225,7 +1227,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1407,7 +1409,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1434,7 +1436,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1461,7 +1463,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1477,7 +1479,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/security-alert-burndown.campaign.g.lock.yml b/.github/workflows/security-alert-burndown.campaign.g.lock.yml new file mode 100644 index 0000000000..aa2c5a4063 --- /dev/null 
+++ b/.github/workflows/security-alert-burndown.campaign.g.lock.yml @@ -0,0 +1,1592 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw. DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Orchestrator workflow for campaign 'security-alert-burndown' +# +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"6cd777197ff20097174c752a68c0620617aa3b420fcd5f527dbab177c2a681ea"} + +name: "Security Alert Burndown" +"on": + # roles: # Roles processed as role check in pre-activation job + # - admin # Roles processed as role check in pre-activation job + # - maintainer # Roles processed as role check in pre-activation job + # - write # Roles processed as role check in pre-activation job + schedule: + - cron: "0 18 * * *" + workflow_dispatch: + +permissions: {} + +concurrency: + cancel-in-progress: false + group: campaign-security-alert-burndown-orchestrator-${{ github.ref }} + +run-name: "Security Alert Burndown" + +jobs: + activation: + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + comment_id: "" + comment_repo: "" + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: 
/opt/gh-aw/actions + - name: Validate ANTHROPIC_API_KEY secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh ANTHROPIC_API_KEY 'Claude Code' https://github.github.com/gh-aw/reference/engines/#anthropic-claude-code + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + - name: Validate context variables + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); + await main(); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "security-alert-burndown.campaign.g.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + run: | + bash 
/opt/gh-aw/actions/create_prompt_first.sh + { + cat << 'GH_AW_PROMPT_EOF' + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" + cat "/opt/gh-aw/prompts/markdown.md" + cat "/opt/gh-aw/prompts/repo_memory_prompt_multi.md" + cat "/opt/gh-aw/prompts/safe_outputs_prompt.md" + cat << 'GH_AW_PROMPT_EOF' + + Tools: add_comment, create_issue, update_project, create_project_status_update, missing_tool, missing_data, noop + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' + {{#runtime-import .github/workflows/security-alert-burndown.campaign.g.md}} + GH_AW_PROMPT_EOF + } > "$GH_AW_PROMPT" + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = 
require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_MEMORY_ALLOWED_EXTENSIONS: '' + GH_AW_MEMORY_LIST: "- **campaigns**: `/tmp/gh-aw/repo-memory/campaigns/` (branch: `memory/campaigns`)\n" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_MEMORY_ALLOWED_EXTENSIONS: process.env.GH_AW_MEMORY_ALLOWED_EXTENSIONS, + GH_AW_MEMORY_LIST: process.env.GH_AW_MEMORY_LIST + } + }); + - name: Validate prompt placeholders + env: + 
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload prompt artifact + if: success() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + concurrency: + group: "gh-aw-claude-${{ github.workflow }}" + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: securityalertburndown.campaign.g + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }} + detection_success: ${{ steps.detection_conclusion.outputs.success }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + model: ${{ steps.generate_aw_info.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw 
temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Create workspace directory + run: mkdir -p ./.gh-aw + - env: + GH_AW_CAMPAIGN_ID: security-alert-burndown + GH_AW_CURSOR_PATH: /tmp/gh-aw/repo-memory/campaigns/security-alert-burndown/cursor.json + GH_AW_DISCOVERY_REPOS: githubnext/gh-aw + GH_AW_MAX_DISCOVERY_ITEMS: "50" + GH_AW_MAX_DISCOVERY_PAGES: "3" + GH_AW_PROJECT_URL: https://github.com/orgs/githubnext/projects/122 + GH_AW_TRACKER_LABEL: campaign:security-alert-burndown + GH_AW_WORKFLOWS: code-scanning-fixer,security-fix-pr,security-review + id: discovery + name: Run campaign discovery precomputation + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: |- + + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/campaign_discovery.cjs'); + await main(); + + # Repo memory git-based storage configuration from frontmatter processed below + - name: Clone repo-memory branch (campaigns) + env: + GH_TOKEN: ${{ github.token }} + GITHUB_SERVER_URL: ${{ github.server_url }} + BRANCH_NAME: memory/campaigns + TARGET_REPO: ${{ github.repository }} + MEMORY_DIR: /tmp/gh-aw/repo-memory/campaigns + CREATE_ORPHAN: true + run: bash /opt/gh-aw/actions/clone_repo_memory_branch.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git 
configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + (github.event.pull_request) || (github.event.issue.pull_request) + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "claude", + engine_name: "Claude Code", + model: process.env.GH_AW_MODEL_AGENT_CLAUDE || "", + version: "", + agent_version: "2.1.62", + workflow_name: "Security Alert Burndown", + experimental: false, + supports_tools_allowlist: true, + run_id: context.runId, + run_number: context.runNumber, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + '/' + context.repo.repo, + ref: context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.23.0", + awmg_version: "v0.1.6", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', 
awInfo.model); + - name: Setup Node.js + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + with: + node-version: '24' + package-manager-cache: false + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.23.0 + - name: Install Claude Code CLI + run: npm install -g --silent @anthropic-ai/claude-code@2.1.62 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.23.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.23.0 ghcr.io/github/gh-aw-firewall/squid:0.23.0 ghcr.io/github/gh-aw-mcpg:v0.1.6 ghcr.io/github/github-mcp-server:v0.31.0 node:lts-alpine + - name: Write Safe Outputs Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"add_comment":{"max":3},"create_issue":{"max":1},"create_project_status_update":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1},"update_project":{"max":10}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. 
CONSTRAINTS: Maximum 1 issue(s) can be created.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Detailed issue description in Markdown. Do NOT repeat the title as a heading since it already appears as the issue's h1. Include context, reproduction steps, or acceptance criteria as appropriate.", + "type": "string" + }, + "labels": { + "description": "Labels to categorize the issue (e.g., 'bug', 'enhancement'). Labels must exist in the repository.", + "items": { + "type": "string" + }, + "type": "array" + }, + "parent": { + "description": "Parent issue number for creating sub-issues. This is the numeric ID from the GitHub URL (e.g., 42 in github.com/owner/repo/issues/42). Can also be a temporary_id (e.g., 'aw_abc123', 'aw_Test123') from a previously created issue in the same workflow run.", + "type": [ + "number", + "string" + ] + }, + "temporary_id": { + "description": "Unique temporary identifier for referencing this issue before it's created. Format: 'aw_' followed by 3 to 8 alphanumeric characters (e.g., 'aw_abc1', 'aw_Test123'). Use '#aw_ID' in body text to reference other issues by their temporary_id; these are replaced with actual issue numbers after creation.", + "pattern": "^aw_[A-Za-z0-9]{3,8}$", + "type": "string" + }, + "title": { + "description": "Concise issue title summarizing the bug, feature, or task. The title appears as the main heading, so keep it brief and descriptive.", + "type": "string" + } + }, + "required": [ + "title", + "body" + ], + "type": "object" + }, + "name": "create_issue" + }, + { + "description": "Add a comment to an existing GitHub issue, pull request, or discussion. Use this to provide feedback, answer questions, or add information to an existing conversation. For creating new items, use create_issue, create_discussion, or create_pull_request instead. 
IMPORTANT: Comments are subject to validation constraints enforced by the MCP server - maximum 65536 characters for the complete comment (including footer which is added automatically), 10 mentions (@username), and 50 links. Exceeding these limits will result in an immediate error with specific guidance. NOTE: By default, this tool requires discussions:write permission. If your GitHub App lacks Discussions permission, set 'discussions: false' in the workflow's safe-outputs.add-comment configuration to exclude this permission. CONSTRAINTS: Maximum 3 comment(s) can be added.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "The comment text in Markdown format. This is the 'body' field - do not use 'comment_body' or other variations. Provide helpful, relevant information that adds value to the conversation. CONSTRAINTS: The complete comment (your body text + automatically added footer) must not exceed 65536 characters total. Maximum 10 mentions (@username), maximum 50 links (http/https URLs). A footer (~200-500 characters) is automatically appended with workflow attribution, so leave adequate space. If these limits are exceeded, the tool call will fail with a detailed error message indicating which constraint was violated.", + "type": "string" + }, + "item_number": { + "description": "The issue, pull request, or discussion number to comment on. This is the numeric ID from the GitHub URL (e.g., 123 in github.com/owner/repo/issues/123). If omitted, the tool auto-targets the issue, PR, or discussion that triggered this workflow. Auto-targeting only works for issue, pull_request, discussion, and comment event triggers — it does NOT work for schedule, workflow_dispatch, push, or workflow_run triggers. 
For those trigger types, always provide item_number explicitly, or the comment will be silently discarded.", + "type": "number" + } + }, + "required": [ + "body" + ], + "type": "object" + }, + "name": "add_comment" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "message": { + "description": "Status or completion message to log. 
Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Manage GitHub Projects: add issues/pull requests/draft issues, update item fields (status, priority, effort, dates), manage custom fields, and create project views. Use this to organize work by adding items to projects, updating field values, creating custom fields up-front, and setting up project views (table, board, roadmap).\n\nThree modes: (1) Add or update project items with custom field values; (2) Create project fields; (3) Create project views. This is the primary tool for ProjectOps automation - add items to projects, set custom fields for tracking, and organize project boards. CONSTRAINTS: Maximum 10 project operation(s) can be performed. Default project URL: \"https://github.com/orgs/githubnext/projects/122\".", + "inputSchema": { + "additionalProperties": false, + "properties": { + "content_number": { + "description": "Issue or pull request number to add to the project. This is the numeric ID from the GitHub URL (e.g., 123 in github.com/owner/repo/issues/123 for issue #123, or 456 in github.com/owner/repo/pull/456 for PR #456), or a temporary ID from a recent create_issue call (e.g., 'aw_abc123', '#aw_Test123'). Required when content_type is 'issue' or 'pull_request'.", + "type": [ + "number", + "string" + ] + }, + "content_type": { + "description": "Type of item to add to the project. Use 'issue' or 'pull_request' to add existing repo content, or 'draft_issue' to create a draft item inside the project. Required when operation is not specified.", + "enum": [ + "issue", + "pull_request", + "draft_issue" + ], + "type": "string" + }, + "create_if_missing": { + "description": "Whether to create the project if it doesn't exist. Defaults to false. 
Requires projects:write permission when true.", + "type": "boolean" + }, + "draft_body": { + "description": "Optional body for a Projects v2 draft issue (markdown). Only used when content_type is 'draft_issue'.", + "type": "string" + }, + "draft_issue_id": { + "description": "Temporary ID of an existing draft issue to update (e.g., 'aw_abc1', '#aw_Test123'). Use this to reference a draft created earlier with a matching temporary_id. When provided, draft_title is not required for updates.", + "pattern": "^#?aw_[A-Za-z0-9]{3,8}$", + "type": "string" + }, + "draft_title": { + "description": "Title for a Projects v2 draft issue. Required when content_type is 'draft_issue'.", + "type": "string" + }, + "field_definitions": { + "description": "Field definitions to create when operation is create_fields. Required when operation='create_fields'.", + "items": { + "additionalProperties": false, + "properties": { + "data_type": { + "description": "Field type. Use SINGLE_SELECT with options for enumerated values.", + "enum": [ + "TEXT", + "NUMBER", + "DATE", + "SINGLE_SELECT", + "ITERATION" + ], + "type": "string" + }, + "name": { + "description": "Field name to create (e.g., 'size', 'priority').", + "type": "string" + }, + "options": { + "description": "Options for SINGLE_SELECT fields.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": [ + "name", + "data_type" + ], + "type": "object" + }, + "type": "array" + }, + "fields": { + "description": "Custom field values to set on the project item (e.g., {'Status': 'In Progress', 'Priority': 'High'}). Field names must match custom fields defined in the project.", + "type": "object" + }, + "operation": { + "description": "Optional operation mode. Use create_fields to create required fields up-front, or create_view to add a project view. 
When omitted, the tool adds/updates project items.", + "enum": [ + "create_fields", + "create_view" + ], + "type": "string" + }, + "project": { + "description": "Full GitHub project URL (e.g., 'https://github.com/orgs/myorg/projects/42' or 'https://github.com/users/username/projects/5'), or a temporary project ID from a recent create_project call (e.g., '#aw_abc1', 'aw_Test123'). Project names or numbers alone are NOT accepted.", + "pattern": "^(https://github\\.com/(orgs|users)/[^/]+/projects/\\d+|#?aw_[A-Za-z0-9]{3,8})$", + "type": "string" + }, + "temporary_id": { + "description": "Unique temporary identifier for this draft issue (e.g., 'aw_abc1', '#aw_Test123'). Provide this when creating a new draft to enable future updates via draft_issue_id. Format: optional leading '#', then 'aw_' followed by 3 to 8 alphanumeric characters.", + "pattern": "^#?aw_[A-Za-z0-9]{3,8}$", + "type": "string" + }, + "view": { + "additionalProperties": false, + "description": "View definition to create when operation is create_view. Required when operation='create_view'.", + "properties": { + "filter": { + "type": "string" + }, + "layout": { + "enum": [ + "table", + "board", + "roadmap" + ], + "type": "string" + }, + "name": { + "type": "string" + }, + "visible_fields": { + "description": "Field IDs to show in the view (table/board only).", + "items": { + "type": "number" + }, + "type": "array" + } + }, + "required": [ + "name", + "layout" + ], + "type": "object" + } + }, + "required": [ + "project" + ], + "type": "object" + }, + "name": "update_project" + }, + { + "description": "Report that data or information needed to complete the task is not available. 
Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + }, + { + "description": "Post a status update to a GitHub Project to communicate progress and health. Use this to provide stakeholders with regular updates on project status (on-track, at-risk, off-track, complete, inactive), timeline information, and progress summaries. Status updates create a historical record of project progress, enabling tracking over time and informed decision-making. CONSTRAINTS: Maximum 1 status update(s) can be created. Default project URL: \"https://github.com/orgs/githubnext/projects/122\".", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Status update body in markdown format describing progress, findings, trends, and next steps. Should provide stakeholders with clear understanding of project state.", + "type": "string" + }, + "project": { + "description": "Full GitHub project URL (e.g., 'https://github.com/orgs/myorg/projects/42' or 'https://github.com/users/username/projects/5'). 
Project names or numbers alone are NOT accepted.", + "pattern": "^https://github\\.com/(orgs|users)/[^/]+/projects/\\d+$", + "type": "string" + }, + "start_date": { + "description": "Optional project start date in YYYY-MM-DD format (e.g., '2026-01-06').", + "pattern": "^\\d{4}-\\d{2}-\\d{2}$", + "type": "string" + }, + "status": { + "description": "Status indicator for the project. Defaults to ON_TRACK. Values: ON_TRACK (progressing well), AT_RISK (has issues/blockers), OFF_TRACK (significantly behind), COMPLETE (finished), INACTIVE (paused/cancelled).", + "enum": [ + "ON_TRACK", + "AT_RISK", + "OFF_TRACK", + "COMPLETE", + "INACTIVE" + ], + "type": "string" + }, + "target_date": { + "description": "Optional project target/end date in YYYY-MM-DD format (e.g., '2026-12-31').", + "pattern": "^\\d{4}-\\d{2}-\\d{2}$", + "type": "string" + } + }, + "required": [ + "project", + "body" + ], + "type": "object" + }, + "name": "create_project_status_update" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "add_comment": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "item_number": { + "issueOrPRNumber": true + }, + "repo": { + "type": "string", + "maxLength": 256 + } + } + }, + "create_issue": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "labels": { + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 128 + }, + "parent": { + "issueOrPRNumber": true + }, + "repo": { + "type": "string", + "maxLength": 256 + }, + "temporary_id": { + "type": "string" + }, + "title": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "create_project_status_update": { + "defaultMax": 10, + "fields": { + "body": { + "required": true, + "type": 
"string", + "sanitize": true, + "maxLength": 65536 + }, + "project": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 512, + "pattern": "^https://[^/]+/(orgs|users)/[^/]+/projects/\\d+", + "patternError": "must be a full GitHub project URL (e.g., https://github.com/orgs/myorg/projects/42)" + }, + "start_date": { + "type": "string", + "pattern": "^\\d{4}-\\d{2}-\\d{2}$", + "patternError": "must be in YYYY-MM-DD format" + }, + "status": { + "type": "string", + "enum": [ + "INACTIVE", + "ON_TRACK", + "AT_RISK", + "OFF_TRACK", + "COMPLETE" + ] + }, + "target_date": { + "type": "string", + "pattern": "^\\d{4}-\\d{2}-\\d{2}$", + "patternError": "must be in YYYY-MM-DD format" + } + } + }, + "missing_data": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "context": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "data_type": { + "type": "string", + "sanitize": true, + "maxLength": 128 + }, + "reason": { + "type": "string", + "sanitize": true, + "maxLength": 256 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + }, + "update_project": { + "defaultMax": 10, + "fields": { + "content_number": { + "issueNumberOrTemporaryId": true + }, + "content_type": { + "type": "string", + "enum": [ + "issue", + "pull_request", + "draft_issue" + ] + }, + "draft_body": { + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "draft_title": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "fields": { + "type": 
"object" + }, + "issue": { + "optionalPositiveInteger": true + }, + "project": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 512, + "pattern": "^https://[^/]+/(orgs|users)/[^/]+/projects/\\d+", + "patternError": "must be a full GitHub project URL (e.g., https://github.com/orgs/myorg/projects/42)" + }, + "pull_request": { + "optionalPositiveInteger": true + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + 
GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" + export DEBUG="*" + + export GH_AW_ENGINE="claude" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' 
+ + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "container": "ghcr.io/github/github-mcp-server:v0.31.0", + "env": { + "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", + "GITHUB_PERSONAL_ACCESS_TOKEN": "$GITHUB_MCP_SERVER_TOKEN", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "context,repos,issues,pull_requests,actions,code_security" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "$GH_AW_SAFE_OUTPUTS_API_KEY" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 + } + } + GH_AW_MCP_CONFIG_EOF + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Download prompt artifact + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute Claude Code CLI + id: agentic_execution + # Allowed tools (sorted): + # - Bash + # - BashOutput + # - Edit + # - ExitPlanMode + # - Glob + # - Grep + # - KillBash + # - LS + # - MultiEdit + # - NotebookEdit + # - NotebookRead + # - Read + # - Task + # - TodoWrite + # - Write + # - mcp__github__download_workflow_run_artifact + # - mcp__github__get_code_scanning_alert + # - mcp__github__get_commit + # - mcp__github__get_dependabot_alert + # - mcp__github__get_discussion + # - mcp__github__get_discussion_comments + # - mcp__github__get_file_contents + # - mcp__github__get_job_logs + # - mcp__github__get_label + # - 
mcp__github__get_latest_release + # - mcp__github__get_me + # - mcp__github__get_notification_details + # - mcp__github__get_pull_request + # - mcp__github__get_pull_request_comments + # - mcp__github__get_pull_request_diff + # - mcp__github__get_pull_request_files + # - mcp__github__get_pull_request_review_comments + # - mcp__github__get_pull_request_reviews + # - mcp__github__get_pull_request_status + # - mcp__github__get_release_by_tag + # - mcp__github__get_secret_scanning_alert + # - mcp__github__get_tag + # - mcp__github__get_workflow_run + # - mcp__github__get_workflow_run_logs + # - mcp__github__get_workflow_run_usage + # - mcp__github__issue_read + # - mcp__github__list_branches + # - mcp__github__list_code_scanning_alerts + # - mcp__github__list_commits + # - mcp__github__list_dependabot_alerts + # - mcp__github__list_discussion_categories + # - mcp__github__list_discussions + # - mcp__github__list_issue_types + # - mcp__github__list_issues + # - mcp__github__list_label + # - mcp__github__list_notifications + # - mcp__github__list_pull_requests + # - mcp__github__list_releases + # - mcp__github__list_secret_scanning_alerts + # - mcp__github__list_starred_repositories + # - mcp__github__list_tags + # - mcp__github__list_workflow_jobs + # - mcp__github__list_workflow_run_artifacts + # - mcp__github__list_workflow_runs + # - mcp__github__list_workflows + # - mcp__github__pull_request_read + # - mcp__github__search_code + # - mcp__github__search_issues + # - mcp__github__search_orgs + # - mcp__github__search_pull_requests + # - mcp__github__search_repositories + # - mcp__github__search_users + timeout-minutes: 20 + run: | + set -o pipefail + # shellcheck disable=SC1003 + sudo -E awf --tty --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
"*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.23.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools 
Bash,BashOutput,Edit,ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,NotebookEdit,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 
2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + BASH_DEFAULT_TIMEOUT_MS: 60000 + BASH_MAX_TIMEOUT_MS: 60000 + DISABLE_BUG_COMMAND: 1 + DISABLE_ERROR_REPORTING: 1 + DISABLE_TELEMETRY: 1 + GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json + GH_AW_MODEL_AGENT_CLAUDE: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GITHUB_WORKSPACE: ${{ github.workspace }} + MCP_TIMEOUT: 120000 + MCP_TOOL_TIMEOUT: 60000 + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'ANTHROPIC_API_KEY,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_ANTHROPIC_API_KEY: ${{ 
secrets.ANTHROPIC_API_KEY }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = 
require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/agent-stdio.log + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_claude_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + # Upload repo memory as artifacts for push job + - name: Upload repo-memory artifact (campaigns) + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: repo-memory-campaigns + path: 
/tmp/gh-aw/repo-memory/campaigns + retention-days: 1 + if-no-files-found: ignore + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + if-no-files-found: ignore + # --- Threat Detection (inline) --- + - name: Check if detection needed + id: detection_guard + if: always() + env: + OUTPUT_TYPES: ${{ steps.collect_output.outputs.output_types }} + HAS_PATCH: ${{ steps.collect_output.outputs.has_patch }} + run: | + if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then + echo "run_detection=true" >> "$GITHUB_OUTPUT" + echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH" + else + echo "run_detection=false" >> "$GITHUB_OUTPUT" + echo "Detection skipped: no agent outputs or patches to analyze" + fi + - name: Clear MCP configuration for detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + rm -f /tmp/gh-aw/mcp-config/mcp-servers.json + rm -f /home/runner/.copilot/mcp-config.json + rm -f "$GITHUB_WORKSPACE/.gemini/settings.json" + - name: Prepare threat detection files + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection/aw-prompts + cp /tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true + cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true + for f in /tmp/gh-aw/aw-*.patch; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + echo "Prepared threat detection files:" + ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true + - name: Setup threat detection + if: always() && 
steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "Security Alert Burndown" + WORKFLOW_DESCRIPTION: "Orchestrator workflow for campaign 'security-alert-burndown'" + HAS_PATCH: ${{ steps.collect_output.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Execute Claude Code CLI + if: always() && steps.detection_guard.outputs.run_detection == 'true' + id: detection_agentic_execution + # Allowed tools (sorted): + # - Bash(cat) + # - Bash(grep) + # - Bash(head) + # - Bash(jq) + # - Bash(ls) + # - Bash(tail) + # - Bash(wc) + # - BashOutput + # - ExitPlanMode + # - Glob + # - Grep + # - KillBash + # - LS + # - NotebookRead + # - Read + # - Task + # - TodoWrite + timeout-minutes: 20 + run: | + set -o pipefail + # shellcheck disable=SC1003 + sudo -E awf --tty --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
"*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.23.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --allowed-tools '\''Bash(cat),Bash(grep),Bash(head),Bash(jq),Bash(ls),Bash(tail),Bash(wc),BashOutput,ExitPlanMode,Glob,Grep,KillBash,LS,NotebookRead,Read,Task,TodoWrite'\'' --debug-file /tmp/gh-aw/threat-detection/detection.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_CLAUDE:+ --model "$GH_AW_MODEL_DETECTION_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + BASH_DEFAULT_TIMEOUT_MS: 60000 + 
BASH_MAX_TIMEOUT_MS: 60000 + DISABLE_BUG_COMMAND: 1 + DISABLE_ERROR_REPORTING: 1 + DISABLE_TELEMETRY: 1 + GH_AW_MODEL_DETECTION_CLAUDE: ${{ vars.GH_AW_MODEL_DETECTION_CLAUDE || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_WORKSPACE: ${{ github.workspace }} + MCP_TIMEOUT: 120000 + MCP_TOOL_TIMEOUT: 60000 + - name: Parse threat detection results + id: parse_detection_results + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + - name: Set detection conclusion + id: detection_conclusion + if: always() + env: + RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }} + DETECTION_SUCCESS: ${{ steps.parse_detection_results.outputs.success }} + run: | + if [[ "$RUN_DETECTION" != "true" ]]; then + echo "conclusion=skipped" >> "$GITHUB_OUTPUT" + echo "success=true" >> "$GITHUB_OUTPUT" + echo "Detection was not needed, marking as skipped" + elif [[ "$DETECTION_SUCCESS" == "true" ]]; then + echo "conclusion=success" >> "$GITHUB_OUTPUT" + echo "success=true" >> "$GITHUB_OUTPUT" + echo "Detection passed successfully" + else + echo "conclusion=failure" >> "$GITHUB_OUTPUT" + echo "success=false" >> "$GITHUB_OUTPUT" + echo "Detection found issues" + fi + + conclusion: + needs: + - activation + - agent + - push_repo_memory + - safe_outputs + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + 
permissions: + contents: read + discussions: write + issues: write + pull-requests: write + outputs: + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: "1" + GH_AW_WORKFLOW_NAME: "Security Alert Burndown" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Alert Burndown" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + 
const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Alert Burndown" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "security-alert-burndown.campaign.g" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_REPO_MEMORY_VALIDATION_FAILED_campaigns: ${{ needs.push_repo_memory.outputs.validation_failed_campaigns }} + GH_AW_REPO_MEMORY_VALIDATION_ERROR_campaigns: ${{ needs.push_repo_memory.outputs.validation_error_campaigns }} + GH_AW_GROUP_REPORTS: "false" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Security Alert Burndown" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main 
} = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + + push_repo_memory: + needs: agent + if: always() && needs.agent.outputs.detection_success == 'true' + runs-on: ubuntu-latest + permissions: + contents: write + outputs: + validation_error_campaigns: ${{ steps.push_repo_memory_campaigns.outputs.validation_error }} + validation_failed_campaigns: ${{ steps.push_repo_memory_campaigns.outputs.validation_failed }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + sparse-checkout: . + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Download repo-memory artifact (campaigns) + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 + continue-on-error: true + with: + name: repo-memory-campaigns + path: /tmp/gh-aw/repo-memory/campaigns + - name: Push repo-memory changes (campaigns) + id: push_repo_memory_campaigns + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ github.token }} + GITHUB_RUN_ID: ${{ github.run_id }} + GITHUB_SERVER_URL: ${{ github.server_url }} + ARTIFACT_DIR: 
/tmp/gh-aw/repo-memory/campaigns + MEMORY_ID: campaigns + TARGET_REPO: ${{ github.repository }} + BRANCH_NAME: memory/campaigns + MAX_FILE_SIZE: 10240 + MAX_FILE_COUNT: 100 + MAX_PATCH_SIZE: 10240 + ALLOWED_EXTENSIONS: '[]' + FILE_GLOB_FILTER: "security-alert-burndown/**" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/push_repo_memory.cjs'); + await main(); + + safe_outputs: + needs: agent + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.agent.outputs.detection_success == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + discussions: write + issues: write + pull-requests: write + timeout-minutes: 15 + env: + GH_AW_ENGINE_ID: "claude" + GH_AW_WORKFLOW_ID: "security-alert-burndown.campaign.g" + GH_AW_WORKFLOW_NAME: "Security Alert Burndown" + outputs: + code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} + code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + safe-output-projects: 'true' + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 + with: + 
name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"max\":3},\"create_issue\":{\"max\":1},\"create_project_status_update\":{\"max\":1,\"project\":\"https://github.com/orgs/githubnext/projects/122\"},\"missing_data\":{},\"missing_tool\":{},\"update_project\":{\"max\":10,\"project\":\"https://github.com/orgs/githubnext/projects/122\"}}" + GH_AW_PROJECT_URL: 
"https://github.com/orgs/githubnext/projects/122" + GH_AW_PROJECT_GITHUB_TOKEN: ${{ secrets.GH_AW_PROJECT_GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_PROJECT_GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); + - name: Upload safe output items manifest + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: safe-output-items + path: /tmp/safe-output-items.jsonl + if-no-files-found: warn + diff --git a/.github/workflows/security-alert-burndown.campaign.g.md b/.github/workflows/security-alert-burndown.campaign.g.md new file mode 100644 index 0000000000..79387f1129 --- /dev/null +++ b/.github/workflows/security-alert-burndown.campaign.g.md @@ -0,0 +1,653 @@ +--- +name: "Security Alert Burndown" +description: "Orchestrator workflow for campaign 'security-alert-burndown'" +on: + roles: + - "admin" + - "maintainer" + - "write" + schedule: + - cron: "0 18 * * *" + workflow_dispatch: +concurrency: + group: "campaign-security-alert-burndown-orchestrator-${{ github.ref }}" + cancel-in-progress: false +engine: claude +safe-outputs: + add-comment: + max: 3 + create-issue: + max: 1 + create-project-status-update: + max: 1 + project: "https://github.com/orgs/githubnext/projects/122" + update-project: + max: 10 + project: "https://github.com/orgs/githubnext/projects/122" +runs-on: ubuntu-latest +tools: + bash: + - "*" + edit: null + github: + toolsets: + - default + - actions + - code_security + repo-memory: + - branch-name: memory/campaigns + file-glob: + - security-alert-burndown/** + id: campaigns +steps: +- name: Create workspace directory + run: mkdir -p ./.gh-aw +- env: + GH_AW_CAMPAIGN_ID: security-alert-burndown + GH_AW_CURSOR_PATH: /tmp/gh-aw/repo-memory/campaigns/security-alert-burndown/cursor.json + 
GH_AW_DISCOVERY_REPOS: githubnext/gh-aw + GH_AW_MAX_DISCOVERY_ITEMS: "50" + GH_AW_MAX_DISCOVERY_PAGES: "3" + GH_AW_PROJECT_URL: https://github.com/orgs/githubnext/projects/122 + GH_AW_TRACKER_LABEL: campaign:security-alert-burndown + GH_AW_WORKFLOWS: code-scanning-fixer,security-fix-pr,security-review + id: discovery + name: Run campaign discovery precomputation + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/campaign_discovery.cjs'); + await main(); +--- + + + + +# Campaign Orchestrator + +This workflow orchestrates the 'Security Alert Burndown' campaign. + +- Objective: Systematically burn down the code security alerts backlog, prioritizing file write vulnerabilities +- KPIs: + - High-Severity Alerts Fixed (primary): baseline 0 → target 20 over 30 days alerts + - File Write Vulnerabilities Fixed (supporting): baseline 0 → target 10 over 30 days alerts +- Associated workflows: code-scanning-fixer, security-fix-pr, security-review +- Memory paths: memory/campaigns/security-alert-burndown/** +- Metrics glob: `memory/campaigns/security-alert-burndown/metrics/*.json` +- Cursor glob: `memory/campaigns/security-alert-burndown/cursor.json` +- Project URL: https://github.com/orgs/githubnext/projects/122 +- Governance: max new items per run: 3 +- Governance: max discovery items per run: 50 +- Governance: max discovery pages per run: 3 +- Governance: opt-out labels: no-campaign, no-bot, wontfix +- Governance: max project updates per run: 10 +- Governance: max comments per run: 3 + +--- +# ORCHESTRATOR INSTRUCTIONS +--- +# Orchestrator Instructions + +This orchestrator coordinates a single campaign by discovering worker outputs, making deterministic 
decisions, +and synchronizing campaign state into a GitHub Project board. + +**Scope:** orchestration only (discovery, planning, pacing, reporting). +**Write authority:** all project write semantics are governed by **Project Update Instructions** and MUST be followed. + +--- + +## Traffic and Rate Limits (Required) + +- Minimize API calls; avoid full rescans when possible. +- Prefer incremental discovery with deterministic ordering (e.g., by `updatedAt`, tie-break by ID). +- Enforce strict pagination budgets; if a query requires many pages, stop early and continue next run. +- Use a durable cursor/checkpoint so the next run continues without rescanning. +- On throttling (HTTP 429 / rate-limit 403), do not retry aggressively; back off and end the run after reporting what remains. + + +**Cursor file (repo-memory)**: `memory/campaigns/security-alert-burndown/cursor.json` +**File system path**: `/tmp/gh-aw/repo-memory/campaigns/security-alert-burndown/cursor.json` +- If it exists: read first and continue from its boundary. +- If it does not exist: create it by end of run. +- Always write the updated cursor back to the same path. + + + +**Metrics snapshots (repo-memory)**: `memory/campaigns/security-alert-burndown/metrics/*.json` +**File system path**: `/tmp/gh-aw/repo-memory/campaigns/security-alert-burndown/metrics/*.json` +- Persist one append-only JSON metrics snapshot per run (new file per run; do not rewrite history). +- Use UTC date (`YYYY-MM-DD`) in the filename (example: `metrics/2025-12-22.json`). 
+- Each snapshot MUST include ALL required fields (even if zero): + - `campaign_id` (string): The campaign identifier + - `date` (string): UTC date in YYYY-MM-DD format + - `tasks_total` (number): Total number of tasks (>= 0, even if 0) + - `tasks_completed` (number): Completed task count (>= 0, even if 0) +- Optional fields (include only if available): `tasks_in_progress`, `tasks_blocked`, `velocity_per_day`, `estimated_completion` +- Example minimum valid snapshot: + ```json + { + "campaign_id": "security-alert-burndown", + "date": "2025-12-22", + "tasks_total": 0, + "tasks_completed": 0 + } + ``` + + + +**Read budget**: max discovery items per run: 50 + + +**Read budget**: max discovery pages per run: 3 + + +**Write budget**: max project updates per run: 10 + + +**Write budget**: max project comments per run: 3 + + +--- + +## Core Principles + +1. Workers are immutable and campaign-agnostic +2. The GitHub Project board is the authoritative campaign state +3. Correlation is explicit (tracker-id) +4. Reads and writes are separate steps (never interleave) +5. Idempotent operation is mandatory (safe to re-run) +6. Only predefined project fields may be updated +7. 
**Project Update Instructions take precedence for all project writes** + +--- + +## Execution Steps (Required Order) + +### Step 0 — Epic Issue Initialization [FIRST RUN ONLY] + +**Campaign Epic Issue Requirements:** +- Each project board MUST have exactly ONE Epic issue representing the campaign +- The Epic serves as the parent for all campaign work issues +- The Epic is narrative-only and tracks overall campaign progress + +**On every run, before other steps:** + +1) **Check for existing Epic issue** by searching the repository for: + - An open issue with label `epic` or `type:epic` + - Body text containing: `campaign_id: security-alert-burndown` + +2) **If no Epic issue exists**, create it using `create-issue`: + ```yaml + create-issue: + title: "Security Alert Burndown" + body: | + ## Campaign Overview + + **Objective**: Systematically burn down the code security alerts backlog, prioritizing file write vulnerabilities + + This Epic issue tracks the overall progress of the campaign. All work items are sub-issues of this Epic. + + **Campaign Details:** + - Campaign ID: `security-alert-burndown` + - Project Board: https://github.com/orgs/githubnext/projects/122 + - Worker Workflows: `code-scanning-fixer`, `security-fix-pr`, `security-review` + + --- + `campaign_id: security-alert-burndown` + labels: + - epic + - type:epic + ``` + +3) **After creating the Epic** (or if Epic exists but not on board), add it to the project board: + ```yaml + update-project: + project: "https://github.com/orgs/githubnext/projects/122" + campaign_id: "security-alert-burndown" + content_type: "issue" + content_number: + fields: + status: "In Progress" + campaign_id: "security-alert-burndown" + worker_workflow: "unknown" + repository: "" + priority: "High" + size: "Large" + start_date: "" + end_date: "" + ``` + +4) **Record the Epic issue number** in repo-memory for reference (e.g., in cursor file or metadata). 
+ +**Note:** This step typically runs only on the first orchestrator execution. On subsequent runs, verify the Epic exists and is on the board, but do not recreate it. + +--- + +### Step 1 — Read State (Discovery) [NO WRITES] + +**IMPORTANT**: Discovery has been precomputed. Read the discovery manifest instead of performing GitHub-wide searches. + +1) Read the precomputed discovery manifest: `./.gh-aw/campaign.discovery.json` + - This manifest contains all discovered worker outputs with normalized metadata + - Schema version: v1 + - Fields: campaign_id, generated_at, discovery (total_items, cursor info), summary (counts), items (array of normalized items) + +2) Read current GitHub Project board state (items + required fields). + +3) Parse discovered items from the manifest: + - Each item has: url, content_type (issue/pull_request/discussion), number, repo, created_at, updated_at, state + - Closed items have: closed_at (for issues) or merged_at (for PRs) + - Items are pre-sorted by updated_at for deterministic processing + +4) Check the manifest summary for work counts: + - `needs_add_count`: Number of items that need to be added to the project + - `needs_update_count`: Number of items that need status updates + - If both are 0, you may skip to reporting step + +5) Discovery cursor is maintained automatically in repo-memory; do not modify it manually. + +### Step 2 — Make Decisions (Planning) [NO WRITES] + +5) Determine desired `status` strictly from explicit GitHub state: +- Open → `Todo` (or `In Progress` only if explicitly indicated elsewhere) +- Closed (issue/discussion) → `Done` +- Merged (PR) → `Done` + +**Why use explicit GitHub state?** - GitHub is the source of truth for work status. Inferring status from other signals (labels, comments) would be unreliable and could cause incorrect tracking. 
+ +6) Calculate required date fields for each item (per Project Update Instructions): +- `start_date`: format `created_at` as `YYYY-MM-DD` +- `end_date`: + - if closed/merged → format `closed_at`/`merged_at` as `YYYY-MM-DD` + - if open → **today's date** formatted `YYYY-MM-DD` (required for roadmap view) + +**Why use today for open items?** - GitHub Projects requires end_date for roadmap views. Using today's date shows the item is actively tracked and updates automatically each run until completion. + +7) Do NOT implement idempotency by comparing against the board. You may compare for reporting only. + +**Why no comparison for idempotency?** - The safe-output system handles deduplication. Comparing would add complexity and potential race conditions. Trust the infrastructure. + +8) Apply write budget: +- If `MaxProjectUpdatesPerRun > 0`, select at most that many items this run using deterministic order + (e.g., oldest `updated_at` first; tie-break by ID/number). +- Defer remaining items to next run via cursor. + +**Why use deterministic order?** - Ensures predictable behavior and prevents starvation. Oldest items are processed first, ensuring fair treatment of all work items. The cursor saves progress for next run. + +### Step 3 — Write State (Execution) [WRITES ONLY] + +9) For each selected item, send an `update-project` request. +- Do NOT interleave reads. +- Do NOT pre-check whether the item is on the board. +- **All write semantics MUST follow Project Update Instructions**, including: + - first add → full required fields (status, campaign_id, worker_workflow, repo, priority, size, start_date, end_date) + - existing item → status-only update unless explicit backfill is required + +10) Record per-item outcome: success/failure + error details. + +### Step 4 — Report & Status Update + +11) **REQUIRED: Create a project status update summarizing this run** + +Every campaign run MUST create a status update using `create-project-status-update` safe output. 
This is the primary communication mechanism for conveying campaign progress to stakeholders. + +**Required Sections:** + +- **Most Important Findings**: Highlight the 2-3 most critical discoveries, insights, or blockers from this run +- **What Was Learned**: Document key learnings, patterns observed, or insights gained during this run +- **KPI Trends**: Report progress on EACH campaign KPI (High-Severity Alerts Fixed, File Write Vulnerabilities Fixed) with baseline → current → target format, including direction and velocity +- **Campaign Summary**: Tasks completed, in progress, blocked, and overall completion percentage +- **Next Steps**: Clear action items and priorities for the next run + +**Configuration:** +- Set appropriate status: ON_TRACK, AT_RISK, OFF_TRACK, or COMPLETE +- Use today's date for start_date and target_date (or appropriate future date for target) +- Body must be comprehensive yet concise (target: 200-400 words) + + +**Campaign KPIs to Report:** + +- **High-Severity Alerts Fixed** (primary): baseline 0 alerts → target 20 alerts over 30 days + +- **File Write Vulnerabilities Fixed** (supporting): baseline 0 alerts → target 10 alerts over 30 days + + + +Example status update: +```yaml +create-project-status-update: + project: "https://github.com/orgs/githubnext/projects/122" + status: "ON_TRACK" + start_date: "2026-01-06" + target_date: "2026-01-31" + body: | + ## Campaign Run Summary + + **Discovered:** 25 items (15 issues, 10 PRs) + **Processed:** 10 items added to project, 5 updated + **Completion:** 60% (30/50 total tasks) + + ## Most Important Findings + + 1. **Critical accessibility gaps identified**: 3 high-severity accessibility issues discovered in mobile navigation, requiring immediate attention + 2. **Documentation coverage acceleration**: Achieved 5% improvement in one week (best velocity so far) + 3. 
**Worker efficiency improving**: daily-doc-updater now processing 40% more items per run + + ## What Was Learned + + - Multi-device testing reveals issues that desktop-only testing misses - should be prioritized + - Documentation updates tied to code changes have higher accuracy and completeness + - Users report fewer issues when examples include error handling patterns + + ## KPI Trends + + **Documentation Coverage** (Primary KPI): + - Baseline: 85% → Current: 88% → Target: 95% + - Direction: ↑ Increasing (+3% this week, +1% velocity/week) + - Status: ON TRACK - At current velocity, will reach 95% in 7 weeks + + **Accessibility Score** (Supporting KPI): + - Baseline: 90% → Current: 91% → Target: 98% + - Direction: ↑ Increasing (+1% this month) + - Status: AT RISK - Slower progress than expected, may need dedicated focus + + **User-Reported Issues** (Supporting KPI): + - Baseline: 15/month → Current: 12/month → Target: 5/month + - Direction: ↓ Decreasing (-3 this month, -20% velocity) + - Status: ON TRACK - Trending toward target + + ## Next Steps + + 1. Address 3 critical accessibility issues identified this run (high priority) + 2. Continue processing remaining 15 discovered items + 3. Focus on accessibility improvements to accelerate supporting KPI + 4. Maintain current documentation coverage velocity +``` + +12) Report: +- counts discovered (by type) +- counts processed this run (by action: add/status_update/backfill/noop/failed) +- counts deferred due to budgets +- failures (with reasons) +- completion state (work items only) +- cursor advanced / remaining backlog estimate + +--- + +## Authority + +If any instruction in this file conflicts with **Project Update Instructions**, the Project Update Instructions win for all project writes. 
+--- +# PROJECT UPDATE INSTRUCTIONS (AUTHORITATIVE FOR WRITES) +--- +# Project Update Instructions (Authoritative Write Contract) + +## Project Board Integration + +This file defines the ONLY allowed rules for writing to the GitHub Project board. +If any other instructions conflict with this file, THIS FILE TAKES PRECEDENCE for all project writes. + +--- + +## 0) Hard Requirements (Do Not Deviate) + +- Writes MUST use only the `update-project` safe-output. +- All writes MUST target exactly: + - **Project URL**: `https://github.com/orgs/githubnext/projects/122` +- Every item MUST include: + - `campaign_id: "security-alert-burndown"` + +## Campaign ID + +All campaign tracking MUST key off `campaign_id: "security-alert-burndown"`. + +--- + +## 1) Required Project Fields (Must Already Exist) + +| Field | Type | Allowed / Notes | +|---|---|---| +| `status` | single-select | `Todo` / `In Progress` / `Review required` / `Blocked` / `Done` | +| `campaign_id` | text | Must equal `security-alert-burndown` | +| `worker_workflow` | text | workflow ID or `"unknown"` | +| `repository` | text | `owner/repo` | +| `priority` | single-select | `High` / `Medium` / `Low` | +| `size` | single-select | `Small` / `Medium` / `Large` | +| `start_date` | date | `YYYY-MM-DD` | +| `end_date` | date | `YYYY-MM-DD` | + +Field names are case-sensitive. + +--- + +## 2) Content Identification (Mandatory) + +Use **content number** (integer), never the URL as an identifier. 
+ +- Issue URL: `.../issues/123` → `content_type: "issue"`, `content_number: 123` +- PR URL: `.../pull/456` → `content_type: "pull_request"`, `content_number: 456` + +--- + +## 3) Deterministic Field Rules (No Inference) + +These rules apply to any time you write fields: + +- `campaign_id`: always `security-alert-burndown` +- `worker_workflow`: workflow ID if known, else `"unknown"` +- `repository`: extract `owner/repo` from the issue/PR URL +- `priority`: default `Medium` unless explicitly known +- `size`: default `Medium` unless explicitly known +- `start_date`: issue/PR `created_at` formatted `YYYY-MM-DD` +- `end_date`: + - if closed/merged → `closed_at` / `merged_at` formatted `YYYY-MM-DD` + - if open → **today’s date** formatted `YYYY-MM-DD` (**required for roadmap view; do not leave blank**) + +For open items, `end_date` is a UI-required placeholder and does NOT represent actual completion. + +--- + +## 4) Read-Write Separation (Prevents Read/Write Mixing) + +1. **READ STEP (no writes)** — validate existence and gather metadata +2. **WRITE STEP (writes only)** — execute `update-project` + +Never interleave reads and writes. + +--- + +## 5) Adding an Issue or PR (First Write) + +### Adding New Issues + +When first adding an item to the project, you MUST write ALL required fields. 
+ +```yaml +update-project: + project: "https://github.com/orgs/githubnext/projects/122" + campaign_id: "security-alert-burndown" + content_type: "issue" # or "pull_request" + content_number: 123 + fields: + status: "Todo" # "Done" if already closed/merged + campaign_id: "security-alert-burndown" + worker_workflow: "unknown" + repository: "owner/repo" + priority: "Medium" + size: "Medium" + start_date: "2025-12-15" + end_date: "2026-01-03" +``` + +--- + +## 6) Updating an Existing Item (Minimal Writes) + +### Updating Existing Items + +Preferred behavior is minimal, idempotent writes: + +- If item exists and `status` is unchanged → **No-op** +- If item exists and `status` differs → **Update `status` only** +- If any required field is missing/empty/invalid → **One-time full backfill** (repair only) + +### Status-only Update (Default) + +```yaml +update-project: + project: "https://github.com/orgs/githubnext/projects/122" + campaign_id: "security-alert-burndown" + content_type: "issue" # or "pull_request" + content_number: 123 + fields: + status: "Done" +``` + +### Full Backfill (Repair Only) + +```yaml +update-project: + project: "https://github.com/orgs/githubnext/projects/122" + campaign_id: "security-alert-burndown" + content_type: "issue" # or "pull_request" + content_number: 123 + fields: + status: "Done" + campaign_id: "security-alert-burndown" + worker_workflow: "WORKFLOW_ID" + repository: "owner/repo" + priority: "Medium" + size: "Medium" + start_date: "2025-12-15" + end_date: "2026-01-02" +``` + +--- + +## 7) Idempotency Rules + +- Matching status already set → **No-op** +- Different status → **Status-only update** +- Invalid/deleted/inaccessible URL → **Record failure and continue** + +## Write Operation Rules + +All writes MUST conform to this file and use `update-project` only. 
+ +--- + +## 8) Logging + Failure Handling (Mandatory) + +For every attempted item, record: + +- `content_type`, `content_number`, `repository` +- action taken: `noop | add | status_update | backfill | failed` +- error details if failed + +Failures must not stop processing remaining items. + +--- + +## 9) Worker Workflow Policy + +- Workers are campaign-agnostic. +- Orchestrator populates `worker_workflow`. +- If `worker_workflow` cannot be determined, it MUST remain `"unknown"` unless explicitly reclassified by the orchestrator. + +--- + +## 10) Parent / Sub-Issue Rules (Campaign Hierarchy) + +- Each project board MUST have exactly **one Epic issue** representing the campaign. +- The Epic issue MUST: + - Be added to the project board + - Use the same `campaign_id` + - Use `worker_workflow: "unknown"` + +- All campaign work issues (non-epic) MUST be created as **sub-issues of the Epic**. +- Issues MUST NOT be re-parented based on worker assignment. + +- Pull requests cannot be sub-issues: + - PRs MUST reference their related issue via standard GitHub linking (e.g. “Closes #123”). + +- Worker grouping MUST be done via the `worker_workflow` project field, not via parent issues. + +- The Epic issue is narrative only. +- The project board is the sole authoritative source of campaign state. + +--- + +## Appendix — Machine Check Checklist (Optional) + +This checklist is designed to validate outputs before executing project writes. + +### A) Output Structure Checks + +- [ ] All writes use `update-project:` blocks (no other write mechanism). 
+- [ ] Each `update-project` block includes: + - [ ] `project: "https://github.com/orgs/githubnext/projects/122"` + - [ ] `campaign_id: "security-alert-burndown"` (top-level) + - [ ] `content_type` ∈ {`issue`, `pull_request`} + - [ ] `content_number` is an integer + - [ ] `fields` object is present + +### B) Field Validity Checks + +- [ ] `fields.status` ∈ {`Todo`, `In Progress`, `Review required`, `Blocked`, `Done`} +- [ ] `fields.campaign_id` is present on first-add/backfill and equals `security-alert-burndown` +- [ ] `fields.worker_workflow` is present on first-add/backfill and is either a known workflow ID or `"unknown"` +- [ ] `fields.repository` matches `owner/repo` +- [ ] `fields.priority` ∈ {`High`, `Medium`, `Low`} +- [ ] `fields.size` ∈ {`Small`, `Medium`, `Large`} +- [ ] `fields.start_date` matches `YYYY-MM-DD` +- [ ] `fields.end_date` matches `YYYY-MM-DD` + +### C) Update Semantics Checks + +- [ ] For existing items, payload is **status-only** unless explicitly doing a backfill repair. +- [ ] Backfill is used only when required fields are missing/empty/invalid. +- [ ] No payload overwrites `priority`/`size`/`worker_workflow` with defaults during a normal status update. + +### D) Read-Write Separation Checks + +- [ ] All reads occur before any writes (no read/write interleaving). +- [ ] Writes are batched separately from discovery. + +### E) Epic/Hierarchy Checks (Policy-Level) + +- [ ] Exactly one Epic exists for the campaign board. +- [ ] Epic is on the board and uses `worker_workflow: "unknown"`. +- [ ] All campaign work issues are sub-issues of the Epic (if supported by environment/tooling). +- [ ] PRs are linked to issues via GitHub linking (e.g. “Closes #123”). + +### F) Failure Handling Checks + +- [ ] Invalid/deleted/inaccessible items are logged as failures and processing continues. +- [ ] Idempotency is delegated to the `update-project` tool; no pre-filtering by board presence. 
+--- +# CLOSING INSTRUCTIONS (HIGHEST PRIORITY) +--- +# Closing Instructions (Highest Priority) + +Execute all four steps in strict order: + +1. Read State (no writes) +2. Make Decisions (no writes) +3. Write State (update-project only) +4. Report + +The following rules are mandatory and override inferred behavior: + +- The GitHub Project board is the single source of truth. +- All project writes MUST comply with `project_update_instructions.md`. +- State reads and state writes MUST NOT be interleaved. +- Do NOT infer missing data or invent values. +- Do NOT reorganize hierarchy. +- Do NOT overwrite fields except as explicitly allowed. +- Workers are immutable and campaign-agnostic. + +If any instruction conflicts, the Project Update Instructions take precedence for all writes. diff --git a/.github/workflows/security-compliance.lock.yml b/.github/workflows/security-compliance.lock.yml index 73d08277f9..21168b629b 100644 --- a/.github/workflows/security-compliance.lock.yml +++ b/.github/workflows/security-compliance.lock.yml @@ -250,7 +250,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -666,10 +666,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e 
GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -697,7 +698,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -708,7 +710,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -793,7 +795,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -815,13 +817,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -866,7 +868,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -875,7 +877,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -977,7 +979,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1031,7 +1033,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1144,7 +1146,7 @@ jobs: git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1205,7 +1207,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1232,7 +1234,7 @@ jobs: 
await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index b0eeb10151..4e8adcd533 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -253,7 +253,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -303,7 +303,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -338,7 +338,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -774,10 +774,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e 
MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e 
GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -817,7 +818,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -828,7 +830,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -913,7 +915,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -936,13 +938,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -985,7 +987,7 @@ jobs: echo 'AWF binary not 
installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -993,7 +995,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1095,7 +1097,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1149,7 +1151,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1318,7 +1320,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1345,7 +1347,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1372,7 +1374,7 @@ 
jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1388,7 +1390,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/semantic-function-refactor.lock.yml b/.github/workflows/semantic-function-refactor.lock.yml index 39df34d436..cb45fefce5 100644 --- a/.github/workflows/semantic-function-refactor.lock.yml +++ b/.github/workflows/semantic-function-refactor.lock.yml @@ -218,7 +218,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -674,10 +674,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e 
GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << 
GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -719,7 +720,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -730,7 +732,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -883,7 +885,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -905,7 +907,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -948,7 +950,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1060,7 +1062,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1113,7 +1115,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1218,7 +1220,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1245,7 +1247,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/sergo.lock.yml b/.github/workflows/sergo.lock.yml index 3b7e0ca07c..e0222befaa 100644 --- a/.github/workflows/sergo.lock.yml +++ b/.github/workflows/sergo.lock.yml @@ -225,7 +225,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -280,7 +280,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: 
actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -628,10 +628,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e 
GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -673,7 +674,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -684,7 +686,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -839,7 +841,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -861,7 +863,7 @@ jobs: await main(); - name: Upload sanitized agent output if: 
always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -902,7 +904,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -910,7 +912,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1022,7 +1024,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1077,7 +1079,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1190,7 +1192,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1217,7 +1219,7 @@ jobs: await main(); - 
name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1244,7 +1246,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1260,7 +1262,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/shared/ci-data-analysis.md b/.github/workflows/shared/ci-data-analysis.md index deb63dd670..88b0833ace 100644 --- a/.github/workflows/shared/ci-data-analysis.md +++ b/.github/workflows/shared/ci-data-analysis.md @@ -46,14 +46,14 @@ steps: done - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: "24" cache: npm cache-dependency-path: actions/setup/js/package-lock.json - name: Setup Go - uses: actions/setup-go@v6 + uses: actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true diff --git a/.github/workflows/shared/mcp-debug.md b/.github/workflows/shared/mcp-debug.md index d8d460eb1c..176a4dad50 100644 --- a/.github/workflows/shared/mcp-debug.md +++ b/.github/workflows/shared/mcp-debug.md @@ -21,7 +21,7 @@ safe-outputs: pull-requests: write steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 - name: Post diagnostic report to 
pull request uses: actions/github-script@v8 with: @@ -131,7 +131,7 @@ safe-outputs: } steps: - name: Setup Go - uses: actions/setup-go@v6 + uses: actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true diff --git a/.github/workflows/shared/mcp/drain3.md b/.github/workflows/shared/mcp/drain3.md index 90d64ccecc..566654d081 100644 --- a/.github/workflows/shared/mcp/drain3.md +++ b/.github/workflows/shared/mcp/drain3.md @@ -16,7 +16,7 @@ mcp-servers: - search_pattern steps: - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6.2.0 with: python-version: '3.11' - name: Install Drain3 dependencies diff --git a/.github/workflows/shared/ollama-threat-scan.md b/.github/workflows/shared/ollama-threat-scan.md index f1a9edac18..6563cc6537 100644 --- a/.github/workflows/shared/ollama-threat-scan.md +++ b/.github/workflows/shared/ollama-threat-scan.md @@ -329,7 +329,7 @@ safe-outputs: - name: Upload scan results if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: ollama-scan-results path: | diff --git a/.github/workflows/shared/python-dataviz.md b/.github/workflows/shared/python-dataviz.md index d2f0bf458e..0ddae4a5dc 100644 --- a/.github/workflows/shared/python-dataviz.md +++ b/.github/workflows/shared/python-dataviz.md @@ -58,7 +58,7 @@ steps: - name: Upload charts if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: data-charts path: /tmp/gh-aw/python/charts/*.png @@ -67,7 +67,7 @@ steps: - name: Upload source files and data if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: python-source-and-data path: | diff --git a/.github/workflows/shared/trending-charts-simple.md b/.github/workflows/shared/trending-charts-simple.md index f49ec304aa..a88dcc7fed 100644 --- a/.github/workflows/shared/trending-charts-simple.md +++ b/.github/workflows/shared/trending-charts-simple.md @@ -21,7 +21,7 @@ steps: - 
name: Upload charts if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: trending-charts path: /tmp/gh-aw/python/charts/*.png @@ -30,7 +30,7 @@ steps: - name: Upload source files and data if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: trending-source-and-data path: | diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml index b83cbea817..c9dcda2c1e 100644 --- a/.github/workflows/slide-deck-maintainer.lock.yml +++ b/.github/workflows/slide-deck-maintainer.lock.yml @@ -23,7 +23,7 @@ # # Maintains the gh-aw slide deck by scanning repository content and detecting layout issues using Playwright # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"216ae8200889e1fdf9a0c0c5917c0653e9131f9a193a51c9a67f223ac00bc418"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"0b3d7f1cb6dbc12d69cb6f2f524b6c7eaec295bbc300df932437af7677e97e6c"} name: "Slide Deck Maintainer" "on": @@ -236,7 +236,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -287,7 +287,7 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 with: cache: npm cache-dependency-path: docs/package-lock.json @@ -301,7 +301,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: 
actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -665,10 +665,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e 
GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -703,7 +704,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -714,7 +716,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -839,7 +841,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -861,13 +863,13 @@ jobs: await main(); - name: Upload 
sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -910,7 +912,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -918,7 +920,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1021,7 +1023,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1076,7 +1078,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1250,7 +1252,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact 
continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1261,7 +1263,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1306,7 +1308,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1333,7 +1335,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1349,7 +1351,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/slide-deck-maintainer.md b/.github/workflows/slide-deck-maintainer.md index d33fa24a3f..bb8c3e6921 100644 --- a/.github/workflows/slide-deck-maintainer.md +++ b/.github/workflows/slide-deck-maintainer.md @@ -50,7 +50,7 @@ network: - node steps: - name: Setup Node.js - uses: 
actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: "24" cache: npm diff --git a/.github/workflows/smoke-agent.lock.yml b/.github/workflows/smoke-agent.lock.yml index 20825a9240..a88f045158 100644 --- a/.github/workflows/smoke-agent.lock.yml +++ b/.github/workflows/smoke-agent.lock.yml @@ -242,7 +242,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -676,10 +676,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -730,7 +731,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -741,7 +743,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # 
v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -803,7 +805,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -825,13 +827,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -876,7 +878,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -966,7 +968,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1021,7 +1023,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: 
/tmp/gh-aw/safeoutputs/ @@ -1188,7 +1190,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1232,7 +1234,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index 0b7eac5d7b..eadc339f79 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -608,7 +608,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -657,7 +657,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -687,7 +687,7 @@ jobs: build-args: | BINARY=dist/gh-aw-linux-amd64 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -698,7 +698,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share 
data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -2068,10 +2068,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -2155,7 +2156,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -2166,7 +2168,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -2331,7 +2333,7 @@ jobs: SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - name: Upload 
Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -2353,7 +2355,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -2403,7 +2405,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -2411,7 +2413,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -2525,7 +2527,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -2581,7 +2583,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2764,7 +2766,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2775,7 +2777,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -2820,7 +2822,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -2847,7 +2849,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -2863,7 +2865,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 99603bc2dc..6ddd935de2 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -270,7 +270,7 @@ jobs: run: 
bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -318,7 +318,7 @@ jobs: persist-credentials: false fetch-depth: 2 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -329,7 +329,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1033,10 +1033,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME 
-e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat > /tmp/gh-aw/mcp-config/config.toml << GH_AW_MCP_CONFIG_EOF [history] @@ -1170,7 +1171,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1181,7 +1183,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1245,7 +1247,7 @@ jobs: SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1267,13 +1269,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1325,7 +1327,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1333,7 +1335,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1424,7 +1426,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1480,7 +1482,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1658,7 +1660,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1685,7 +1687,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1712,7 +1714,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1728,7 +1730,7 @@ jobs: fi - name: Save cache-memory to cache 
(default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-copilot-arm.lock.yml b/.github/workflows/smoke-copilot-arm.lock.yml index 8e2e27f1b1..c67c019e01 100644 --- a/.github/workflows/smoke-copilot-arm.lock.yml +++ b/.github/workflows/smoke-copilot-arm.lock.yml @@ -273,7 +273,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -322,7 +322,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -352,7 +352,7 @@ jobs: build-args: | BINARY=dist/gh-aw-linux-amd64 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -363,7 +363,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1588,10 +1588,11 @@ jobs: export MCP_GATEWAY_API_KEY 
export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN 
-e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1653,7 +1654,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1664,7 +1666,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1752,7 +1754,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1774,13 +1776,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && 
env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1832,7 +1834,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1840,7 +1842,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1943,7 +1945,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -2001,7 +2003,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2182,7 +2184,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2209,7 +2211,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -2225,7 +2227,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ @@ -2270,7 +2272,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -2286,7 +2288,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml index 0737142a11..f657357409 100644 --- a/.github/workflows/smoke-copilot.lock.yml +++ b/.github/workflows/smoke-copilot.lock.yml @@ -269,7 +269,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -319,7 +319,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -349,7 +349,7 @@ jobs: build-args: | BINARY=dist/gh-aw-linux-amd64 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -360,7 +360,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1585,10 +1585,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1650,7 +1651,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1661,7 +1663,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1749,7 +1751,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1771,13 +1773,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1829,7 +1831,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1837,7 +1839,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1941,7 +1943,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1999,7 +2001,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2179,7 +2181,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -2206,7 +2208,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -2222,7 +2224,7 @@ jobs: steps: - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /opt/gh-aw/safe-jobs/ @@ -2267,7 +2269,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -2283,7 +2285,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-gemini.lock.yml b/.github/workflows/smoke-gemini.lock.yml index b30ec4fee0..a017cdc86d 100644 --- a/.github/workflows/smoke-gemini.lock.yml +++ b/.github/workflows/smoke-gemini.lock.yml @@ -265,7 +265,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -317,7 +317,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -871,10 +871,11 @@ jobs: export 
MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="gemini" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e 
GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -910,7 +911,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -921,7 +923,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -991,7 +993,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1013,13 +1015,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1059,7 +1061,7 @@ jobs: const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); await main(); - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1067,7 +1069,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1166,7 +1168,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1222,7 +1224,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1400,7 +1402,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1427,7 +1429,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1454,7 +1456,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1470,7 +1472,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-multi-pr.lock.yml b/.github/workflows/smoke-multi-pr.lock.yml index 6b05b2e264..7602ee9c86 100644 --- a/.github/workflows/smoke-multi-pr.lock.yml +++ b/.github/workflows/smoke-multi-pr.lock.yml @@ -249,7 +249,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -693,10 +693,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p 
"${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e 
GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -724,7 +725,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -735,7 +737,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -845,7 +847,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -867,13 +869,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: 
warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -918,7 +920,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1021,7 +1023,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1076,7 +1078,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1272,7 +1274,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1283,7 +1285,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1328,7 +1330,7 @@ 
jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/smoke-project.lock.yml b/.github/workflows/smoke-project.lock.yml index c0f7aba7ad..ea6f8ba663 100644 --- a/.github/workflows/smoke-project.lock.yml +++ b/.github/workflows/smoke-project.lock.yml @@ -246,7 +246,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -1111,10 +1111,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e 
GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -1142,7 +1143,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -1153,7 +1155,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1238,7 +1240,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1260,13 +1262,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1311,7 +1313,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1414,7 +1416,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1469,7 +1471,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1666,7 +1668,7 @@ jobs: safe-output-projects: 'true' - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1677,7 +1679,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1724,7 +1726,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/smoke-temporary-id.lock.yml b/.github/workflows/smoke-temporary-id.lock.yml index 81e28b6267..18132b6bdd 100644 --- a/.github/workflows/smoke-temporary-id.lock.yml +++ b/.github/workflows/smoke-temporary-id.lock.yml @@ -243,7 +243,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -735,10 +735,11 @@ jobs: export MCP_GATEWAY_API_KEY export 
MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -766,7 +767,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -777,7 +779,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -862,7 +864,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -884,13 +886,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -935,7 +937,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1037,7 +1039,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1092,7 +1094,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1270,7 +1272,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1297,7 +1299,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git 
a/.github/workflows/smoke-test-tools.lock.yml b/.github/workflows/smoke-test-tools.lock.yml index 0bee867c53..4209ba2ddb 100644 --- a/.github/workflows/smoke-test-tools.lock.yml +++ b/.github/workflows/smoke-test-tools.lock.yml @@ -242,7 +242,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -289,17 +289,17 @@ jobs: with: persist-credentials: false - name: Setup .NET - uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4.3.1 + uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0 with: dotnet-version: '8.0' - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.24' - name: Capture GOROOT for AWF chroot mode run: echo "GOROOT=$(go env GOROOT)" >> "$GITHUB_ENV" - name: Setup Java - uses: actions/setup-java@c1e323688fd81a25caa38c78aa6df2d33d3e20d9 # v4.8.0 + uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5.2.0 with: java-version: '21' distribution: temurin @@ -309,7 +309,7 @@ jobs: node-version: '20' package-manager-cache: false - name: Setup Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: '3.11' - name: Create gh-aw temp directory @@ -638,10 +638,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v 
/var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF 
-e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -669,7 +670,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -680,7 +682,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -765,7 +767,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -787,13 +789,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
agent_outputs path: | @@ -838,7 +840,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -940,7 +942,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -995,7 +997,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1157,7 +1159,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1184,7 +1186,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/smoke-workflow-call.lock.yml b/.github/workflows/smoke-workflow-call.lock.yml index dab6524d72..f461f4f4e1 100644 --- a/.github/workflows/smoke-workflow-call.lock.yml +++ b/.github/workflows/smoke-workflow-call.lock.yml @@ -207,7 +207,7 @@ jobs: run: bash 
/opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -578,10 +578,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -609,7 +610,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -620,7 +622,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -725,7 +727,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -747,13 +749,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -798,7 +800,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -900,7 +902,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -955,7 +957,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1095,7 +1097,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1122,7 +1124,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml index c1beff2b8f..b593e71fc2 100644 --- a/.github/workflows/stale-repo-identifier.lock.yml +++ b/.github/workflows/stale-repo-identifier.lock.yml @@ -29,7 +29,7 @@ # - shared/python-dataviz.md # - shared/trending-charts-simple.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"8a297bc60cde682ca7a29c71f7f1c5598fedf143ccf36dbc0cd331050b8cce01"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"4b7860dbcfa8b3096adab9e2250bdba5a76645fb92bf7045b6469af99c59e6e5"} name: "Stale Repository Identifier" "on": @@ -243,7 +243,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -300,7 +300,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific 
libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -308,7 +308,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -324,7 +324,7 @@ jobs: pip install --user --quiet numpy pandas matplotlib seaborn scipy - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-charts @@ -332,7 +332,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: trending-source-and-data @@ -347,7 +347,7 @@ jobs: ORGANIZATION: ${{ env.ORGANIZATION }} id: stale-repos name: Run stale-repos tool - uses: github/stale-repos@a21e55567b83cf3c3f3f9085d3038dc6cee02598 # v3.0.2 + uses: github/stale-repos@6084a41431c4ce8842a7e879b1a15082b88742ae # v8.0.4 - env: INACTIVE_REPOS: ${{ steps.stale-repos.outputs.inactiveRepos }} name: Save stale repos output @@ -357,7 +357,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ 
github.run_id }} path: /tmp/gh-aw/cache-memory @@ -751,10 +751,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e 
GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -782,7 +783,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -793,7 +795,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -881,7 +883,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -903,13 +905,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -952,7 +954,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -960,7 +962,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -969,7 +971,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1071,7 +1073,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1126,7 +1128,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1233,7 +1235,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1260,7 +1262,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1287,7 +1289,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1303,7 +1305,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: trending-data-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1348,7 +1350,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1359,7 
+1361,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/stale-repo-identifier.md b/.github/workflows/stale-repo-identifier.md index 52868606df..c08ce5b711 100644 --- a/.github/workflows/stale-repo-identifier.md +++ b/.github/workflows/stale-repo-identifier.md @@ -66,7 +66,7 @@ env: steps: - name: Run stale-repos tool id: stale-repos - uses: github/stale-repos@v3.0.2 + uses: github/stale-repos@v8.0.4 env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} ORGANIZATION: ${{ env.ORGANIZATION }} diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml index 491d082719..8b09878e63 100644 --- a/.github/workflows/static-analysis-report.lock.yml +++ b/.github/workflows/static-analysis-report.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -270,7 +270,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -312,7 +312,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: 
actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -685,10 +685,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e 
GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -725,7 +726,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -736,7 +738,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -875,7 +877,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -897,7 +899,7 @@ jobs: await main(); - name: Upload sanitized agent output if: 
always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -938,7 +940,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -946,7 +948,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1058,7 +1060,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1113,7 +1115,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1221,7 +1223,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1248,7 +1250,7 @@ jobs: await main(); - 
name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1275,7 +1277,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1291,7 +1293,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/step-name-alignment.lock.yml b/.github/workflows/step-name-alignment.lock.yml index 8c007d3e16..3196c6c40e 100644 --- a/.github/workflows/step-name-alignment.lock.yml +++ b/.github/workflows/step-name-alignment.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -266,7 +266,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED 
}}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -636,10 +636,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e 
GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -665,7 +666,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -676,7 +678,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -830,7 +832,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -852,7 +854,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -893,7 +895,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -901,7 +903,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1013,7 +1015,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1067,7 +1069,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1172,7 +1174,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1199,7 +1201,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1226,7 +1228,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1242,7 +1244,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/sub-issue-closer.lock.yml b/.github/workflows/sub-issue-closer.lock.yml index 3cfa865978..3e8615626d 100644 --- a/.github/workflows/sub-issue-closer.lock.yml +++ b/.github/workflows/sub-issue-closer.lock.yml @@ -206,7 +206,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -697,10 +697,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e 
MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e 
GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -728,7 +729,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -739,7 +741,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -824,7 +826,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -846,13 +848,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -897,7 +899,7 @@ jobs: - name: Upload agent artifacts if: always() 
continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -999,7 +1001,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1054,7 +1056,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1161,7 +1163,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1188,7 +1190,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml index 16f63c47e5..1026ce4d6f 100644 --- a/.github/workflows/super-linter.lock.yml +++ b/.github/workflows/super-linter.lock.yml @@ -27,7 +27,7 @@ # Imports: # - shared/reporting.md # -# gh-aw-metadata: 
{"schema_version":"v1","frontmatter_hash":"9478bb2a1b6bc6a3985225850de425cd0cffc1974ed94a4ef9f84f438d8cb6b4"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"47646a07e95e9cbf938dd46af0201fec96e3d530cd4cf8feaf53992cc764cb34"} name: "Super Linter Report" "on": @@ -225,7 +225,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -279,7 +279,7 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Download super-linter log - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: super-linter-log path: /tmp/gh-aw/ @@ -288,7 +288,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -653,10 +653,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -684,7 +685,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -695,7 +697,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -780,7 +782,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -802,13 +804,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -851,7 +853,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -859,7 +861,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -961,7 +963,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1015,7 +1017,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1120,7 +1122,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1147,7 +1149,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1163,7 +1165,7 @@ jobs: steps: - name: Checkout repository - uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: fetch-depth: 0 persist-credentials: false @@ -1193,7 +1195,7 @@ jobs: fi - name: Upload super-linter log if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: super-linter-log path: super-linter.log @@ -1220,7 +1222,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1236,7 +1238,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/super-linter.md b/.github/workflows/super-linter.md index d0a99ac358..3e80030f08 100644 --- a/.github/workflows/super-linter.md +++ b/.github/workflows/super-linter.md @@ -29,7 +29,7 @@ jobs: statuses: write steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 with: # super-linter needs the full git history to get the # list of files that changed across commits @@ -66,14 +66,14 @@ jobs: - name: Upload super-linter log if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7.0.0 with: name: super-linter-log path: super-linter.log retention-days: 7 steps: - name: Download super-linter log - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v8.0.0 with: 
name: super-linter-log path: /tmp/gh-aw/ diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml index a3f22cdf9b..1bb6865a3a 100644 --- a/.github/workflows/technical-doc-writer.lock.yml +++ b/.github/workflows/technical-doc-writer.lock.yml @@ -28,7 +28,7 @@ # - ../agents/technical-doc-writer.agent.md # - ../skills/documentation/SKILL.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"41fe7e00a5eab6c9e52e791d6b9e373a6d9f236a58f2f770997eab47ea4b374e"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"0fed3bd9dc465cabf5a8de79bbefc40ae0d13eb11acaba1c2a62d541a5924d57"} name: "Rebuild the documentation after making changes" "on": @@ -234,7 +234,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -314,7 +314,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -746,10 +746,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e 
GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -777,7 +778,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -788,7 +790,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -876,7 +878,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -898,13 +900,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -947,7 +949,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload 
cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -955,7 +957,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -964,7 +966,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1067,7 +1069,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1124,7 +1126,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1251,7 +1253,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1262,7 +1264,7 @@ jobs: echo 
"GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1307,7 +1309,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1334,7 +1336,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1350,7 +1352,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1395,7 +1397,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1406,7 +1408,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/technical-doc-writer.md b/.github/workflows/technical-doc-writer.md index bd9fb8fb9d..acbb580c07 100644 --- a/.github/workflows/technical-doc-writer.md +++ b/.github/workflows/technical-doc-writer.md @@ -45,7 +45,7 @@ safe-outputs: steps: - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: '24' cache: 'npm' diff --git a/.github/workflows/terminal-stylist.lock.yml b/.github/workflows/terminal-stylist.lock.yml index 567f9f68b0..37f9ee188d 100644 --- a/.github/workflows/terminal-stylist.lock.yml +++ b/.github/workflows/terminal-stylist.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -599,10 +599,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e 
GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -638,7 +639,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -649,7 +651,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -734,7 +736,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -756,13 +758,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -807,7 +809,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -909,7 +911,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -963,7 +965,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1071,7 +1073,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1098,7 +1100,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/test-create-pr-error-handling.lock.yml b/.github/workflows/test-create-pr-error-handling.lock.yml index fae9e84e51..09d6c4366e 100644 --- a/.github/workflows/test-create-pr-error-handling.lock.yml +++ b/.github/workflows/test-create-pr-error-handling.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -264,7 +264,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: 
Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -633,10 +633,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e 
MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -662,7 +663,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -673,7 +675,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -811,7 +813,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: 
safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -833,7 +835,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -874,7 +876,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -882,7 +884,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -995,7 +997,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1050,7 +1052,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1174,7 +1176,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1185,7 +1187,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1230,7 +1232,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1257,7 +1259,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1273,7 +1275,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/test-dispatcher.lock.yml b/.github/workflows/test-dispatcher.lock.yml index 8125b15c55..51d3fac2c2 100644 --- a/.github/workflows/test-dispatcher.lock.yml +++ b/.github/workflows/test-dispatcher.lock.yml @@ -201,7 +201,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -551,10 +551,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -582,7 +583,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -593,7 +595,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -678,7 +680,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -700,13 +702,13 @@ 
jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -751,7 +753,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -853,7 +855,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -905,7 +907,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1009,7 +1011,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1036,7 +1038,7 @@ jobs: await main(); - name: Upload safe output 
items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/test-project-url-default.lock.yml b/.github/workflows/test-project-url-default.lock.yml index 0737ab98d3..6c3994d96f 100644 --- a/.github/workflows/test-project-url-default.lock.yml +++ b/.github/workflows/test-project-url-default.lock.yml @@ -201,7 +201,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -794,10 +794,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME 
-e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -825,7 +826,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -836,7 +838,7 @@ jobs: const { generateWorkflowOverview } = 
require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -921,7 +923,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -943,13 +945,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -994,7 +996,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1096,7 +1098,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1148,7 +1150,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1253,7 +1255,7 @@ jobs: safe-output-projects: 'true' - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1282,7 +1284,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/test-workflow.lock.yml b/.github/workflows/test-workflow.lock.yml index 6e50ac47c0..b51c60fd43 100644 --- a/.github/workflows/test-workflow.lock.yml +++ b/.github/workflows/test-workflow.lock.yml @@ -201,7 +201,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -336,10 +336,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG 
-e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v 
'"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -360,7 +361,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -371,7 +373,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -454,7 +456,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -499,7 +501,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml index c83494b8c9..27f9d2ecdc 100644 --- a/.github/workflows/tidy.lock.yml +++ b/.github/workflows/tidy.lock.yml @@ -23,7 +23,7 @@ # # Automatically formats and tidies code files (Go, JS, TypeScript) when code changes are pushed or on command # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"2808846f8bc82fccb0f29bd33f47aedcfd87f781bd89eb80af5ce458dfc407fe"} +# gh-aw-metadata: 
{"schema_version":"v1","frontmatter_hash":"c1ab537bf460e015b81437b1d7b086f0abfa3872ab893889dc7ca6173e4e0d73"} name: "Tidy" "on": @@ -252,7 +252,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -301,13 +301,13 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 with: cache: npm cache-dependency-path: actions/setup/js/package-lock.json node-version: "24" - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: cache: true go-version-file: go.mod @@ -720,10 +720,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e 
GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -751,7 +752,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -762,7 +764,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -872,7 +874,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -895,13 +897,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -946,7 +948,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1049,7 +1051,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1103,7 +1105,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1290,7 +1292,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1301,7 +1303,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1346,7 +1348,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/tidy.md b/.github/workflows/tidy.md index 5e5e16ca62..1d14c9c0f1 100644 --- a/.github/workflows/tidy.md +++ b/.github/workflows/tidy.md @@ -48,13 +48,13 @@ safe-outputs: missing-tool: steps: - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: "24" cache: npm cache-dependency-path: actions/setup/js/package-lock.json - name: Setup Go - uses: actions/setup-go@v6 + uses: 
actions/setup-go@v6.3.0 with: go-version-file: go.mod cache: true diff --git a/.github/workflows/typist.lock.yml b/.github/workflows/typist.lock.yml index c0dbc10b5f..b677005fb2 100644 --- a/.github/workflows/typist.lock.yml +++ b/.github/workflows/typist.lock.yml @@ -217,7 +217,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -609,10 +609,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw 
ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -654,7 +655,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -665,7 +667,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts 
@@ -818,7 +820,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -840,7 +842,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -883,7 +885,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -995,7 +997,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1049,7 +1051,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1157,7 +1159,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: 
agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1184,7 +1186,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/ubuntu-image-analyzer.lock.yml b/.github/workflows/ubuntu-image-analyzer.lock.yml index 3225790361..a14a5d2e32 100644 --- a/.github/workflows/ubuntu-image-analyzer.lock.yml +++ b/.github/workflows/ubuntu-image-analyzer.lock.yml @@ -214,7 +214,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -623,10 +623,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e 
GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -654,7 +655,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF 
@@ -665,7 +667,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -776,7 +778,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -798,13 +800,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -849,7 +851,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -952,7 +954,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: 
/tmp/gh-aw/threat-detection/detection.log @@ -1006,7 +1008,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1180,7 +1182,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1191,7 +1193,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1236,7 +1238,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml index ce28f1569e..e6a08aaa82 100644 --- a/.github/workflows/unbloat-docs.lock.yml +++ b/.github/workflows/unbloat-docs.lock.yml @@ -28,7 +28,7 @@ # - shared/docs-server-lifecycle.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bb2aae4e487311cc80f8cd9db35b06a07b8271fdd8a7ff61694c5e1876a09be3"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"e12f2743b5993b77b7036fde9cf5e74e20de84de58f72454fcfb45df90afae4d"} name: 
"Documentation Unbloat" "on": @@ -260,7 +260,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -305,7 +305,7 @@ jobs: - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - name: Setup Node.js @@ -328,7 +328,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -765,10 +765,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="claude" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e 
GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh { @@ -813,7 +814,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -824,7 +826,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -1024,7 +1026,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -1047,7 +1049,7 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} @@ -1088,7 +1090,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -1096,7 +1098,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1105,7 +1107,7 @@ jobs: - name: Upload agent artifacts if: always() 
continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1218,7 +1220,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1275,7 +1277,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1463,7 +1465,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1474,7 +1476,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1519,7 +1521,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ 
-1546,7 +1548,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1562,7 +1564,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1607,7 +1609,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1618,7 +1620,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/unbloat-docs.md b/.github/workflows/unbloat-docs.md index f89474d2d9..ac975b290a 100644 --- a/.github/workflows/unbloat-docs.md +++ b/.github/workflows/unbloat-docs.md @@ -94,12 +94,12 @@ timeout-minutes: 30 # Build steps for documentation steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v6.0.2 with: persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@v6.2.0 with: node-version: '24' cache: 'npm' 
diff --git a/.github/workflows/video-analyzer.lock.yml b/.github/workflows/video-analyzer.lock.yml index 98104540db..660adf566c 100644 --- a/.github/workflows/video-analyzer.lock.yml +++ b/.github/workflows/video-analyzer.lock.yml @@ -220,7 +220,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -635,10 +635,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + 
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -666,7 +667,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -677,7 +679,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ 
-779,7 +781,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -801,13 +803,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -852,7 +854,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -954,7 +956,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1007,7 +1009,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1112,7 +1114,7 @@ jobs: destination: 
/opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1139,7 +1141,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/weekly-editors-health-check.lock.yml b/.github/workflows/weekly-editors-health-check.lock.yml index 29287cfaff..f2f8a5d1e5 100644 --- a/.github/workflows/weekly-editors-health-check.lock.yml +++ b/.github/workflows/weekly-editors-health-check.lock.yml @@ -213,7 +213,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -649,10 +649,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e 
GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -687,7 
+688,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -698,7 +700,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -812,7 +814,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -834,13 +836,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -885,7 +887,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -894,7 +896,7 @@ jobs: - name: Upload agent 
artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -997,7 +999,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1052,7 +1054,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1182,7 +1184,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1193,7 +1195,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1238,7 +1240,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: 
/tmp/safe-output-items.jsonl @@ -1284,7 +1286,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1295,7 +1297,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml index f240777e32..693b45b5dd 100644 --- a/.github/workflows/weekly-issue-summary.lock.yml +++ b/.github/workflows/weekly-issue-summary.lock.yml @@ -77,6 +77,14 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/validate_context_variables.cjs'); await main(); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false - name: Check workflow file timestamps uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 env: @@ -221,7 +229,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -275,7 +283,7 @@ jobs: run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c 
\"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n" - if: always() name: Upload charts - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: data-charts @@ -283,7 +291,7 @@ jobs: retention-days: 30 - if: always() name: Upload source files and data - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: if-no-files-found: warn name: python-source-and-data @@ -296,7 +304,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -654,10 +662,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e 
GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v 
/tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -685,7 +694,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -696,7 +706,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -784,7 +794,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -806,13 +816,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -855,7 +865,7 @@ jobs: echo 'AWF binary not installed, skipping firewall log summary' fi - name: Upload cache-memory data as artifact - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 if: always() with: name: cache-memory @@ -863,7 +873,7 @@ jobs: # Upload safe-outputs assets for upload_assets job - name: Upload Safe Outputs assets if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -872,7 +882,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -974,7 +984,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1030,7 +1040,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1142,7 +1152,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1169,7 +1179,7 @@ jobs: await main(); - name: Upload safe output items manifest if: 
always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl @@ -1196,7 +1206,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) id: download_cache_default - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: cache-memory @@ -1212,7 +1222,7 @@ jobs: fi - name: Save cache-memory to cache (default) if: steps.check_cache_default.outputs.has_content == 'true' - uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 with: key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -1257,7 +1267,7 @@ jobs: echo "Git configured with standard GitHub Actions identity" - name: Download assets continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: safe-outputs-assets path: /tmp/gh-aw/safeoutputs/assets/ @@ -1268,7 +1278,7 @@ jobs: find /tmp/gh-aw/safeoutputs/assets/ -maxdepth 1 -ls - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/weekly-safe-outputs-spec-review.lock.yml b/.github/workflows/weekly-safe-outputs-spec-review.lock.yml index e3630cd464..51cda2830d 100644 --- a/.github/workflows/weekly-safe-outputs-spec-review.lock.yml +++ 
b/.github/workflows/weekly-safe-outputs-spec-review.lock.yml @@ -208,7 +208,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -615,10 +615,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e 
MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -646,7 +647,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -657,7 +659,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -742,7 +744,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -764,13 +766,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -815,7 +817,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -918,7 +920,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -972,7 +974,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1102,7 +1104,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1113,7 +1115,7 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ @@ -1158,7 +1160,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/workflow-generator.lock.yml b/.github/workflows/workflow-generator.lock.yml index f8d17df0dd..cb4d24d848 100644 --- a/.github/workflows/workflow-generator.lock.yml +++ b/.github/workflows/workflow-generator.lock.yml @@ -235,7 +235,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -743,10 +743,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e 
GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v 
/tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -774,7 +775,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -785,7 +787,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -870,7 +872,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -892,13 +894,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -943,7 +945,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1045,7 +1047,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1099,7 +1101,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1276,7 +1278,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1319,7 +1321,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/workflow-health-manager.lock.yml b/.github/workflows/workflow-health-manager.lock.yml index 88ea75b9a9..31deed04ed 100644 --- a/.github/workflows/workflow-health-manager.lock.yml +++ b/.github/workflows/workflow-health-manager.lock.yml @@ -227,7 +227,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -804,10 +804,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -835,7 +836,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -846,7 +848,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -931,7 +933,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -953,13 +955,13 @@ 
jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -1004,7 +1006,7 @@ jobs: # Upload repo memory as artifacts for push job - name: Upload repo-memory artifact (default) if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: repo-memory-default path: /tmp/gh-aw/repo-memory/default @@ -1013,7 +1015,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -1115,7 +1117,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1171,7 +1173,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1315,7 +1317,7 @@ jobs: git remote 
set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" echo "Git configured with standard GitHub Actions identity" - name: Download repo-memory artifact (default) - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 continue-on-error: true with: name: repo-memory-default @@ -1378,7 +1380,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1405,7 +1407,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/workflow-normalizer.lock.yml b/.github/workflows/workflow-normalizer.lock.yml index 9b5e7b50a2..9147009ba9 100644 --- a/.github/workflows/workflow-normalizer.lock.yml +++ b/.github/workflows/workflow-normalizer.lock.yml @@ -213,7 +213,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -263,7 +263,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -678,10 +678,11 @@ jobs: 
export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e 
GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -721,7 +722,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -732,7 +734,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -817,7 +819,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -839,13 +841,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: 
actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -890,7 +892,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -992,7 +994,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1045,7 +1047,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1155,7 +1157,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1182,7 +1184,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # 
v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/.github/workflows/workflow-skill-extractor.lock.yml b/.github/workflows/workflow-skill-extractor.lock.yml index 984d2ef487..e6ac91f47a 100644 --- a/.github/workflows/workflow-skill-extractor.lock.yml +++ b/.github/workflows/workflow-skill-extractor.lock.yml @@ -212,7 +212,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -674,10 +674,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v 
/opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -705,7 +706,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -716,7 +718,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -821,7 +823,7 @@ jobs: SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload Safe Outputs if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output path: ${{ env.GH_AW_SAFE_OUTPUTS }} @@ -843,13 +845,13 @@ jobs: await main(); - name: Upload sanitized agent output if: always() && env.GH_AW_AGENT_OUTPUT - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-output path: ${{ env.GH_AW_AGENT_OUTPUT }} if-no-files-found: warn - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -894,7 +896,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | @@ -996,7 +998,7 @@ jobs: await main(); - name: Upload threat detection log if: always() && steps.detection_guard.outputs.run_detection == 'true' - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: threat-detection.log path: /tmp/gh-aw/threat-detection/detection.log @@ -1050,7 +1052,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1158,7 +1160,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1185,7 +1187,7 @@ jobs: await main(); - name: Upload safe output items manifest if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: safe-output-items path: /tmp/safe-output-items.jsonl diff --git a/DEADCODE.md b/DEADCODE.md new file mode 100644 index 0000000000..c7280e9726 --- /dev/null +++ b/DEADCODE.md @@ -0,0 +1,61 @@ +# Dead Code Removal Guide + +## How to find dead code + +```bash +deadcode ./cmd/... ./internal/tools/... 2>/dev/null +``` + +**Critical:** Always include `./internal/tools/...` — it covers separate binaries called by the Makefile (e.g. `make actions-build`). Running `./cmd/...` alone gives false positives. + +## Verification after every batch + +```bash +go build ./... +go vet ./... +go vet -tags=integration ./... # catches integration test files invisible without the tag +make fmt +``` + +## Known pitfalls + +**WASM binary** — `cmd/gh-aw-wasm/main.go` has `//go:build js && wasm` so deadcode cannot analyse it. Before deleting anything from `pkg/workflow/`, check that file. Currently uses: +- `compiler.ParseWorkflowString` +- `compiler.CompileToYAML` + +**Test helpers** — `pkg/workflow/compiler_test_helpers.go` shows 3 dead functions but is used by 15 test files. Don't delete it. + +**Constant/embed rescue** — Some otherwise-dead files contain live constants or `//go:embed` directives. Extract them before deleting the file. 
+ +--- + +## Current dead code (276 functions, as of 2026-02-28) + +Run the command above to regenerate. Top files by dead function count: + +| File | Dead | Notes | +|------|------|-------| +| `pkg/workflow/js.go` | 17 | Get*/bundle stubs; many have no callers anywhere | +| `pkg/workflow/compiler_types.go` | 17 | `With*` option funcs + getters; check WASM first | +| `pkg/workflow/artifact_manager.go` | 14 | Many test callers; do last | +| `pkg/constants/constants.go` | 13 | All `String()`/`IsValid()` on semantic type aliases | +| `pkg/workflow/domains.go` | 10 | Check callers | +| `pkg/workflow/expression_builder.go` | 9 | Check callers | +| `pkg/workflow/validation_helpers.go` | 6 | Check callers | +| `pkg/cli/docker_images.go` | 6 | Check callers | +| `pkg/workflow/permissions_factory.go` | 5 | Check callers | +| `pkg/workflow/map_helpers.go` | 5 | Check callers | +| `pkg/workflow/engine_helpers.go` | 5 | Check callers | +| `pkg/console/console.go` | 5 | Check callers | +| `pkg/workflow/safe_outputs_env.go` | 4 | Check callers | +| `pkg/workflow/expression_nodes.go` | 4 | Check callers | + +~80 additional files have 1–3 dead functions each. + +## Suggested approach + +1. Pick a file with 5+ dead functions. +2. For each dead function, check callers: `grep -rn "FuncName" --include="*.go"`. If only test callers, also remove the tests. +3. Remove the function and any now-unused imports. +4. Run the verification commands above. +5. Commit per logical group, keep PRs small and reviewable. diff --git a/Makefile b/Makefile index 2fd3669d7e..e43c3205f1 100644 --- a/Makefile +++ b/Makefile @@ -550,15 +550,15 @@ fmt-go: .PHONY: fmt-cjs fmt-cjs: @echo "→ Formatting JavaScript files..." 
- @cd actions/setup/js && npm run format:cjs - @npx prettier --write 'scripts/**/*.js' --ignore-path .prettierignore + @cd actions/setup/js && npm run format:cjs --silent >/dev/null 2>&1 + @npx prettier --write 'scripts/**/*.js' --ignore-path .prettierignore --log-level=error 2>&1 @echo "✓ JavaScript files formatted" # Format JSON files in pkg directory (excluding actions/setup/js, which is handled by npm script) .PHONY: fmt-json fmt-json: @echo "→ Formatting JSON files..." - @cd actions/setup/js && npm run format:pkg-json + @cd actions/setup/js && npm run format:pkg-json --silent >/dev/null 2>&1 @echo "✓ JSON files formatted" # Check formatting diff --git a/docs/src/content/docs/agent-factory-status.mdx b/docs/src/content/docs/agent-factory-status.mdx index d68c06d718..c920c6b73b 100644 --- a/docs/src/content/docs/agent-factory-status.mdx +++ b/docs/src/content/docs/agent-factory-status.mdx @@ -130,6 +130,7 @@ These are experimental agentic workflows used by the GitHub Next team to learn, | [Safe Output Health Monitor](https://github.com/github/gh-aw/blob/main/.github/workflows/safe-output-health.md) | claude | [![Safe Output Health Monitor](https://github.com/github/gh-aw/actions/workflows/safe-output-health.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/safe-output-health.lock.yml) | - | - | | [Schema Consistency Checker](https://github.com/github/gh-aw/blob/main/.github/workflows/schema-consistency-checker.md) | claude | [![Schema Consistency Checker](https://github.com/github/gh-aw/actions/workflows/schema-consistency-checker.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/schema-consistency-checker.lock.yml) | - | - | | [Scout](https://github.com/github/gh-aw/blob/main/.github/workflows/scout.md) | claude | [![Scout](https://github.com/github/gh-aw/actions/workflows/scout.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/scout.lock.yml) | - | `/scout` | +| [Security Alert 
Burndown](https://github.com/github/gh-aw/blob/main/.github/workflows/security-alert-burndown.campaign.g.md) | claude | [![Security Alert Burndown](https://github.com/github/gh-aw/actions/workflows/security-alert-burndown.campaign.g.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/security-alert-burndown.campaign.g.lock.yml) | `0 18 * * *` | - | | [Security Compliance Campaign](https://github.com/github/gh-aw/blob/main/.github/workflows/security-compliance.md) | copilot | [![Security Compliance Campaign](https://github.com/github/gh-aw/actions/workflows/security-compliance.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/security-compliance.lock.yml) | - | - | | [Security Review Agent 🔒](https://github.com/github/gh-aw/blob/main/.github/workflows/security-review.md) | copilot | [![Security Review Agent 🔒](https://github.com/github/gh-aw/actions/workflows/security-review.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/security-review.lock.yml) | - | `/security` | | [Semantic Function Refactoring](https://github.com/github/gh-aw/blob/main/.github/workflows/semantic-function-refactor.md) | claude | [![Semantic Function Refactoring](https://github.com/github/gh-aw/actions/workflows/semantic-function-refactor.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/semantic-function-refactor.lock.yml) | - | - | @@ -144,6 +145,7 @@ These are experimental agentic workflows used by the GitHub Next team to learn, | [Smoke Multi PR](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-multi-pr.md) | copilot | [![Smoke Multi PR](https://github.com/github/gh-aw/actions/workflows/smoke-multi-pr.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/smoke-multi-pr.lock.yml) | - | - | | [Smoke Project](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-project.md) | copilot | [![Smoke 
Project](https://github.com/github/gh-aw/actions/workflows/smoke-project.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/smoke-project.lock.yml) | - | - | | [Smoke Temporary ID](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-temporary-id.md) | copilot | [![Smoke Temporary ID](https://github.com/github/gh-aw/actions/workflows/smoke-temporary-id.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/smoke-temporary-id.lock.yml) | - | - | +| [Smoke Workflow Call](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-workflow-call.md) | copilot | [![Smoke Workflow Call](https://github.com/github/gh-aw/actions/workflows/smoke-workflow-call.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/smoke-workflow-call.lock.yml) | - | - | | [Stale Repository Identifier](https://github.com/github/gh-aw/blob/main/.github/workflows/stale-repo-identifier.md) | copilot | [![Stale Repository Identifier](https://github.com/github/gh-aw/actions/workflows/stale-repo-identifier.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/stale-repo-identifier.lock.yml) | - | - | | [Static Analysis Report](https://github.com/github/gh-aw/blob/main/.github/workflows/static-analysis-report.md) | claude | [![Static Analysis Report](https://github.com/github/gh-aw/actions/workflows/static-analysis-report.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/static-analysis-report.lock.yml) | - | - | | [Step Name Alignment](https://github.com/github/gh-aw/blob/main/.github/workflows/step-name-alignment.md) | claude | [![Step Name Alignment](https://github.com/github/gh-aw/actions/workflows/step-name-alignment.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/step-name-alignment.lock.yml) | `daily` | - | diff --git a/docs/src/content/docs/guides/deterministic-agentic-patterns.md b/docs/src/content/docs/guides/deterministic-agentic-patterns.md index 
3423279e32..b601defd04 100644 --- a/docs/src/content/docs/guides/deterministic-agentic-patterns.md +++ b/docs/src/content/docs/guides/deterministic-agentic-patterns.md @@ -135,11 +135,7 @@ safe-outputs: inputs: summary: {required: true, type: string} steps: - - run: | - echo "## 🤖 AI Code Review\n\n${{ inputs.summary }}" > /tmp/report.md - gh pr comment ${{ github.event.pull_request.number }} --body-file /tmp/report.md - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - ... --- # Code Review Agent diff --git a/docs/src/content/docs/guides/network-configuration.md b/docs/src/content/docs/guides/network-configuration.md index e473ae1395..7dc4526915 100644 --- a/docs/src/content/docs/guides/network-configuration.md +++ b/docs/src/content/docs/guides/network-configuration.md @@ -36,7 +36,7 @@ network: | `rust` | crates.io | Rust crates | | `github` | githubusercontent.com | GitHub resources | | `terraform` | HashiCorp registry | Terraform modules | -| `playwright` | Browser downloads | Web testing | +| `playwright` | Browser downloads | Web testing ([reference](/gh-aw/reference/playwright/)) | | `linux-distros` | Debian, Ubuntu, Alpine | Linux packages | ## Common Configuration Patterns @@ -192,5 +192,6 @@ View complete ecosystem domain lists in the [ecosystem domains source](https://g ## Related Documentation - [Network Permissions Reference](/gh-aw/reference/network/) - Complete network configuration reference +- [Playwright Reference](/gh-aw/reference/playwright/) - Browser automation and network requirements - [Security Guide](/gh-aw/introduction/architecture/) - Security best practices - [Troubleshooting](/gh-aw/troubleshooting/common-issues/) - Common issues and solutions diff --git a/docs/src/content/docs/patterns/multi-repo-ops.md b/docs/src/content/docs/patterns/multi-repo-ops.md index 0ebf5a5d51..ecba4d9212 100644 --- a/docs/src/content/docs/patterns/multi-repo-ops.md +++ b/docs/src/content/docs/patterns/multi-repo-ops.md @@ -141,8 +141,12 @@ Enable GitHub 
toolsets to allow agents to query multiple repositories: tools: github: toolsets: [repos, issues, pull_requests, actions] + github-token: ${{ secrets.CROSS_REPO_PAT }} # Required for cross-repo reading ``` +> [!IMPORTANT] +> When reading from repositories other than the workflow's repository, you must configure additional authentication. The default `GITHUB_TOKEN` only has access to the current repository. Use a PAT, GitHub App token, or the magic secret `GH_AW_GITHUB_MCP_SERVER_TOKEN`. See [GitHub Tools Reference](/gh-aw/reference/github-tools/#cross-repository-reading) for details. + **Available Operations:** - **repos**: Read files, search code, list commits, get releases - **issues**: List and search issues across repositories diff --git a/docs/src/content/docs/reference/cross-repository.md b/docs/src/content/docs/reference/cross-repository.md index caec1e7cee..23f69eef53 100644 --- a/docs/src/content/docs/reference/cross-repository.md +++ b/docs/src/content/docs/reference/cross-repository.md @@ -9,20 +9,19 @@ Cross-repository operations enable workflows to access code from multiple reposi ## Overview -Cross-repository features fall into two categories: +Cross-repository features fall into three categories: 1. **Code access** - Check out code from multiple repositories into the workflow workspace using the `checkout:` frontmatter field -2. **Resource creation** - Create issues, PRs, comments, and other resources in external repositories using `target-repo` and `allowed-repos` in safe outputs +2. **GitHub tools** - Read information from other repositories using GitHub Tools with additional authentication +3. **Safe outputs** - Create issues, PRs, comments, and other resources in external repositories using `target-repo` and `allowed-repos` in safe outputs -Both require authentication beyond the default `GITHUB_TOKEN`, which is scoped to the current repository only. 
+All require authentication beyond the default `GITHUB_TOKEN`, which is scoped to the current repository only. -## Repository Checkout (`checkout:`) +## Cross-repository Checkout (`checkout:`) The `checkout:` frontmatter field controls how `actions/checkout` is invoked in the agent job. Configure custom checkout settings or check out multiple repositories. -### Single Repository Configuration - -Override default checkout settings for the main repository: +If only a the current repository, you can use `checkout:` to override default checkout settings (e.g., fetch depth, sparse checkout) without needing to define a custom job: ```yaml wrap checkout: @@ -30,9 +29,7 @@ checkout: github-token: ${{ secrets.MY_TOKEN }} # Custom authentication ``` -### Multiple Repository Checkout - -Check out additional repositories alongside the main repository: +You can also use `checkout:` to check out additional repositories alongside the main repository: ```yaml wrap checkout: @@ -58,8 +55,17 @@ checkout: | `lfs` | boolean | Download Git LFS objects. | | `current` | boolean | Marks this checkout as the primary working repository. The agent uses this as the default target for all GitHub operations. Only one checkout may set `current: true`; the compiler rejects workflows where multiple checkouts enable it. | -> [!TIP] -> Credentials are always removed after checkout (`persist-credentials: false` is enforced) to prevent credential exfiltration by agents. +### Checkout Merging + +Multiple `checkout:` configurations can target the same path and repository. This is useful for monorepos where different parts of the repository must be merged into the same workspace directory with different settings (e.g., sparse checkout for some paths, full checkout for others). 
+ +When multiple `checkout:` entries target the same repository and path, their configurations are merged with the following rules: + +- **Fetch depth**: Deepest value wins (`0` = full history always takes precedence) +- **Sparse patterns**: Merged (union of all patterns) +- **LFS**: OR-ed (if any config enables `lfs`, the merged configuration enables it) +- **Submodules**: First non-empty value wins for each `(repository, path)`; once set, later values are ignored +- **Ref/Token**: First-seen wins ### Marking a Primary Repository (`current: true`) @@ -67,55 +73,28 @@ When a workflow running from a central repository targets a different repository ```yaml wrap checkout: - - path: . # central/control repo - repository: org/target-repo path: ./target github-token: ${{ secrets.CROSS_REPO_PAT }} current: true # agent's primary target ``` -### Multiple Checkout Merging - -When multiple configurations target the same path and repository: - -- **Fetch depth**: Deepest value wins (`0` = full history always takes precedence) -- **Sparse patterns**: Merged (union of all patterns) -- **LFS**: OR-ed (if any config enables `lfs`, the merged configuration enables it) -- **Submodules**: First non-empty value wins for each `(repository, path)`; once set, later values are ignored -- **Ref/Token**: First-seen wins +## GitHub Tools - Reading Other Repositories -### Example: Monorepo Development +When using [GitHub Tools](/gh-aw/reference/github-tools/) to read information from repositories other than the one where the workflow is running, you must configure additional authorization. The default `GITHUB_TOKEN` is scoped to the current repository only and cannot access other repositories. -```aw wrap ---- -on: - pull_request: - types: [opened, synchronize] +Configure the additional authentication in your GitHub Tools configuration. For example, using a PAT: -checkout: - - path: . 
- fetch-depth: 0 - - repository: org/shared-libs - path: ./libs/shared - ref: main - github-token: ${{ secrets.LIBS_PAT }} - - repository: org/config-repo - path: ./config - sparse-checkout: | - defaults/ - overrides/ - -permissions: - contents: read - pull-requests: read ---- - -# Cross-Repo PR Analysis +```yaml wrap +tools: + github: + toolsets: [repos, issues, pull_requests] + github-token: ${{ secrets.CROSS_REPO_PAT }} +``` -Analyze this PR considering shared library compatibility and configuration standards. +See [GitHub Tools Reference](/gh-aw/reference/github-tools/#cross-repository-reading) for complete details on configuring cross-repository read access for GitHub Tools. -Check compatibility with shared libraries in `./libs/shared` and verify configuration against standards in `./config`. -``` +This authentication is for **reading** information from GitHub. Authorization for **writing** to other repositories (creating issues, PRs, comments) is configured separately, see below. ## Cross-Repository Safe Outputs @@ -154,8 +133,47 @@ When `allowed-repos` is specified: - Target repository (from `target-repo` or current repo) is always implicitly allowed - Creates a union of allowed destinations +## Examples + +### Example: Monorepo Development + +This uses multiple `checkout:` entries to check out different parts of the same repository with different settings: + +```aw wrap +--- +on: + pull_request: + types: [opened, synchronize] + +checkout: + - path: . + fetch-depth: 0 + - repository: org/shared-libs + path: ./libs/shared + ref: main + github-token: ${{ secrets.LIBS_PAT }} + - repository: org/config-repo + path: ./config + sparse-checkout: | + defaults/ + overrides/ + +permissions: + contents: read + pull-requests: read +--- + +# Cross-Repo PR Analysis + +Analyze this PR considering shared library compatibility and configuration standards. + +Check compatibility with shared libraries in `./libs/shared` and verify configuration against standards in `./config`. 
+``` + ### Example: Hub-and-Spoke Tracking +This creates issues in a central tracking repository when issues are opened in component repositories: + ```aw wrap --- on: @@ -185,38 +203,39 @@ Analyze the issue and create a tracking issue that: - Tags relevant teams for coordination ``` -## Authentication - -Cross-repository operations require authentication with access to target repositories. +### Example: Cross-Repository Analysis -### Personal Access Token (PAT) +This checks out multiple repositories and compares code patterns across them: -Create a fine-grained PAT with access to target repositories: +```aw wrap +--- +on: + issue_comment: + types: [created] -```yaml wrap -safe-outputs: - github-token: ${{ secrets.CROSS_REPO_PAT }} - create-issue: - target-repo: "org/target-repo" -``` +tools: + github: + toolsets: [repos, issues, pull_requests] + github-token: ${{ secrets.CROSS_REPO_PAT }} -**Required permissions** (on target repositories only): +permissions: + contents: read + issues: read -| Operation | Permissions | -|-----------|-------------| -| Create/update issues | `issues: write` | -| Create PRs | `contents: write`, `pull-requests: write` | -| Add comments | `issues: write` or `pull-requests: write` | -| Checkout code | `contents: read` | +safe-outputs: + github-token: ${{ secrets.CROSS_REPO_WRITE_PAT }} + add-comment: + max: 1 +--- -> [!TIP] -> **Security Best Practice**: Scope PATs to have read access on source repositories and write access only on target repositories. Use separate tokens for different operations when possible. +# Multi-Repository Code Search -### GitHub App Installation Token +Search for similar patterns across org/repo-a, org/repo-b, and org/repo-c. -For enhanced security, use GitHub Apps. See [Authentication Reference](/gh-aw/reference/auth/#using-a-github-app-for-authentication) for complete configuration examples. +Analyze how each repository implements authentication and provide a comparison. 
+``` -## Deterministic Multi-Repo Workflows +### Example: Deterministic Multi-Repo Workflows For direct repository access without agent involvement, use custom steps with `actions/checkout`: @@ -253,6 +272,7 @@ This approach provides full control over checkout timing and configuration. - [MultiRepoOps Pattern](/gh-aw/patterns/multi-repo-ops/) - Cross-repository workflow pattern - [CentralRepoOps Pattern](/gh-aw/patterns/central-repo-ops/) - Central control plane pattern +- [GitHub Tools Reference](/gh-aw/reference/github-tools/) - Complete GitHub Tools configuration - [Safe Outputs Reference](/gh-aw/reference/safe-outputs/) - Complete safe output configuration - [Authentication Reference](/gh-aw/reference/auth/) - PAT and GitHub App setup - [Multi-Repository Examples](/gh-aw/examples/multi-repo/) - Complete working examples diff --git a/docs/src/content/docs/reference/frontmatter-hash-specification.md b/docs/src/content/docs/reference/frontmatter-hash-specification.md index 04b7ae5183..262c0a6dc8 100644 --- a/docs/src/content/docs/reference/frontmatter-hash-specification.md +++ b/docs/src/content/docs/reference/frontmatter-hash-specification.md @@ -36,7 +36,7 @@ Include the following frontmatter fields in the hash computation: - `tracker-id` - Workflow tracker identifier **Tool and Integration:** -- `tools` - Tool configurations (Playwright, etc.) +- `tools` - Tool configurations (GitHub, Playwright, etc.) 
- `mcp-servers` - MCP server configurations - `network` - Network access permissions - `safe-outputs` - Safe output configurations diff --git a/docs/src/content/docs/reference/frontmatter.md b/docs/src/content/docs/reference/frontmatter.md index 0a8108a6d3..d61d0ee348 100644 --- a/docs/src/content/docs/reference/frontmatter.md +++ b/docs/src/content/docs/reference/frontmatter.md @@ -588,7 +588,7 @@ checkout: ref: main ``` -See [Cross-Repository Operations](/gh-aw/reference/cross-repository/#repository-checkout-checkout) for complete documentation on checkout configuration options, merging behavior, and cross-repo examples. +See [Cross-Repository Operations](/gh-aw/reference/cross-repository/) for complete documentation on checkout configuration options, merging behavior, and cross-repo examples. ## Custom Steps (`steps:`) diff --git a/docs/src/content/docs/reference/github-tools.md b/docs/src/content/docs/reference/github-tools.md index ecdbe774ea..f95ac5ac50 100644 --- a/docs/src/content/docs/reference/github-tools.md +++ b/docs/src/content/docs/reference/github-tools.md @@ -127,6 +127,30 @@ Alternatively, you can set the magic secret `GH_AW_GITHUB_MCP_SERVER_TOKEN` to a gh aw secrets set GH_AW_GITHUB_MCP_SERVER_TOKEN --value "" ``` +## Cross-Repository Reading + +When GitHub Tools need to read information from repositories other than the one where the workflow is running, additional authorization is required. The default `GITHUB_TOKEN` only has access to the current repository. 
+ +Configure cross-repository read access using the same authentication methods described above: + +```yaml wrap +tools: + github: + toolsets: [repos, issues, pull_requests] + github-token: ${{ secrets.CROSS_REPO_PAT }} +``` + +This enables operations like: +- Reading files and searching code in external repositories +- Querying issues and pull requests from other repos +- Accessing commits, releases, and workflow runs across repositories +- Reading organization-level information + +> [!NOTE] +> This authorization is for **reading** from GitHub. For **writing** to other repositories (creating issues, PRs, comments), configure authentication separately through [Safe Outputs](/gh-aw/reference/safe-outputs/) with cross-repository operations. + +For complete cross-repository workflow patterns and examples, see [Cross-Repository Operations](/gh-aw/reference/cross-repository/). + ## Related Documentation - [Tools Reference](/gh-aw/reference/tools/) - All tool configurations diff --git a/docs/src/content/docs/reference/glossary.md b/docs/src/content/docs/reference/glossary.md index 4ed28fb556..779627891b 100644 --- a/docs/src/content/docs/reference/glossary.md +++ b/docs/src/content/docs/reference/glossary.md @@ -61,11 +61,11 @@ A transparent proxy service that enables unified HTTP access to multiple MCP ser ### MCP Server -A service that implements the Model Context Protocol to provide specific capabilities to AI agents. Examples include the GitHub MCP server (for GitHub API operations), Playwright MCP server (for browser automation), or custom MCP servers for specialized tools. +A service that implements the Model Context Protocol to provide specific capabilities to AI agents. Examples include the GitHub MCP server (for GitHub API operations), Playwright MCP server (for browser automation), or custom MCP servers for specialized tools. See [Playwright Reference](/gh-aw/reference/playwright/) for browser automation configuration. 
### Tools -Capabilities that an AI agent can use during workflow execution. Tools are configured in the frontmatter and include GitHub operations (`github:`), file editing (`edit:`), web access (`web-fetch:`, `web-search:`), shell commands (`bash:`), browser automation (`playwright:`), and custom MCP servers. +Capabilities that an AI agent can use during workflow execution. Tools are configured in the frontmatter and include GitHub operations ([`github:`](/gh-aw/reference/github-tools/)), file editing (`edit:`), web access (`web-fetch:`, `web-search:`), shell commands (`bash:`), browser automation ([`playwright:`](/gh-aw/reference/playwright/)), and custom MCP servers. ## Security and Outputs diff --git a/docs/src/content/docs/reference/mcp-gateway.md b/docs/src/content/docs/reference/mcp-gateway.md index bb6aebf3a4..78da6acef7 100644 --- a/docs/src/content/docs/reference/mcp-gateway.md +++ b/docs/src/content/docs/reference/mcp-gateway.md @@ -246,6 +246,8 @@ The `gateway` section is required and configures gateway-specific behavior: | `startupTimeout` | integer | No | Server startup timeout in seconds (default: 30) | | `toolTimeout` | integer | No | Tool invocation timeout in seconds (default: 60) | | `payloadDir` | string | No | Directory path for storing large payload JSON files for authenticated clients | +| `payloadPathPrefix` | string | No | Path prefix to remap payload paths for agent containers (e.g., /workspace/payloads) | +| `payloadSizeThreshold` | integer | No | Size threshold in bytes for storing payloads to disk (default: 524288 = 512KB) | #### 4.1.3.1 Payload Directory Path Validation @@ -301,6 +303,70 @@ Empty or malformed: **Compliance Test**: T-CFG-005 - Payload Directory Path Validation +#### 4.1.3.2 Payload Path Prefix for Agent Containers + +When the optional `payloadPathPrefix` field is provided in the gateway configuration, it specifies a path prefix used to remap payload file paths returned to clients. 
This enables agents running in containers to access payload files via mounted volumes. + +**How it works**: + +1. Gateway saves payload to actual filesystem: `/tmp/jq-payloads/session123/query456/payload.json` +2. Gateway returns remapped path to client: `/workspace/payloads/session123/query456/payload.json` +3. Agent container mounts volume: `-v /tmp/jq-payloads:/workspace/payloads` +4. Agent can now access the file at the returned path ✅ + +**Configuration Example**: + +```toml +[gateway] +payload_dir = "/tmp/jq-payloads" +payload_path_prefix = "/workspace/payloads" +port = 8080 +domain = "localhost" +apiKey = "secret" +``` + +**Use Cases**: +- Agents running in containers with different filesystem layouts +- Docker-in-Docker scenarios where host paths need remapping +- Environments with controlled volume mounts for security + +**Requirements**: +- If specified, the path prefix SHOULD match a mounted volume in the agent container +- The gateway MUST use this prefix when returning `payloadPath` to clients +- The gateway MUST still save files to the actual filesystem path (`payloadDir`) + +#### 4.1.3.3 Payload Size Threshold + +The `payloadSizeThreshold` field (default: 524288 bytes = 512KB) controls when response payloads are stored to disk versus returned inline. + +**Behavior**: +- Payloads **smaller than or equal** to threshold: Returned inline in the response +- Payloads **larger than** threshold: Stored to disk, metadata returned with `payloadPath` + +**Default Value**: 524288 bytes (512KB) + +**Rationale**: The 512KB default accommodates typical MCP tool responses including GitHub API queries (list_commits, list_issues, etc.) without triggering disk storage. This prevents agent looping issues when payloadPath is not accessible in agent containers. 
+ +**Configuration Example**: + +```toml +[gateway] +payload_size_threshold = 1048576 # 1MB - minimize disk storage +# OR +payload_size_threshold = 262144 # 256KB - more aggressive disk storage +``` + +**Configuration Methods**: +- CLI flag: `--payload-size-threshold ` +- Environment variable: `MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD=` +- TOML config file: `payload_size_threshold = ` in `[gateway]` section +- Default if not specified: 524288 bytes (512KB) + +**Requirements**: +- Threshold MUST be a positive integer representing bytes +- Gateway MUST compare actual payload size against threshold before deciding storage method +- Threshold MAY be adjusted based on deployment needs (memory vs disk I/O trade-offs) + #### 4.1.3a Top-Level Configuration Fields The following fields MAY be specified at the top level of the configuration: diff --git a/docs/src/content/docs/reference/network.md b/docs/src/content/docs/reference/network.md index 23dd7c144c..753b94e9e7 100644 --- a/docs/src/content/docs/reference/network.md +++ b/docs/src/content/docs/reference/network.md @@ -99,7 +99,7 @@ Mix ecosystem identifiers with specific domains for fine-grained control: | `linux-distros` | Debian, Alpine, and other Linux package repositories | | `dotnet`, `dart`, `go`, `haskell`, `java`, `node`, `perl`, `php`, `python`, `ruby`, `rust`, `swift` | Language-specific package managers and registries | | `terraform` | HashiCorp and Terraform domains | -| `playwright` | Playwright testing framework domains | +| `playwright` | Playwright testing framework domains (see [Playwright Reference](/gh-aw/reference/playwright/)) | Common identifiers: `python` (PyPI/pip), `node` (npm/yarn/pnpm), `containers` (Docker Hub/GHCR), `go` (proxy.golang.org). See the [Network Configuration Guide](/gh-aw/guides/network-configuration/) for complete domain lists. 
@@ -290,4 +290,5 @@ Use `gh aw logs --run-id ` to view firewall activity and identify blocke - [Network Configuration Guide](/gh-aw/guides/network-configuration/) - Practical examples and common patterns - [Frontmatter](/gh-aw/reference/frontmatter/) - Complete frontmatter configuration guide - [Tools](/gh-aw/reference/tools/) - Tool-specific network access configuration +- [Playwright](/gh-aw/reference/playwright/) - Browser automation and network requirements - [Security Guide](/gh-aw/introduction/architecture/) - Comprehensive security guidance diff --git a/docs/src/content/docs/reference/playwright.md b/docs/src/content/docs/reference/playwright.md new file mode 100644 index 0000000000..e3a74ca100 --- /dev/null +++ b/docs/src/content/docs/reference/playwright.md @@ -0,0 +1,209 @@ +--- +title: Playwright +description: Configure Playwright browser automation for testing web applications, accessibility analysis, and visual testing in your agentic workflows +sidebar: + order: 720 +--- + +Configure Playwright for browser automation and testing in your agentic workflows. Playwright enables headless browser control for accessibility testing, visual regression detection, end-to-end testing, and web scraping. + +```yaml wrap +tools: + playwright: + playwright: + version: "1.56.1" # Optional: specify version, defaults to 1.56.1 + playwright: + version: "latest" # Use the latest available version +``` + +## Configuration Options + +### Version + +Specify the Playwright version to use: + +```yaml wrap +tools: + playwright: + version: "1.56.1" # Pin to specific version (default) + playwright: + version: "latest" # Use latest available version +``` + +**Default**: `1.56.1` (when `version` is not specified) + +## Network Access Configuration + +Domain access for Playwright is controlled by the top-level [`network:`](/gh-aw/reference/network/) field. By default, Playwright can only access `localhost` and `127.0.0.1`. 
+ +### Using Ecosystem Identifiers + +```yaml wrap +network: + allowed: + - defaults + - playwright # Enables browser downloads + - github # For testing GitHub pages + - node # For testing Node.js apps +``` + +### Custom Domains + +Add specific domains for the sites you want to test: + +```yaml wrap +network: + allowed: + - defaults + - playwright + - "example.com" # Matches example.com and subdomains + - "*.staging.example.com" # Wildcard for staging environments +``` + +**Automatic subdomain matching**: When you allow `example.com`, all subdomains like `api.example.com`, `www.example.com`, and `staging.example.com` are automatically allowed. + +### Default Localhost Access + +Without any `network:` configuration, Playwright defaults to: + +```yaml wrap +network: + allowed: + - "localhost" + - "127.0.0.1" +``` + +This is sufficient for testing local development servers. + +## GitHub Actions Compatibility + +Playwright runs in a Docker container on GitHub Actions runners. To ensure Chromium functions correctly, gh-aw automatically configures required security flags: + +- `--security-opt seccomp=unconfined` - Allows Chromium's sandboxing mechanisms +- `--ipc=host` - Enables inter-process communication for browser processes + +These flags are automatically applied starting with **gh-aw version 0.41.0 and later**. No manual configuration is needed. + +## Browser Support + +Playwright includes three browser engines: + +- **Chromium** - Chrome/Edge engine (most commonly used) +- **Firefox** - Mozilla Firefox engine +- **WebKit** - Safari engine + +All three browsers are available in the Playwright Docker container. Your workflow can use any or all of them based on your testing needs. 
+ +## Common Use Cases + +### Accessibility Testing + +```aw wrap +--- +on: + schedule: + - cron: "0 9 * * *" # Daily at 9 AM + +tools: + playwright: + +network: + allowed: + - defaults + - playwright + - "docs.example.com" + +permissions: + issues: write + contents: read + +safe-outputs: + create-issue: + title-prefix: "[a11y] " + labels: [accessibility, automated] + max: 3 +--- + +# Accessibility Audit + +Use Playwright to check docs.example.com for WCAG 2.1 Level AA compliance. + +Run automated accessibility checks using axe-core and report: +- Missing alt text on images +- Insufficient color contrast +- Missing ARIA labels +- Keyboard navigation issues + +Create an issue for each category of problems found. +``` + +### Visual Regression Testing + +```aw wrap +--- +on: + pull_request: + types: [opened, synchronize] + +tools: + playwright: + +network: + allowed: + - defaults + - playwright + - github + +permissions: + pull-requests: write + contents: read + +safe-outputs: + add-comment: + max: 1 +--- + +# Visual Regression Check + +Compare screenshots of the documentation site before and after this PR. + +Test on multiple viewports (mobile, tablet, desktop) and report any visual differences. +``` + +### End-to-End Testing + +```aw wrap +--- +on: + workflow_dispatch: + +tools: + playwright: + bash: [":*"] + +network: + allowed: + - defaults + - playwright + - "localhost" + +permissions: + contents: read +--- + +# E2E Testing + +Start the development server locally and run end-to-end tests with Playwright. + +1. Start the dev server on localhost:3000 +2. Test the complete user journey +3. 
Report any failures with screenshots +``` + +## Related Documentation + +- [Tools Reference](/gh-aw/reference/tools/) - All tool configurations +- [Network Permissions](/gh-aw/reference/network/) - Network access control +- [Network Configuration Guide](/gh-aw/guides/network-configuration/) - Common network patterns +- [Safe Outputs Reference](/gh-aw/reference/safe-outputs/) - Configure output creation +- [Frontmatter](/gh-aw/reference/frontmatter/) - All frontmatter configuration options diff --git a/docs/src/content/docs/reference/tools.md b/docs/src/content/docs/reference/tools.md index 67f9b51a42..fda4a25e01 100644 --- a/docs/src/content/docs/reference/tools.md +++ b/docs/src/content/docs/reference/tools.md @@ -1,6 +1,6 @@ --- title: Tools -description: Configure GitHub API tools, browser automation, and AI capabilities available to your agentic workflows, including GitHub tools, Playwright, and custom MCP servers. +description: Configure GitHub API tools, browser automation, and AI capabilities available to your agentic workflows, including GitHub tools and custom MCP servers. sidebar: order: 700 --- @@ -15,7 +15,9 @@ tools: Some tools are available by default. All tools declared in imported components are merged into the final workflow. -## Edit Tool (`edit:`) +## Built-in Tools + +### Edit Tool (`edit:`) Allows file editing in the GitHub Actions workspace. @@ -24,7 +26,19 @@ tools: edit: ``` -## Bash Tool (`bash:`) +### GitHub Tools (`github:`) + +Configure GitHub API operations including toolsets, remote/local modes, and authentication. + +```yaml wrap +tools: + github: + toolsets: [repos, issues] +``` + +See **[GitHub Tools Reference](/gh-aw/reference/github-tools/)** for complete configuration options. + +### Bash Tool (`bash:`) Enables shell command execution in the workspace. Defaults to safe commands (`echo`, `ls`, `pwd`, `cat`, `head`, `tail`, `grep`, `wc`, `sort`, `uniq`, `date`). 
@@ -38,7 +52,7 @@ tools: Use wildcards like `git:*` for command families or `:*` for unrestricted access. -## Web Tools +### Web Tools Enable web content fetching and search capabilities: @@ -50,59 +64,53 @@ tools: **Note:** Some engines require third-party Model Context Protocol (MCP) servers for web search. See [Using Web Search](/gh-aw/guides/web-search/). -## GitHub Tools (`github:`) - -Configure GitHub API operations including toolsets, remote/local modes, and authentication. - -See **[GitHub Tools Reference](/gh-aw/reference/github-tools/)** for complete configuration options. - -## Playwright Tool (`playwright:`) +### Playwright Tool (`playwright:`) Configure Playwright for browser automation and testing: ```yaml wrap tools: playwright: - version: "1.56.1" # Optional: defaults to 1.56.1, use "latest" for newest + version: "1.56.1" # Optional: specify version ``` -**Domain Access**: Controlled by the top-level [`network:`](/gh-aw/reference/network/) field. Uses ecosystem bundles (`defaults`, `github`, `node`, `python`, etc.). Defaults to `["localhost", "127.0.0.1"]`. Domains auto-include subdomains. +See **[Playwright Reference](/gh-aw/reference/playwright/)** for complete configuration options, network access, browser support, and example workflows. -**GitHub Actions Compatibility**: Playwright runs in a Docker container with security flags required for Chromium to function on GitHub Actions runners (`--security-opt seccomp=unconfined` and `--ipc=host`). These flags are automatically configured by gh-aw version 0.41.0 and later. - -## Built-in MCP Tools - -### Agentic Workflows (`agentic-workflows:`) +### Cache Memory (`cache-memory:`) -Provides workflow introspection, log analysis, and debugging tools. Requires `actions: read` permission: +Persistent memory storage across workflow runs for trends and historical data. 
```yaml wrap -permissions: - actions: read tools: - agentic-workflows: + cache-memory: ``` -See [MCP Server](/gh-aw/reference/gh-aw-as-mcp-server/) for available operations. +See **[Cache Memory Reference](/gh-aw/reference/cache-memory/)** for complete configuration options and usage examples. -### Cache Memory (`cache-memory:`) +### Repo Memory (`repo-memory:`) -Persistent memory storage across workflow runs for trends and historical data. +Repository-specific memory storage for maintaining context across executions. ```yaml wrap tools: - cache-memory: + repo-memory: ``` -### Repo Memory (`repo-memory:`) +See **[Repo Memory Reference](/gh-aw/reference/repo-memory/)** for complete configuration options and usage examples. -Repository-specific memory storage for maintaining context across executions. +### Introspection on Agentic Workflows (`agentic-workflows:`) + +Provides workflow introspection, log analysis, and debugging tools. Requires `actions: read` permission: ```yaml wrap +permissions: + actions: read tools: - repo-memory: + agentic-workflows: ``` +See [GH-AW as an MCP Server](/gh-aw/reference/gh-aw-as-mcp-server/) for available operations. + ## Custom MCP Servers (`mcp-servers:`) Integrate custom Model Context Protocol servers for third-party services: @@ -161,6 +169,9 @@ The `registry` field is informational and does not affect server execution. 
It c ## Related Documentation - [GitHub Tools](/gh-aw/reference/github-tools/) - GitHub API operations, toolsets, and modes +- [Playwright](/gh-aw/reference/playwright/) - Browser automation and testing configuration +- [Cache Memory](/gh-aw/reference/cache-memory/) - Persistent memory across workflow runs +- [Repo Memory](/gh-aw/reference/repo-memory/) - Repository-specific memory storage - [Safe Inputs](/gh-aw/reference/safe-inputs/) - Define custom inline tools with JavaScript or shell scripts - [Frontmatter](/gh-aw/reference/frontmatter/) - All frontmatter configuration options - [Network Permissions](/gh-aw/reference/network/) - Network access control for AI engines diff --git a/docs/src/content/docs/reference/triggers.md b/docs/src/content/docs/reference/triggers.md index f4fb1a847b..bb9b0b7924 100644 --- a/docs/src/content/docs/reference/triggers.md +++ b/docs/src/content/docs/reference/triggers.md @@ -378,6 +378,51 @@ on: A pre-activation check runs the search query against the current repository. If matches are below the threshold (default `min: 1`), the workflow is skipped. Can be combined with `skip-if-match` for complex conditions. +## Trigger Shorthands + +Instead of writing full YAML trigger configurations, you can use natural-language shorthand strings with `on:`. The compiler expands these into standard GitHub Actions trigger syntax and automatically includes `workflow_dispatch` so the workflow can also be run manually. + +For label-based shorthands (`on: issue labeled bug`, `on: pull_request labeled needs-review`), see [Label Filtering](#label-filtering-names) above. 
+ +### Push and Pull Request + +```yaml wrap +on: push to main # Push to specific branch +on: push tags v* # Push tags matching pattern +on: pull_request opened # PR with activity type +on: pull_request merged # PR merged (maps to closed + merge condition) +on: pull_request affecting src/** # PR touching paths (opened, synchronize, reopened) +on: pull_request opened affecting docs/** # Activity type + path filter +``` + +`pull` is an alias for `pull_request`. Valid activity types: `opened`, `edited`, `closed`, `reopened`, `synchronize`, `assigned`, `unassigned`, `labeled`, `unlabeled`, `review_requested`, `merged`. + +### Issues and Discussions + +```yaml wrap +on: issue opened # Issue with activity type +on: issue opened labeled bug # Issue opened with specific label (adds job condition) +on: discussion created # Discussion with activity type +``` + +Valid issue types: `opened`, `edited`, `closed`, `reopened`, `assigned`, `unassigned`, `labeled`, `unlabeled`, `deleted`, `transferred`. Valid discussion types: `created`, `edited`, `deleted`, `transferred`, `pinned`, `unpinned`, `labeled`, `unlabeled`, `locked`, `unlocked`, `category_changed`, `answered`, `unanswered`. + +### Other Shorthands + +```yaml wrap +on: manual # workflow_dispatch (run manually) +on: manual with input version # workflow_dispatch with a string input +on: workflow completed ci-test # Trigger after another workflow completes +on: comment created # Issue or PR comment created +on: release published # Release event (published, created, prereleased, etc.) 
+on: repository starred # Repository starred (maps to watch event) +on: repository forked # Repository forked +on: dependabot pull request # PR from Dependabot (adds actor condition) +on: security alert # Code scanning alert +on: code scanning alert # Alias for security alert (code scanning alert) +on: api dispatch custom-event # Repository dispatch with custom event type +``` + ## Related Documentation - [Schedule Syntax](/gh-aw/reference/schedule-syntax/) - Complete schedule format reference diff --git a/docs/src/content/docs/setup/cli.md b/docs/src/content/docs/setup/cli.md index dc73911b24..1ddeb082d4 100644 --- a/docs/src/content/docs/setup/cli.md +++ b/docs/src/content/docs/setup/cli.md @@ -60,7 +60,7 @@ curl -sL https://raw.githubusercontent.com/github/gh-aw/main/install-gh-aw.sh | curl -sL https://raw.githubusercontent.com/github/gh-aw/main/install-gh-aw.sh | bash -s v0.1.0 # Pinned ``` -Installs to `~/.local/share/gh/extensions/gh-aw/gh-aw`. Supports Linux, macOS, FreeBSD, and Windows. Works behind corporate firewalls using direct release download URLs. +Installs to `~/.local/share/gh/extensions/gh-aw/gh-aw`. Supports Linux, macOS, FreeBSD, Windows, and Android (Termux). Works behind corporate firewalls using direct release download URLs. ### GitHub Actions Setup Action @@ -411,6 +411,8 @@ gh aw remove my-workflow Update workflows based on `source` field (`owner/repo/path@ref`). By default, performs a 3-way merge to preserve local changes; use `--no-merge` to override with upstream. Semantic versions update within same major version. +By default, `update` also force-updates all GitHub Actions referenced in your workflows (both in `actions-lock.json` and workflow files) to their latest major version. Use `--disable-release-bump` to restrict force-updates to core `actions/*` actions only. + If no workflows in the repository contain a `source` field, the command exits gracefully with an informational message rather than an error. 
This is expected behavior for repositories that have not yet added updatable workflows. ```bash wrap @@ -418,9 +420,10 @@ gh aw update # Update all with source field gh aw update ci-doctor # Update specific workflow (3-way merge) gh aw update ci-doctor --no-merge # Override local changes with upstream gh aw update ci-doctor --major --force # Allow major version updates +gh aw update --disable-release-bump # Update workflows; only force-update core actions/* ``` -**Options:** `--dir`, `--no-merge`, `--major`, `--force`, `--engine`, `--no-stop-after`, `--stop-after` +**Options:** `--dir`, `--no-merge`, `--major`, `--force`, `--engine`, `--no-stop-after`, `--stop-after`, `--disable-release-bump` #### `upgrade` diff --git a/pkg/cli/copilot_agent_test.go b/pkg/cli/copilot_agent_test.go index 0ed4ebfde5..2829182d6e 100644 --- a/pkg/cli/copilot_agent_test.go +++ b/pkg/cli/copilot_agent_test.go @@ -7,8 +7,6 @@ import ( "path/filepath" "strings" "testing" - - "github.com/github/gh-aw/pkg/logger" ) func TestCopilotCodingAgentDetector_IsGitHubCopilotCodingAgent(t *testing.T) { @@ -245,49 +243,6 @@ func TestExtractToolName(t *testing.T) { } } -func TestExtractErrorMessage(t *testing.T) { - tests := []struct { - name string - line string - expected string - }{ - { - name: "removes ISO timestamp", - line: "2024-01-15T10:00:00.123Z ERROR: Connection failed", - expected: "Connection failed", - }, - { - name: "removes bracketed timestamp", - line: "[2024-01-15 10:00:00] ERROR: File not found", - expected: "File not found", - }, - { - name: "removes log level prefix", - line: "ERROR: Invalid input", - expected: "Invalid input", - }, - { - name: "handles warning prefix", - line: "WARNING: Deprecated API", - expected: "Deprecated API", - }, - { - name: "handles plain message", - line: " Simple error message ", - expected: "Simple error message", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := logger.ExtractErrorMessage(tt.line) - if result != 
tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - func TestIntegration_CopilotCodingAgentWithAudit(t *testing.T) { // Create a temporary directory that simulates a GitHub Copilot coding agent run // NOTE: GitHub Copilot coding agent runs do NOT have aw_info.json (that's for agentic workflows) diff --git a/pkg/cli/exec.go b/pkg/cli/exec.go deleted file mode 100644 index 4d50af16ce..0000000000 --- a/pkg/cli/exec.go +++ /dev/null @@ -1,139 +0,0 @@ -package cli - -import ( - "bytes" - "os" - "os/exec" - "strings" - - "github.com/cli/go-gh/v2" - "github.com/github/gh-aw/pkg/logger" -) - -var execLog = logger.New("cli:exec") - -// ghExecOrFallback executes a gh CLI command if GH_TOKEN is available, -// otherwise falls back to an alternative command. -// The gh CLI arguments are inferred from the fallback command arguments. -// Returns the stdout, stderr, and error from whichever command was executed. -func ghExecOrFallback(fallbackCmd string, fallbackArgs []string, fallbackEnv []string) (string, string, error) { - ghToken := os.Getenv("GH_TOKEN") - - if ghToken != "" { - // Use gh CLI when GH_TOKEN is available - // Infer gh args from fallback args - ghArgs := inferGhArgs(fallbackCmd, fallbackArgs) - execLog.Printf("Using gh CLI: gh %s", strings.Join(ghArgs, " ")) - stdout, stderr, err := gh.Exec(ghArgs...) - return stdout.String(), stderr.String(), err - } - - // Fall back to alternative command when GH_TOKEN is not available - execLog.Printf("Using fallback command: %s %s", fallbackCmd, strings.Join(fallbackArgs, " ")) - cmd := exec.Command(fallbackCmd, fallbackArgs...) - - // Add custom environment variables if provided - if len(fallbackEnv) > 0 { - cmd.Env = append(os.Environ(), fallbackEnv...) 
- } - - // Capture stdout and stderr separately like gh.Exec - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err := cmd.Run() - return stdout.String(), stderr.String(), err -} - -// inferGhArgs infers gh CLI arguments from fallback command arguments -func inferGhArgs(fallbackCmd string, fallbackArgs []string) []string { - if fallbackCmd != "git" || len(fallbackArgs) == 0 { - // For non-git commands, use gh exec - return append([]string{"exec", "--", fallbackCmd}, fallbackArgs...) - } - - // Handle git commands - gitCmd := fallbackArgs[0] - - switch gitCmd { - case "clone": - // git clone [options] - // -> gh repo clone [options] - return buildGhCloneArgs(fallbackArgs[1:]) - default: - // For other git commands, use gh exec - return append([]string{"exec", "--", "git"}, fallbackArgs...) - } -} - -// buildGhCloneArgs builds gh repo clone arguments from git clone arguments -func buildGhCloneArgs(gitArgs []string) []string { - ghArgs := []string{"repo", "clone"} - - var repoURL string - var targetDir string - var otherArgs []string - - // Options that take a value - optsWithValue := map[string]bool{ - "--branch": true, - "--depth": true, - "--origin": true, - "--template": true, - "--config": true, - "--server-option": true, - "--upload-pack": true, - "--reference": true, - "--reference-if-able": true, - "--separate-git-dir": true, - } - - // Parse git clone arguments - for i := 0; i < len(gitArgs); i++ { - arg := gitArgs[i] - if strings.HasPrefix(arg, "https://") || strings.HasPrefix(arg, "git@") { - repoURL = arg - } else if strings.HasPrefix(arg, "-") { - // It's an option - otherArgs = append(otherArgs, arg) - // Check if this option takes a value - if optsWithValue[arg] && i+1 < len(gitArgs) { - i++ // Move to next arg - otherArgs = append(otherArgs, gitArgs[i]) - } - } else if repoURL != "" && targetDir == "" { - // This is the target directory - targetDir = arg - } - } - - // Extract repo slug from URL (remove 
https://github.com/ or enterprise domain) - repoSlug := extractRepoSlug(repoURL) - - // Build gh args: gh repo clone -- [git options] - ghArgs = append(ghArgs, repoSlug) - if targetDir != "" { - ghArgs = append(ghArgs, targetDir) - } - - if len(otherArgs) > 0 { - ghArgs = append(ghArgs, "--") - ghArgs = append(ghArgs, otherArgs...) - } - - return ghArgs -} - -// extractRepoSlug extracts the owner/repo slug from a GitHub URL -func extractRepoSlug(repoURL string) string { - githubHost := getGitHubHost() - - // Remove the GitHub host from the URL - slug := strings.TrimPrefix(repoURL, githubHost+"/") - - // Remove .git suffix if present - slug = strings.TrimSuffix(slug, ".git") - - return slug -} diff --git a/pkg/cli/exec_test.go b/pkg/cli/exec_test.go deleted file mode 100644 index 8e9137d146..0000000000 --- a/pkg/cli/exec_test.go +++ /dev/null @@ -1,229 +0,0 @@ -//go:build !integration - -package cli - -import ( - "strings" - "testing" -) - -func TestGhExecOrFallback(t *testing.T) { - tests := []struct { - name string - ghToken string - fallbackCmd string - fallbackArgs []string - fallbackEnv []string - expectError bool - description string - }{ - { - name: "uses git when GH_TOKEN not set", - ghToken: "", - fallbackCmd: "echo", - fallbackArgs: []string{"fallback executed"}, - fallbackEnv: nil, - expectError: false, - description: "should use fallback command when GH_TOKEN is not set", - }, - { - name: "uses fallback with custom env", - ghToken: "", - fallbackCmd: "sh", - fallbackArgs: []string{"-c", "echo $TEST_VAR"}, - fallbackEnv: []string{"TEST_VAR=test_value"}, - expectError: false, - description: "should pass custom environment variables to fallback command", - }, - { - name: "fallback command failure", - ghToken: "", - fallbackCmd: "false", // command that always fails - fallbackArgs: []string{}, - fallbackEnv: nil, - expectError: true, - description: "should return error when fallback command fails", - }, - } - - for _, tt := range tests { - t.Run(tt.name, 
func(t *testing.T) { - // Set or unset GH_TOKEN based on test case - if tt.ghToken != "" { - t.Setenv("GH_TOKEN", tt.ghToken) - } - - stdout, _, err := ghExecOrFallback(tt.fallbackCmd, tt.fallbackArgs, tt.fallbackEnv) - - if tt.expectError && err == nil { - t.Errorf("Expected error for test '%s', got nil", tt.description) - } else if !tt.expectError && err != nil { - t.Errorf("Unexpected error for test '%s': %v", tt.description, err) - } - - // For successful fallback tests, verify output - if !tt.expectError && tt.fallbackCmd == "echo" { - if !strings.Contains(stdout, "fallback executed") { - t.Errorf("Expected stdout to contain 'fallback executed', got: %s", stdout) - } - } - - // For env test, verify environment variable was passed - if !tt.expectError && tt.fallbackCmd == "sh" && len(tt.fallbackEnv) > 0 { - if !strings.Contains(stdout, "test_value") { - t.Errorf("Expected stdout to contain 'test_value', got: %s", stdout) - } - } - - // With separated stdout/stderr, we don't expect both to be populated - // This is a change from the previous CombinedOutput behavior - }) - } -} - -func TestGhExecOrFallbackWithGHToken(t *testing.T) { - // This test verifies behavior when GH_TOKEN is set - // Note: We can't easily test actual gh.Exec without a real token, - // so we test that the function attempts to use gh CLI - - // Set a placeholder token - t.Setenv("GH_TOKEN", "placeholder_token_for_test") - - // This will likely fail since we don't have a valid token, - // but we're testing that it attempts gh.Exec path - _, _, err := ghExecOrFallback( - "echo", - []string{"fallback"}, - nil, - ) - - // We expect an error because gh.Exec will fail with invalid token/nonexistent repo - // The important part is that it tried the gh.Exec path - if err == nil { - // If it succeeded, it means it used the fallback, which is wrong - t.Error("Expected function to attempt gh.Exec with GH_TOKEN set") - } -} - -func TestGhExecOrFallbackIntegration(t *testing.T) { - // Integration test: 
verify the function works end-to-end without GH_TOKEN - // (GH_TOKEN is not set by default in this test) - - // Use a simple command that we know will work - stdout, _, err := ghExecOrFallback( - "echo", - []string{"integration test output"}, - nil, - ) - - if err != nil { - t.Errorf("Unexpected error in integration test: %v", err) - } - - if !strings.Contains(stdout, "integration test output") { - t.Errorf("Expected output to contain 'integration test output', got: %s", stdout) - } -} - -func TestExtractRepoSlug(t *testing.T) { - tests := []struct { - name string - repoURL string - githubHost string - expectedSlug string - }{ - { - name: "standard GitHub URL", - repoURL: "https://github.com/owner/repo", - githubHost: "", - expectedSlug: "owner/repo", - }, - { - name: "GitHub URL with .git suffix", - repoURL: "https://github.com/owner/repo.git", - githubHost: "", - expectedSlug: "owner/repo", - }, - { - name: "enterprise GitHub URL", - repoURL: "https://github.enterprise.com/owner/repo", - githubHost: "https://github.enterprise.com", - expectedSlug: "owner/repo", - }, - { - name: "enterprise GitHub URL with .git", - repoURL: "https://github.enterprise.com/owner/repo.git", - githubHost: "https://github.enterprise.com", - expectedSlug: "owner/repo", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Set environment - if tt.githubHost != "" { - t.Setenv("GITHUB_SERVER_URL", tt.githubHost) - } - - slug := extractRepoSlug(tt.repoURL) - if slug != tt.expectedSlug { - t.Errorf("Expected slug '%s', got '%s'", tt.expectedSlug, slug) - } - }) - } -} - -func TestInferGhArgs(t *testing.T) { - tests := []struct { - name string - fallbackCmd string - fallbackArgs []string - expectedGhArgs []string - }{ - { - name: "git clone simple", - fallbackCmd: "git", - fallbackArgs: []string{"clone", "https://github.com/owner/repo", "/tmp/dir"}, - expectedGhArgs: []string{"repo", "clone", "owner/repo", "/tmp/dir"}, - }, - { - name: "git clone with depth", - 
fallbackCmd: "git", - fallbackArgs: []string{"clone", "--depth", "1", "https://github.com/owner/repo", "/tmp/dir"}, - expectedGhArgs: []string{"repo", "clone", "owner/repo", "/tmp/dir", "--", "--depth", "1"}, - }, - { - name: "git clone with branch", - fallbackCmd: "git", - fallbackArgs: []string{"clone", "--depth", "1", "https://github.com/owner/repo", "/tmp/dir", "--branch", "main"}, - expectedGhArgs: []string{"repo", "clone", "owner/repo", "/tmp/dir", "--", "--depth", "1", "--branch", "main"}, - }, - { - name: "git checkout", - fallbackCmd: "git", - fallbackArgs: []string{"-C", "/tmp/dir", "checkout", "abc123"}, - expectedGhArgs: []string{"exec", "--", "git", "-C", "/tmp/dir", "checkout", "abc123"}, - }, - { - name: "non-git command", - fallbackCmd: "echo", - fallbackArgs: []string{"hello"}, - expectedGhArgs: []string{"exec", "--", "echo", "hello"}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - ghArgs := inferGhArgs(tt.fallbackCmd, tt.fallbackArgs) - if len(ghArgs) != len(tt.expectedGhArgs) { - t.Errorf("Expected %d args, got %d: %v", len(tt.expectedGhArgs), len(ghArgs), ghArgs) - return - } - for i, arg := range ghArgs { - if arg != tt.expectedGhArgs[i] { - t.Errorf("Arg %d: expected '%s', got '%s'", i, tt.expectedGhArgs[i], arg) - } - } - }) - } -} diff --git a/pkg/cli/logs_display.go b/pkg/cli/logs_display.go deleted file mode 100644 index 06df28487d..0000000000 --- a/pkg/cli/logs_display.go +++ /dev/null @@ -1,220 +0,0 @@ -// This file provides command-line interface functionality for gh-aw. -// This file (logs_display.go) contains functions for displaying workflow logs information -// to the console, including summary tables and metrics. 
-// -// Key responsibilities: -// - Rendering workflow logs overview tables -// - Formatting metrics for display (duration, tokens, cost) -// - Aggregating totals across multiple runs - -package cli - -import ( - "fmt" - "os" - "path/filepath" - "strconv" - "strings" - "time" - - "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" - "github.com/github/gh-aw/pkg/timeutil" -) - -var logsDisplayLog = logger.New("cli:logs_display") - -// displayLogsOverview displays a summary table of workflow runs and metrics -func displayLogsOverview(processedRuns []ProcessedRun, verbose bool) { - if len(processedRuns) == 0 { - logsDisplayLog.Print("No processed runs to display") - return - } - - logsDisplayLog.Printf("Displaying logs overview: runs=%d, verbose=%v", len(processedRuns), verbose) - - // Prepare table data - headers := []string{"Run ID", "Workflow", "Status", "Duration", "Tokens", "Cost ($)", "Turns", "Errors", "Warnings", "Missing Tools", "Missing Data", "Noops", "Safe Items", "Created", "Logs Path"} - var rows [][]string - - var totalTokens int - var totalCost float64 - var totalDuration time.Duration - var totalTurns int - var totalErrors int - var totalWarnings int - var totalMissingTools int - var totalMissingData int - var totalNoops int - var totalSafeItems int - - for _, pr := range processedRuns { - run := pr.Run - // Format duration - durationStr := "" - if run.Duration > 0 { - durationStr = timeutil.FormatDuration(run.Duration) - totalDuration += run.Duration - } - - // Format cost - costStr := "" - if run.EstimatedCost > 0 { - costStr = fmt.Sprintf("%.3f", run.EstimatedCost) - totalCost += run.EstimatedCost - } - - // Format tokens - tokensStr := "" - if run.TokenUsage > 0 { - tokensStr = console.FormatNumber(run.TokenUsage) - totalTokens += run.TokenUsage - } - - // Format turns - turnsStr := "" - if run.Turns > 0 { - turnsStr = strconv.Itoa(run.Turns) - totalTurns += run.Turns - } - - // Format errors - errorsStr := 
strconv.Itoa(run.ErrorCount) - totalErrors += run.ErrorCount - - // Format warnings - warningsStr := strconv.Itoa(run.WarningCount) - totalWarnings += run.WarningCount - - // Format missing tools - var missingToolsStr string - if verbose && len(pr.MissingTools) > 0 { - // In verbose mode, show actual tool names - toolNames := make([]string, len(pr.MissingTools)) - for i, tool := range pr.MissingTools { - toolNames[i] = tool.Tool - } - missingToolsStr = strings.Join(toolNames, ", ") - // Truncate if too long - if len(missingToolsStr) > 30 { - missingToolsStr = missingToolsStr[:27] + "..." - } - } else { - // In normal mode, just show the count - missingToolsStr = strconv.Itoa(run.MissingToolCount) - } - totalMissingTools += run.MissingToolCount - - // Format missing data - var missingDataStr string - if verbose && len(pr.MissingData) > 0 { - // In verbose mode, show actual data types - dataTypes := make([]string, len(pr.MissingData)) - for i, data := range pr.MissingData { - dataTypes[i] = data.DataType - } - missingDataStr = strings.Join(dataTypes, ", ") - // Truncate if too long - if len(missingDataStr) > 30 { - missingDataStr = missingDataStr[:27] + "..." - } - } else { - // In normal mode, just show the count - missingDataStr = strconv.Itoa(run.MissingDataCount) - } - totalMissingData += run.MissingDataCount - - // Format noops - var noopsStr string - if verbose && len(pr.Noops) > 0 { - // In verbose mode, show truncated message preview - messages := make([]string, len(pr.Noops)) - for i, noop := range pr.Noops { - msg := noop.Message - if len(msg) > 30 { - msg = msg[:27] + "..." - } - messages[i] = msg - } - noopsStr = strings.Join(messages, ", ") - // Truncate if too long - if len(noopsStr) > 30 { - noopsStr = noopsStr[:27] + "..." 
- } - } else { - // In normal mode, just show the count - noopsStr = strconv.Itoa(run.NoopCount) - } - totalNoops += run.NoopCount - - // Format safe items count - safeItemsStr := strconv.Itoa(run.SafeItemsCount) - totalSafeItems += run.SafeItemsCount - - // Truncate workflow name if too long - workflowName := run.WorkflowName - if len(workflowName) > 20 { - workflowName = workflowName[:17] + "..." - } - - // Format relative path - relPath, _ := filepath.Rel(".", run.LogsPath) - - // Format status - show conclusion directly for completed runs - statusStr := run.Status - if run.Status == "completed" && run.Conclusion != "" { - statusStr = run.Conclusion - } - - row := []string{ - strconv.FormatInt(run.DatabaseID, 10), - workflowName, - statusStr, - durationStr, - tokensStr, - costStr, - turnsStr, - errorsStr, - warningsStr, - missingToolsStr, - missingDataStr, - noopsStr, - safeItemsStr, - run.CreatedAt.Format("2006-01-02"), - relPath, - } - rows = append(rows, row) - } - - // Prepare total row - totalRow := []string{ - fmt.Sprintf("TOTAL (%d runs)", len(processedRuns)), - "", - "", - timeutil.FormatDuration(totalDuration), - console.FormatNumber(totalTokens), - fmt.Sprintf("%.3f", totalCost), - strconv.Itoa(totalTurns), - strconv.Itoa(totalErrors), - strconv.Itoa(totalWarnings), - strconv.Itoa(totalMissingTools), - strconv.Itoa(totalMissingData), - strconv.Itoa(totalNoops), - strconv.Itoa(totalSafeItems), - "", - "", - } - - // Render table using console helper - tableConfig := console.TableConfig{ - Title: "Workflow Logs Overview", - Headers: headers, - Rows: rows, - ShowTotal: true, - TotalRow: totalRow, - } - - logsDisplayLog.Printf("Rendering table: total_tokens=%d, total_cost=%.3f, total_duration=%s", totalTokens, totalCost, totalDuration) - - fmt.Fprint(os.Stderr, console.RenderTable(tableConfig)) -} diff --git a/pkg/cli/logs_overview_test.go b/pkg/cli/logs_overview_test.go index 7324be2a4e..e78c4f19f7 100644 --- a/pkg/cli/logs_overview_test.go +++ 
b/pkg/cli/logs_overview_test.go @@ -4,61 +4,9 @@ package cli import ( "testing" - "time" ) // TestLogsOverviewIncludesMissingTools verifies that the overview table includes missing tools count -func TestLogsOverviewIncludesMissingTools(t *testing.T) { - processedRuns := []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 12345, - WorkflowName: "Test Workflow A", - Status: "completed", - Conclusion: "success", - CreatedAt: time.Now(), - Duration: 5 * time.Minute, - TokenUsage: 1000, - EstimatedCost: 0.01, - Turns: 3, - ErrorCount: 0, - WarningCount: 2, - MissingToolCount: 1, - LogsPath: "/tmp/gh-aw/run-12345", - }, - MissingTools: []MissingToolReport{ - {Tool: "terraform", Reason: "Infrastructure automation needed"}, - }, - }, - { - Run: WorkflowRun{ - DatabaseID: 67890, - WorkflowName: "Test Workflow B", - Status: "completed", - Conclusion: "failure", - CreatedAt: time.Now(), - Duration: 3 * time.Minute, - TokenUsage: 500, - EstimatedCost: 0.005, - Turns: 2, - ErrorCount: 1, - WarningCount: 0, - MissingToolCount: 3, - LogsPath: "/tmp/gh-aw/run-67890", - }, - MissingTools: []MissingToolReport{ - {Tool: "kubectl", Reason: "K8s management"}, - {Tool: "docker", Reason: "Container runtime"}, - {Tool: "helm", Reason: "K8s package manager"}, - }, - }, - } - - // Capture output by redirecting - this is a smoke test to ensure displayLogsOverview doesn't panic - // and that it processes the MissingToolCount field - displayLogsOverview(processedRuns, false) - displayLogsOverview(processedRuns, true) -} // TestWorkflowRunStructHasMissingToolCount verifies that WorkflowRun has the MissingToolCount field func TestWorkflowRunStructHasMissingToolCount(t *testing.T) { @@ -116,118 +64,6 @@ func TestLogsOverviewHeaderIncludesMissing(t *testing.T) { } // TestDisplayLogsOverviewWithVariousMissingToolCounts tests different scenarios -func TestDisplayLogsOverviewWithVariousMissingToolCounts(t *testing.T) { - testCases := []struct { - name string - processedRuns []ProcessedRun - 
expectedNonPanic bool - }{ - { - name: "no missing tools", - processedRuns: []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 1, - WorkflowName: "Clean Workflow", - MissingToolCount: 0, - LogsPath: "/tmp/gh-aw/run-1", - }, - MissingTools: []MissingToolReport{}, - }, - }, - expectedNonPanic: true, - }, - { - name: "single missing tool", - processedRuns: []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 2, - WorkflowName: "Workflow with One Missing", - MissingToolCount: 1, - LogsPath: "/tmp/gh-aw/run-2", - }, - MissingTools: []MissingToolReport{ - {Tool: "terraform", Reason: "Need IaC"}, - }, - }, - }, - expectedNonPanic: true, - }, - { - name: "multiple missing tools", - processedRuns: []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 3, - WorkflowName: "Workflow with Multiple Missing", - MissingToolCount: 5, - LogsPath: "/tmp/gh-aw/run-3", - }, - MissingTools: []MissingToolReport{ - {Tool: "terraform", Reason: "IaC"}, - {Tool: "kubectl", Reason: "K8s"}, - {Tool: "docker", Reason: "Containers"}, - {Tool: "helm", Reason: "Packages"}, - {Tool: "argocd", Reason: "GitOps"}, - }, - }, - }, - expectedNonPanic: true, - }, - { - name: "mixed missing tool counts", - processedRuns: []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 4, - WorkflowName: "Workflow A", - MissingToolCount: 0, - LogsPath: "/tmp/gh-aw/run-4", - }, - MissingTools: []MissingToolReport{}, - }, - { - Run: WorkflowRun{ - DatabaseID: 5, - WorkflowName: "Workflow B", - MissingToolCount: 2, - LogsPath: "/tmp/gh-aw/run-5", - }, - MissingTools: []MissingToolReport{ - {Tool: "kubectl", Reason: "K8s"}, - {Tool: "docker", Reason: "Containers"}, - }, - }, - { - Run: WorkflowRun{ - DatabaseID: 6, - WorkflowName: "Workflow C", - MissingToolCount: 1, - LogsPath: "/tmp/gh-aw/run-6", - }, - MissingTools: []MissingToolReport{ - {Tool: "helm", Reason: "Packages"}, - }, - }, - }, - expectedNonPanic: true, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - // This test 
ensures displayLogsOverview doesn't panic with various missing tool counts - defer func() { - if r := recover(); r != nil && tc.expectedNonPanic { - t.Errorf("displayLogsOverview panicked with: %v", r) - } - }() - displayLogsOverview(tc.processedRuns, false) - displayLogsOverview(tc.processedRuns, true) - }) - } -} // TestTotalMissingToolsCalculation verifies totals are calculated correctly func TestTotalMissingToolsCalculation(t *testing.T) { @@ -252,83 +88,8 @@ func TestTotalMissingToolsCalculation(t *testing.T) { } // TestOverviewDisplayConsistency verifies that the overview function is consistent -func TestOverviewDisplayConsistency(t *testing.T) { - // Create a run with known values - processedRuns := []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 99999, - WorkflowName: "Consistency Test", - Status: "completed", - Conclusion: "success", - Duration: 10 * time.Minute, - TokenUsage: 2000, - EstimatedCost: 0.02, - Turns: 5, - ErrorCount: 1, - WarningCount: 3, - MissingToolCount: 2, - CreatedAt: time.Date(2024, 1, 15, 10, 30, 0, 0, time.UTC), - LogsPath: "/tmp/gh-aw/run-99999", - }, - MissingTools: []MissingToolReport{ - {Tool: "terraform", Reason: "IaC"}, - {Tool: "kubectl", Reason: "K8s"}, - }, - }, - } - - // Call displayLogsOverview - it should not panic and should handle all fields - defer func() { - if r := recover(); r != nil { - t.Errorf("displayLogsOverview panicked: %v", r) - } - }() - - displayLogsOverview(processedRuns, false) - displayLogsOverview(processedRuns, true) -} // TestMissingToolsIntegration tests the full flow from ProcessedRun to display -func TestMissingToolsIntegration(t *testing.T) { - // Create a ProcessedRun with missing tools - processedRuns := []ProcessedRun{ - { - Run: WorkflowRun{ - DatabaseID: 11111, - WorkflowName: "Integration Test Workflow", - Status: "completed", - Conclusion: "success", - MissingToolCount: 2, - }, - MissingTools: []MissingToolReport{ - { - Tool: "terraform", - Reason: "Infrastructure automation 
needed", - Alternatives: "Manual AWS console", - Timestamp: "2024-01-15T10:30:00Z", - WorkflowName: "Integration Test Workflow", - RunID: 11111, - }, - { - Tool: "kubectl", - Reason: "Kubernetes cluster management", - WorkflowName: "Integration Test Workflow", - RunID: 11111, - }, - }, - }, - } - - // Verify count is correct - if processedRuns[0].Run.MissingToolCount != 2 { - t.Errorf("Expected MissingToolCount to be 2, got %d", processedRuns[0].Run.MissingToolCount) - } - - // Display should work without panicking - displayLogsOverview(processedRuns, false) - displayLogsOverview(processedRuns, true) -} // TestMissingToolCountFieldAccessibility verifies field is accessible func TestMissingToolCountFieldAccessibility(t *testing.T) { diff --git a/pkg/cli/mcp_inspect_safe_inputs_inspector.go b/pkg/cli/mcp_inspect_safe_inputs_inspector.go deleted file mode 100644 index 386c513579..0000000000 --- a/pkg/cli/mcp_inspect_safe_inputs_inspector.go +++ /dev/null @@ -1,134 +0,0 @@ -package cli - -import ( - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "time" - - "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/parser" - "github.com/github/gh-aw/pkg/types" - "github.com/github/gh-aw/pkg/workflow" -) - -// spawnSafeInputsInspector generates safe-inputs MCP server files, starts the HTTP server, -// and launches the inspector to inspect it -func spawnSafeInputsInspector(workflowFile string, verbose bool) error { - mcpInspectLog.Printf("Spawning safe-inputs inspector for workflow: %s", workflowFile) - - // Check if node is available - if _, err := exec.LookPath("node"); err != nil { - return fmt.Errorf("node not found. 
Please install Node.js to run the safe-inputs MCP server: %w", err) - } - - // Resolve the workflow file path - workflowPath, err := ResolveWorkflowPath(workflowFile) - if err != nil { - return err - } - - // Convert to absolute path if needed - if !filepath.IsAbs(workflowPath) { - cwd, err := os.Getwd() - if err != nil { - return fmt.Errorf("failed to get current directory: %w", err) - } - workflowPath = filepath.Join(cwd, workflowPath) - } - - if verbose { - fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Inspecting safe-inputs from: "+workflowPath)) - } - - // Use the workflow compiler to parse the file and resolve imports - // This ensures that imported safe-inputs are properly merged - compiler := workflow.NewCompiler( - workflow.WithVerbose(verbose), - ) - workflowData, err := compiler.ParseWorkflowFile(workflowPath) - if err != nil { - return fmt.Errorf("failed to parse workflow file: %w", err) - } - - // Get safe-inputs configuration from the parsed WorkflowData - // This includes both direct and imported safe-inputs configurations - safeInputsConfig := workflowData.SafeInputs - if safeInputsConfig == nil || len(safeInputsConfig.Tools) == 0 { - return errors.New("no safe-inputs configuration found in workflow") - } - - fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Found %d safe-input tool(s) to configure", len(safeInputsConfig.Tools)))) - - // Create temporary directory for safe-inputs files - tmpDir, err := os.MkdirTemp("", "gh-aw-safe-inputs-*") - if err != nil { - return fmt.Errorf("failed to create temporary directory: %w", err) - } - defer func() { - if err := os.RemoveAll(tmpDir); err != nil && verbose { - fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to cleanup temporary directory: %v", err))) - } - }() - - if verbose { - fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Created temporary directory: "+tmpDir)) - } - - // Write safe-inputs files to temporary directory - if err := 
writeSafeInputsFiles(tmpDir, safeInputsConfig, verbose); err != nil { - return fmt.Errorf("failed to write safe-inputs files: %w", err) - } - - // Find an available port for the HTTP server - port := findAvailablePort(safeInputsStartPort, verbose) - if port == 0 { - return errors.New("failed to find an available port for the HTTP server") - } - - if verbose { - fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Using port %d for safe-inputs HTTP server", port))) - } - - // Start the HTTP server - serverCmd, err := startSafeInputsHTTPServer(tmpDir, port, verbose) - if err != nil { - return fmt.Errorf("failed to start safe-inputs HTTP server: %w", err) - } - defer func() { - if serverCmd.Process != nil { - // Try graceful shutdown first - if err := serverCmd.Process.Signal(os.Interrupt); err != nil && verbose { - fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to send interrupt signal: %v", err))) - } - // Wait a moment for graceful shutdown - time.Sleep(500 * time.Millisecond) - // Attempt force kill (may fail if process already exited gracefully, which is fine) - _ = serverCmd.Process.Kill() - } - }() - - // Wait for the server to start up - if !waitForServerReady(port, 5*time.Second, verbose) { - return errors.New("safe-inputs HTTP server failed to start within timeout") - } - - fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("Safe-inputs HTTP server started successfully")) - fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Server running on: http://localhost:%d", port))) - fmt.Fprintln(os.Stderr) - - // Create MCP server config for the safe-inputs server - safeInputsMCPConfig := parser.MCPServerConfig{ - BaseMCPServerConfig: types.BaseMCPServerConfig{ - Type: "http", - URL: fmt.Sprintf("http://localhost:%d", port), - Env: make(map[string]string), - }, - Name: "safeinputs", - } - - // Inspect the safe-inputs MCP server using the Go SDK (like other MCP servers) - return inspectMCPServer(safeInputsMCPConfig, 
"", verbose, false) -} diff --git a/pkg/cli/mcp_inspect_safe_inputs_test.go b/pkg/cli/mcp_inspect_safe_inputs_test.go deleted file mode 100644 index 60d89e2e8f..0000000000 --- a/pkg/cli/mcp_inspect_safe_inputs_test.go +++ /dev/null @@ -1,264 +0,0 @@ -//go:build !integration - -package cli - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/github/gh-aw/pkg/workflow" -) - -// TestSpawnSafeInputsInspector_NoSafeInputs tests the error case when workflow has no safe-inputs -func TestSpawnSafeInputsInspector_NoSafeInputs(t *testing.T) { - // Create temporary directory with a workflow file - tmpDir := t.TempDir() - workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - if err := os.MkdirAll(workflowsDir, 0755); err != nil { - t.Fatalf("Failed to create workflows directory: %v", err) - } - - // Create a test workflow file WITHOUT safe-inputs - workflowContent := `--- -on: push -engine: copilot ---- -# Test Workflow - -This workflow has no safe-inputs configuration. 
-` - workflowPath := filepath.Join(workflowsDir, "test.md") - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write workflow file: %v", err) - } - - // Change to the temporary directory - originalDir, _ := os.Getwd() - defer os.Chdir(originalDir) - os.Chdir(tmpDir) - - // Try to spawn safe-inputs inspector - should fail - err := spawnSafeInputsInspector("test", false) - if err == nil { - t.Error("Expected error when workflow has no safe-inputs, got nil") - } - - // Verify error message mentions "no safe-inputs" - if err != nil && err.Error() != "no safe-inputs configuration found in workflow" { - t.Errorf("Expected specific error message, got: %v", err) - } -} - -// TestSpawnSafeInputsInspector_WithSafeInputs tests file generation with a real workflow -func TestSpawnSafeInputsInspector_WithSafeInputs(t *testing.T) { - // This test verifies that the function correctly parses a workflow and generates files - // We can't actually start the server or inspector in a test, but we can verify file generation - - // Create temporary directory with a workflow file - tmpDir := t.TempDir() - workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - if err := os.MkdirAll(workflowsDir, 0755); err != nil { - t.Fatalf("Failed to create workflows directory: %v", err) - } - - // Create a test workflow file with safe-inputs - workflowContent := `--- -on: push -engine: copilot -safe-inputs: - echo-tool: - description: "Echo a message" - inputs: - message: - type: string - description: "Message to echo" - required: true - run: | - echo "$message" ---- -# Test Workflow - -This workflow has safe-inputs configuration. 
-` - workflowPath := filepath.Join(workflowsDir, "test.md") - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write workflow file: %v", err) - } - - // Change to the temporary directory - originalDir, _ := os.Getwd() - defer os.Chdir(originalDir) - os.Chdir(tmpDir) - - // We can't fully test spawnSafeInputsInspector because it tries to start a server - // and launch the inspector, but we can test the file generation part separately - // by calling writeSafeInputsFiles directly - - // Parse the workflow using the compiler to get safe-inputs config - // (including any imported safe-inputs) - compiler := workflow.NewCompiler() - workflowData, err := compiler.ParseWorkflowFile(workflowPath) - if err != nil { - t.Fatalf("Failed to parse workflow: %v", err) - } - - safeInputsConfig := workflowData.SafeInputs - if safeInputsConfig == nil { - t.Fatal("Expected safe-inputs config to be parsed") - } - - // Create a temp directory for files - filesDir := t.TempDir() - - // Write files - err = writeSafeInputsFiles(filesDir, safeInputsConfig, false) - if err != nil { - t.Fatalf("writeSafeInputsFiles failed: %v", err) - } - - // Verify the echo-tool.sh file was created - toolPath := filepath.Join(filesDir, "echo-tool.sh") - if _, err := os.Stat(toolPath); os.IsNotExist(err) { - t.Error("echo-tool.sh not found") - } - - // Verify tools.json contains the echo-tool - toolsPath := filepath.Join(filesDir, "tools.json") - toolsContent, err := os.ReadFile(toolsPath) - if err != nil { - t.Fatalf("Failed to read tools.json: %v", err) - } - - // Simple check that the tool name is in the JSON - if len(toolsContent) < 50 { - t.Error("tools.json seems too short") - } -} - -// TestSpawnSafeInputsInspector_WithImportedSafeInputs tests that imported safe-inputs are resolved -func TestSpawnSafeInputsInspector_WithImportedSafeInputs(t *testing.T) { - // Create temporary directory with workflow and shared files - tmpDir := t.TempDir() - 
workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - sharedDir := filepath.Join(workflowsDir, "shared") - if err := os.MkdirAll(sharedDir, 0755); err != nil { - t.Fatalf("Failed to create workflows directory: %v", err) - } - - // Create a shared workflow file with safe-inputs - sharedContent := `--- -safe-inputs: - shared-tool: - description: "Shared tool from import" - inputs: - param: - type: string - description: "A parameter" - required: true - run: | - echo "Shared: $param" ---- -# Shared Workflow -` - sharedPath := filepath.Join(sharedDir, "shared.md") - if err := os.WriteFile(sharedPath, []byte(sharedContent), 0644); err != nil { - t.Fatalf("Failed to write shared workflow file: %v", err) - } - - // Create a test workflow file that imports the shared workflow - workflowContent := `--- -on: push -engine: copilot -imports: - - shared/shared.md -safe-inputs: - local-tool: - description: "Local tool" - inputs: - message: - type: string - description: "Message to echo" - required: true - run: | - echo "$message" ---- -# Test Workflow - -This workflow imports safe-inputs from shared/shared.md. 
-` - workflowPath := filepath.Join(workflowsDir, "test.md") - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write workflow file: %v", err) - } - - // Change to the temporary directory - originalDir, _ := os.Getwd() - defer os.Chdir(originalDir) - os.Chdir(tmpDir) - - // Parse the workflow using the compiler to get safe-inputs config - // This should include both local and imported safe-inputs - compiler := workflow.NewCompiler() - workflowData, err := compiler.ParseWorkflowFile(workflowPath) - if err != nil { - t.Fatalf("Failed to parse workflow: %v", err) - } - - safeInputsConfig := workflowData.SafeInputs - if safeInputsConfig == nil { - t.Fatal("Expected safe-inputs config to be parsed") - } - - // Verify both local and imported tools are present - if len(safeInputsConfig.Tools) != 2 { - t.Errorf("Expected 2 tools (local + imported), got %d", len(safeInputsConfig.Tools)) - } - - // Verify local tool exists - if _, exists := safeInputsConfig.Tools["local-tool"]; !exists { - t.Error("Expected local-tool to be present") - } - - // Verify imported tool exists - if _, exists := safeInputsConfig.Tools["shared-tool"]; !exists { - t.Error("Expected shared-tool (from import) to be present") - } - - // Create a temp directory for files - filesDir := t.TempDir() - - // Write files - err = writeSafeInputsFiles(filesDir, safeInputsConfig, false) - if err != nil { - t.Fatalf("writeSafeInputsFiles failed: %v", err) - } - - // Verify both tool handler files were created - localToolPath := filepath.Join(filesDir, "local-tool.sh") - if _, err := os.Stat(localToolPath); os.IsNotExist(err) { - t.Error("local-tool.sh not found") - } - - sharedToolPath := filepath.Join(filesDir, "shared-tool.sh") - if _, err := os.Stat(sharedToolPath); os.IsNotExist(err) { - t.Error("shared-tool.sh not found") - } - - // Verify tools.json contains both tools - toolsPath := filepath.Join(filesDir, "tools.json") - toolsContent, err := 
os.ReadFile(toolsPath) - if err != nil { - t.Fatalf("Failed to read tools.json: %v", err) - } - - // Check that both tool names are in the JSON - toolsJSON := string(toolsContent) - if !strings.Contains(toolsJSON, "local-tool") { - t.Error("tools.json should contain 'local-tool'") - } - if !strings.Contains(toolsJSON, "shared-tool") { - t.Error("tools.json should contain 'shared-tool'") - } -} diff --git a/pkg/cli/update_actions.go b/pkg/cli/update_actions.go index 7f0097d16e..04ec324cce 100644 --- a/pkg/cli/update_actions.go +++ b/pkg/cli/update_actions.go @@ -36,8 +36,10 @@ func isCoreAction(repo string) bool { } // UpdateActions updates GitHub Actions versions in .github/aw/actions-lock.json -// It checks each action for newer releases and updates the SHA if a newer version is found -func UpdateActions(allowMajor, verbose bool) error { +// It checks each action for newer releases and updates the SHA if a newer version is found. +// By default all actions are updated to the latest major version; pass disableReleaseBump=true +// to revert to the old behaviour where only core (actions/*) actions bypass the --major flag. +func UpdateActions(allowMajor, verbose, disableReleaseBump bool) error { updateLog.Print("Starting action updates") if verbose { @@ -77,8 +79,9 @@ func UpdateActions(allowMajor, verbose bool) error { for key, entry := range actionsLock.Entries { updateLog.Printf("Checking action: %s@%s", entry.Repo, entry.Version) - // Core actions (actions/*) always update to the latest major version - effectiveAllowMajor := allowMajor || isCoreAction(entry.Repo) + // By default all actions are force-updated to the latest major version. + // When disableReleaseBump is set, only core actions (actions/*) bypass the --major flag. 
+ effectiveAllowMajor := !disableReleaseBump || allowMajor || isCoreAction(entry.Repo) // Check for latest release latestVersion, latestSHA, err := getLatestActionRelease(entry.Repo, entry.Version, effectiveAllowMajor, verbose) @@ -467,10 +470,13 @@ func marshalActionsLockSorted(actionsLock *actionsLockFile) ([]byte, error) { return []byte(buf.String()), nil } -// actionRefPattern matches "uses: actions/repo@SHA-or-tag" in workflow files. +// actionRefPattern matches "uses: org/repo@SHA-or-tag" in workflow files for any org. +// Requires the org to start with an alphanumeric character and contain only alphanumeric, +// hyphens, or underscores (no dots, matching GitHub's org naming rules) to exclude local +// paths (e.g. "./..."). Repository names may additionally contain dots. // Captures: (1) indentation+uses prefix, (2) repo path, (3) SHA or version tag, // (4) optional version comment (e.g., "v6.0.2" from "# v6.0.2"), (5) trailing whitespace. -var actionRefPattern = regexp.MustCompile(`(uses:\s+)(actions/[a-zA-Z0-9_.-]+(?:/[a-zA-Z0-9_.-]+)*)@([a-fA-F0-9]{40}|[^\s#\n]+?)(\s*#\s*\S+)?(\s*)$`) +var actionRefPattern = regexp.MustCompile(`(uses:\s+)([a-zA-Z0-9][a-zA-Z0-9_-]*/[a-zA-Z0-9_.-]+(?:/[a-zA-Z0-9_.-]+)*)@([a-fA-F0-9]{40}|[^\s#\n]+?)(\s*#\s*\S+)?(\s*)$`) // getLatestActionReleaseFn is the function used to fetch the latest release for an action. // It can be replaced in tests to avoid network calls. @@ -483,10 +489,11 @@ type latestReleaseResult struct { } // UpdateActionsInWorkflowFiles scans all workflow .md files under workflowsDir -// (recursively) and updates any "uses: actions/*@version" references to the latest -// major version. Updated files are recompiled. Core actions (actions/*) always update -// to latest major. -func UpdateActionsInWorkflowFiles(workflowsDir, engineOverride string, verbose bool) error { +// (recursively) and updates any "uses: org/repo@version" references to the latest +// major version. Updated files are recompiled. 
By default all actions are updated to +// the latest major version; pass disableReleaseBump=true to only update core +// (actions/*) references. +func UpdateActionsInWorkflowFiles(workflowsDir, engineOverride string, verbose, disableReleaseBump bool) error { if workflowsDir == "" { workflowsDir = getWorkflowsDir() } @@ -514,7 +521,7 @@ func UpdateActionsInWorkflowFiles(workflowsDir, engineOverride string, verbose b return nil } - updated, newContent, err := updateActionRefsInContent(string(content), cache, verbose) + updated, newContent, err := updateActionRefsInContent(string(content), cache, !disableReleaseBump, verbose) if err != nil { if verbose { fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to update action refs in %s: %v", path, err))) @@ -552,10 +559,13 @@ func UpdateActionsInWorkflowFiles(workflowsDir, engineOverride string, verbose b return nil } -// updateActionRefsInContent replaces outdated "uses: actions/*@version" references +// updateActionRefsInContent replaces outdated "uses: org/repo@version" references // in content with the latest major version and SHA. Returns (changed, newContent, error). // cache is keyed by "repo@currentVersion" and avoids redundant API calls across lines/files. -func updateActionRefsInContent(content string, cache map[string]latestReleaseResult, verbose bool) (bool, string, error) { +// When allowMajor is true (the default), all matched actions are updated to the latest +// major version. When allowMajor is false (--disable-release-bump), non-core (non +// actions/*) action refs are skipped; core actions are always updated. 
+func updateActionRefsInContent(content string, cache map[string]latestReleaseResult, allowMajor, verbose bool) (bool, string, error) { changed := false lines := strings.Split(content, "\n") @@ -578,6 +588,12 @@ func updateActionRefsInContent(content string, cache map[string]latestReleaseRes trailing = line[match[10]:match[11]] } + // When release bumps are disabled, skip non-core (non actions/*) action refs. + effectiveAllowMajor := allowMajor || isCoreAction(repo) + if !effectiveAllowMajor { + continue + } + // Determine the "current version" to pass to getLatestActionReleaseFn isSHA := IsCommitSHA(ref) currentVersion := ref @@ -600,7 +616,7 @@ func updateActionRefsInContent(content string, cache map[string]latestReleaseRes cacheKey := repo + "|" + currentVersion result, cached := cache[cacheKey] if !cached { - latestVersion, latestSHA, err := getLatestActionReleaseFn(repo, currentVersion, true, verbose) + latestVersion, latestSHA, err := getLatestActionReleaseFn(repo, currentVersion, effectiveAllowMajor, verbose) if err != nil { updateLog.Printf("Failed to get latest release for %s: %v", repo, err) continue diff --git a/pkg/cli/update_actions_test.go b/pkg/cli/update_actions_test.go index b0da5e8146..21edb94c63 100644 --- a/pkg/cli/update_actions_test.go +++ b/pkg/cli/update_actions_test.go @@ -275,20 +275,20 @@ func TestIsCoreAction(t *testing.T) { } func TestUpdateActionRefsInContent_NonCoreActionsUnchanged(t *testing.T) { - // Non-actions/* org references should not be modified by updateActionRefsInContent - // since it only processes "uses: actions/" prefixed references. + // When allowMajor=false (--disable-release-bump), non-actions/* org references + // should not be modified because they are not core actions. 
input := `steps: - uses: docker/login-action@v3 - uses: github/codeql-action/upload-sarif@v3 - run: echo hello` cache := make(map[string]latestReleaseResult) - changed, newContent, err := updateActionRefsInContent(input, cache, false) + changed, newContent, err := updateActionRefsInContent(input, cache, false, false) if err != nil { t.Fatalf("updateActionRefsInContent() error = %v", err) } if changed { - t.Errorf("updateActionRefsInContent() changed = true, want false for non-actions/* refs") + t.Errorf("updateActionRefsInContent() changed = true, want false for non-actions/* refs with allowMajor=false") } if newContent != input { t.Errorf("updateActionRefsInContent() modified content for non-actions/* refs\nGot: %s\nWant: %s", newContent, input) @@ -302,7 +302,7 @@ steps: - run: echo world` cache := make(map[string]latestReleaseResult) - changed, _, err := updateActionRefsInContent(input, cache, false) + changed, _, err := updateActionRefsInContent(input, cache, true, false) if err != nil { t.Fatalf("updateActionRefsInContent() error = %v", err) } @@ -338,7 +338,7 @@ func TestUpdateActionRefsInContent_VersionTagReplacement(t *testing.T) { - run: echo hello` cache := make(map[string]latestReleaseResult) - changed, got, err := updateActionRefsInContent(input, cache, false) + changed, got, err := updateActionRefsInContent(input, cache, true, false) if err != nil { t.Fatalf("updateActionRefsInContent() error = %v", err) } @@ -365,7 +365,7 @@ func TestUpdateActionRefsInContent_SHAPinnedReplacement(t *testing.T) { want := " uses: actions/checkout@" + newSHA + " # v6.0.2" cache := make(map[string]latestReleaseResult) - changed, got, err := updateActionRefsInContent(input, cache, false) + changed, got, err := updateActionRefsInContent(input, cache, true, false) if err != nil { t.Fatalf("updateActionRefsInContent() error = %v", err) } @@ -394,7 +394,7 @@ func TestUpdateActionRefsInContent_CacheReusedAcrossLines(t *testing.T) { - uses: actions/github-script@v7` cache := 
make(map[string]latestReleaseResult) - changed, _, err := updateActionRefsInContent(input, cache, false) + changed, _, err := updateActionRefsInContent(input, cache, true, false) if err != nil { t.Fatalf("updateActionRefsInContent() error = %v", err) } @@ -405,3 +405,41 @@ func TestUpdateActionRefsInContent_CacheReusedAcrossLines(t *testing.T) { t.Errorf("getLatestActionReleaseFn called %d times, want 1 (cache should prevent second call)", callCount) } } + +func TestUpdateActionRefsInContent_AllOrgsUpdatedWhenAllowMajor(t *testing.T) { + // With allowMajor=true (default behaviour), non-actions/* org references should + // also be updated to the latest major version. + orig := getLatestActionReleaseFn + defer func() { getLatestActionReleaseFn = orig }() + + getLatestActionReleaseFn = func(repo, currentVersion string, allowMajor, verbose bool) (string, string, error) { + switch repo { + case "docker/login-action": + return "v4", "newsha11234567890123456789012345678901234", nil + case "github/codeql-action": + return "v4", "newsha21234567890123456789012345678901234", nil + default: + return currentVersion, "", nil + } + } + + input := `steps: + - uses: docker/login-action@v3 + - uses: github/codeql-action@v3` + + want := `steps: + - uses: docker/login-action@v4 + - uses: github/codeql-action@v4` + + cache := make(map[string]latestReleaseResult) + changed, got, err := updateActionRefsInContent(input, cache, true, false) + if err != nil { + t.Fatalf("updateActionRefsInContent() error = %v", err) + } + if !changed { + t.Error("updateActionRefsInContent() changed = false, want true") + } + if got != want { + t.Errorf("updateActionRefsInContent() output mismatch\nGot:\n%s\nWant:\n%s", got, want) + } +} diff --git a/pkg/cli/update_command.go b/pkg/cli/update_command.go index b6c5a0fe35..e14edae860 100644 --- a/pkg/cli/update_command.go +++ b/pkg/cli/update_command.go @@ -43,6 +43,7 @@ Examples: ` + string(constants.CLIExtensionPrefix) + ` update --no-merge # Override local 
changes with upstream ` + string(constants.CLIExtensionPrefix) + ` update repo-assist --major # Allow major version updates ` + string(constants.CLIExtensionPrefix) + ` update --force # Force update even if no changes + ` + string(constants.CLIExtensionPrefix) + ` update --disable-release-bump # Update without force-bumping all action versions ` + string(constants.CLIExtensionPrefix) + ` update --dir custom/workflows # Update workflows in custom directory`, RunE: func(cmd *cobra.Command, args []string) error { majorFlag, _ := cmd.Flags().GetBool("major") @@ -53,12 +54,13 @@ Examples: noStopAfter, _ := cmd.Flags().GetBool("no-stop-after") stopAfter, _ := cmd.Flags().GetString("stop-after") noMergeFlag, _ := cmd.Flags().GetBool("no-merge") + disableReleaseBump, _ := cmd.Flags().GetBool("disable-release-bump") if err := validateEngine(engineOverride); err != nil { return err } - return RunUpdateWorkflows(args, majorFlag, forceFlag, verbose, engineOverride, workflowDir, noStopAfter, stopAfter, noMergeFlag) + return RunUpdateWorkflows(args, majorFlag, forceFlag, verbose, engineOverride, workflowDir, noStopAfter, stopAfter, noMergeFlag, disableReleaseBump) }, } @@ -69,6 +71,7 @@ Examples: cmd.Flags().Bool("no-stop-after", false, "Remove any stop-after field from the workflow") cmd.Flags().String("stop-after", "", "Override stop-after value in the workflow (e.g., '+48h', '2025-12-31 23:59:59')") cmd.Flags().Bool("no-merge", false, "Override local changes with upstream version instead of merging") + cmd.Flags().Bool("disable-release-bump", false, "Disable automatic major version bumps for all actions (only core actions/* are force-updated)") // Register completions for update command cmd.ValidArgsFunction = CompleteWorkflowNames @@ -80,8 +83,8 @@ Examples: // RunUpdateWorkflows updates workflows from their source repositories. // Each workflow is compiled immediately after update. 
-func RunUpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, engineOverride string, workflowsDir string, noStopAfter bool, stopAfter string, noMerge bool) error { - updateLog.Printf("Starting update process: workflows=%v, allowMajor=%v, force=%v, noMerge=%v", workflowNames, allowMajor, force, noMerge) +func RunUpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, engineOverride string, workflowsDir string, noStopAfter bool, stopAfter string, noMerge bool, disableReleaseBump bool) error { + updateLog.Printf("Starting update process: workflows=%v, allowMajor=%v, force=%v, noMerge=%v, disableReleaseBump=%v", workflowNames, allowMajor, force, noMerge, disableReleaseBump) var firstErr error @@ -90,15 +93,16 @@ func RunUpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, } // Update GitHub Actions versions in actions-lock.json. - // Core actions (actions/*) are always updated to the latest major version. - if err := UpdateActions(allowMajor, verbose); err != nil { + // By default all actions are updated to the latest major version. + // Pass --disable-release-bump to revert to only forcing updates for core (actions/*) actions. + if err := UpdateActions(allowMajor, verbose, disableReleaseBump); err != nil { // Non-fatal: warn but don't fail the update fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to update actions-lock.json: %v", err))) } // Update action references in user-provided steps within workflow .md files. - // This covers both generated and hand-written steps that reference actions/*. - if err := UpdateActionsInWorkflowFiles(workflowsDir, engineOverride, verbose); err != nil { + // By default all org/repo@version references are updated to the latest major version. 
+ if err := UpdateActionsInWorkflowFiles(workflowsDir, engineOverride, verbose, disableReleaseBump); err != nil { // Non-fatal: warn but don't fail the update fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to update action references in workflow files: %v", err))) } diff --git a/pkg/cli/update_command_test.go b/pkg/cli/update_command_test.go index b1486a3ace..03414c4eb3 100644 --- a/pkg/cli/update_command_test.go +++ b/pkg/cli/update_command_test.go @@ -813,7 +813,7 @@ func TestUpdateActions_NoFile(t *testing.T) { os.Chdir(tmpDir) // Should not error when file doesn't exist - err := UpdateActions(false, false) + err := UpdateActions(false, false, false) if err != nil { t.Errorf("Expected no error when actions-lock.json doesn't exist, got: %v", err) } @@ -844,7 +844,7 @@ func TestUpdateActions_EmptyFile(t *testing.T) { os.Chdir(tmpDir) // Should not error with empty file - err := UpdateActions(false, false) + err := UpdateActions(false, false, false) if err != nil { t.Errorf("Expected no error with empty actions-lock.json, got: %v", err) } @@ -873,7 +873,7 @@ func TestUpdateActions_InvalidJSON(t *testing.T) { os.Chdir(tmpDir) // Should error with invalid JSON - err := UpdateActions(false, false) + err := UpdateActions(false, false, false) if err == nil { t.Error("Expected error with invalid JSON, got nil") } @@ -980,7 +980,7 @@ func TestRunUpdateWorkflows_NoSourceWorkflows(t *testing.T) { os.Chdir(tmpDir) // Running update with no source workflows should succeed with an info message, not an error - err := RunUpdateWorkflows(nil, false, false, false, "", "", false, "", false) + err := RunUpdateWorkflows(nil, false, false, false, "", "", false, "", false, false) assert.NoError(t, err, "Should not error when no workflows with source field exist") } @@ -996,7 +996,7 @@ func TestRunUpdateWorkflows_SpecificWorkflowNotFound(t *testing.T) { os.Chdir(tmpDir) // Running update with a specific name that doesn't exist should fail - err := 
RunUpdateWorkflows([]string{"nonexistent"}, false, false, false, "", "", false, "", false) + err := RunUpdateWorkflows([]string{"nonexistent"}, false, false, false, "", "", false, "", false, false) require.Error(t, err, "Should error when specified workflow not found") assert.Contains(t, err.Error(), "no workflows found matching the specified names") } diff --git a/pkg/cli/upgrade_command.go b/pkg/cli/upgrade_command.go index 41d15bf763..3bb9982da4 100644 --- a/pkg/cli/upgrade_command.go +++ b/pkg/cli/upgrade_command.go @@ -189,7 +189,7 @@ func runUpgradeCommand(verbose bool, workflowDir string, noFix bool, noCompile b fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Updating GitHub Actions versions...")) upgradeLog.Print("Updating GitHub Actions versions") - if err := UpdateActions(false, verbose); err != nil { + if err := UpdateActions(false, verbose, false); err != nil { upgradeLog.Printf("Failed to update actions: %v", err) // Don't fail the upgrade if action updates fail - this is non-critical fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to update actions: %v", err))) diff --git a/pkg/cli/validation_output.go b/pkg/cli/validation_output.go deleted file mode 100644 index f27ad3663b..0000000000 --- a/pkg/cli/validation_output.go +++ /dev/null @@ -1,54 +0,0 @@ -package cli - -import ( - "fmt" - "os" - - "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" -) - -var validationOutputLog = logger.New("cli:validation_output") - -// FormatValidationError formats validation errors for console output -// Preserves structured error content while applying console styling -// -// This function bridges the gap between pure validation logic (plain text errors) -// and CLI presentation layer (styled console output). By keeping validation errors -// as plain text at the validation layer, we maintain testability and reusability -// while providing consistent styled output in CLI contexts. 
-// -// The function handles both simple single-line errors and complex multi-line -// structured errors (like GitHubToolsetValidationError) by applying console -// formatting to preserve the error structure and readability. -func FormatValidationError(err error) string { - if err == nil { - return "" - } - - errMsg := err.Error() - validationOutputLog.Printf("Formatting validation error: %s", errMsg) - - // Apply console formatting to the entire error message - // This preserves structured multi-line errors while adding visual styling - return console.FormatErrorMessage(errMsg) -} - -// PrintValidationError prints a validation error to stderr with console formatting -// -// This is a convenience helper that combines formatting and printing in one call. -// All validation errors should be printed using this function to ensure consistent -// styling across the CLI. -// -// Example usage: -// -// if err := ValidateWorkflow(config); err != nil { -// PrintValidationError(err) -// return err -// } -func PrintValidationError(err error) { - if err == nil { - return - } - fmt.Fprintln(os.Stderr, FormatValidationError(err)) -} diff --git a/pkg/cli/validation_output_test.go b/pkg/cli/validation_output_test.go deleted file mode 100644 index 167e36bf0e..0000000000 --- a/pkg/cli/validation_output_test.go +++ /dev/null @@ -1,234 +0,0 @@ -//go:build !integration - -package cli - -import ( - "errors" - "strings" - "testing" - - "github.com/github/gh-aw/pkg/workflow" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// TestFormatValidationError verifies that validation errors are formatted with console styling -func TestFormatValidationError(t *testing.T) { - tests := []struct { - name string - err error - expectEmpty bool - mustContain []string - mustNotChange string // Content that must be preserved - }{ - { - name: "nil error returns empty string", - err: nil, - expectEmpty: true, - }, - { - name: "simple single-line error", - err: 
errors.New("missing required field 'engine'"), - expectEmpty: false, - mustContain: []string{ - "missing required field 'engine'", - }, - mustNotChange: "missing required field 'engine'", - }, - { - name: "error with example", - err: errors.New("invalid engine: unknown. Valid engines are: copilot, claude, codex, custom. Example: engine: copilot"), - expectEmpty: false, - mustContain: []string{ - "invalid engine", - "Valid engines are", - "Example:", - }, - mustNotChange: "invalid engine: unknown. Valid engines are: copilot, claude, codex, custom. Example: engine: copilot", - }, - { - name: "multi-line error", - err: errors.New(`invalid configuration: - field 'engine' is required - field 'on' is missing`), - expectEmpty: false, - mustContain: []string{ - "invalid configuration", - "field 'engine' is required", - "field 'on' is missing", - }, - }, - { - name: "structured validation error (GitHubToolsetValidationError)", - err: workflow.NewGitHubToolsetValidationError(map[string][]string{ - "issues": {"list_issues", "create_issue"}, - }), - expectEmpty: false, - mustContain: []string{ - "ERROR", - "issues", - "list_issues", - "create_issue", - "Suggested fix", - }, - }, - { - name: "error with formatting characters", - err: errors.New("path must be relative, got: /absolute/path"), - mustContain: []string{ - "path must be relative", - "/absolute/path", - }, - mustNotChange: "path must be relative, got: /absolute/path", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatValidationError(tt.err) - - if tt.expectEmpty { - assert.Empty(t, result, "Expected empty string for nil error") - return - } - - // Verify content is preserved - if tt.mustNotChange != "" { - assert.Contains(t, result, tt.mustNotChange, - "Formatted error must contain original error message") - } - - // Verify all required content is present - for _, expected := range tt.mustContain { - assert.Contains(t, result, expected, - "Formatted error must contain: 
%s", expected) - } - - // Verify formatting is applied (should not be identical to plain error) - if tt.err != nil && !tt.expectEmpty { - plainMsg := tt.err.Error() - // The formatted message should be longer (due to ANSI codes or prefix) - // or at minimum have the error symbol prefix - if result == plainMsg { - t.Errorf("Expected formatting to be applied, but result matches plain error.\nPlain: %s\nFormatted: %s", - plainMsg, result) - } - } - }) - } -} - -// TestPrintValidationError verifies that PrintValidationError outputs to stderr -// Note: This is a smoke test to ensure the function doesn't panic -func TestPrintValidationError(t *testing.T) { - tests := []struct { - name string - err error - }{ - { - name: "nil error does not panic", - err: nil, - }, - { - name: "simple error does not panic", - err: errors.New("test error"), - }, - { - name: "complex structured error does not panic", - err: workflow.NewGitHubToolsetValidationError(map[string][]string{ - "repos": {"get_repository"}, - }), - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // This test ensures PrintValidationError doesn't panic - // Actual output testing would require capturing stderr - require.NotPanics(t, func() { - PrintValidationError(tt.err) - }, "PrintValidationError should not panic") - }) - } -} - -// TestFormatValidationErrorPreservesStructure verifies that multi-line errors maintain their structure -func TestFormatValidationErrorPreservesStructure(t *testing.T) { - // Create a structured error with multiple lines and sections - structuredErr := workflow.NewGitHubToolsetValidationError(map[string][]string{ - "issues": {"list_issues", "create_issue"}, - "actions": {"list_workflows"}, - }) - - result := FormatValidationError(structuredErr) - - // Verify structure is preserved - require.NotEmpty(t, result, "Result should not be empty") - - // Verify line breaks are maintained (multi-line error) - assert.Contains(t, result, "\n", "Multi-line structure should 
be preserved") - - // Verify all sections are present - sections := []string{ - "ERROR", - "actions", - "issues", - "list_workflows", - "list_issues", - "create_issue", - "Suggested fix", - "toolsets:", - } - - for _, section := range sections { - assert.Contains(t, result, section, - "Structured error should contain section: %s", section) - } - - // Verify the error message contains the original structured content - originalMsg := structuredErr.Error() - lines := strings.SplitSeq(originalMsg, "\n") - for line := range lines { - if strings.TrimSpace(line) != "" { - assert.Contains(t, result, strings.TrimSpace(line), - "Structured error should preserve line: %s", line) - } - } -} - -// TestFormatValidationErrorContentIntegrity verifies that formatting doesn't alter error content -func TestFormatValidationErrorContentIntegrity(t *testing.T) { - errorMessages := []string{ - "simple error", - "error with special chars: @#$%^&*()", - "error with path: /home/user/file.txt", - "error with URL: https://example.com", - "error with code snippet: engine: copilot", - "multi\nline\nerror\nwith\nbreaks", - "error with numbers: 123 456 789", - "error with quotes: 'single' and \"double\"", - } - - for _, msg := range errorMessages { - t.Run("content_integrity_"+strings.ReplaceAll(msg, "\n", "_"), func(t *testing.T) { - err := errors.New(msg) - result := FormatValidationError(err) - - // Verify the original message content is present in the result - assert.Contains(t, result, msg, - "Formatted error must preserve original content") - - // Verify no content is lost or corrupted - // The formatted version should contain at least as many meaningful characters - originalLength := len(strings.TrimSpace(msg)) - // Remove common ANSI codes to get actual content length - cleanResult := strings.ReplaceAll(result, "\033[", "") - cleanResult = strings.ReplaceAll(cleanResult, "\x1b[", "") - - if len(cleanResult) < originalLength { - t.Errorf("Formatting appears to have removed content. 
Original: %d chars, Result: %d chars", - originalLength, len(cleanResult)) - } - }) - } -} diff --git a/pkg/console/form.go b/pkg/console/form.go deleted file mode 100644 index 91078e0e0e..0000000000 --- a/pkg/console/form.go +++ /dev/null @@ -1,122 +0,0 @@ -//go:build !js && !wasm - -package console - -import ( - "errors" - "fmt" - - "github.com/charmbracelet/huh" - "github.com/github/gh-aw/pkg/tty" -) - -// RunForm executes a multi-field form with validation -// This is a higher-level helper that creates a form with multiple fields -func RunForm(fields []FormField) error { - // Validate inputs first before checking TTY - if len(fields) == 0 { - return errors.New("no form fields provided") - } - - // Validate field configurations before checking TTY - for _, field := range fields { - if field.Type == "select" && len(field.Options) == 0 { - return fmt.Errorf("select field '%s' requires options", field.Title) - } - if field.Type != "input" && field.Type != "password" && field.Type != "confirm" && field.Type != "select" { - return fmt.Errorf("unknown field type: %s", field.Type) - } - } - - // Check if stdin is a TTY - if not, we can't show interactive forms - if !tty.IsStderrTerminal() { - return errors.New("interactive forms not available (not a TTY)") - } - - // Build form fields - var huhFields []huh.Field - for _, field := range fields { - switch field.Type { - case "input": - inputField := huh.NewInput(). - Title(field.Title). - Description(field.Description). - Placeholder(field.Placeholder) - - if field.Validate != nil { - inputField.Validate(field.Validate) - } - - // Type assert to *string - if strPtr, ok := field.Value.(*string); ok { - inputField.Value(strPtr) - } else { - return fmt.Errorf("input field '%s' requires *string value", field.Title) - } - - huhFields = append(huhFields, inputField) - - case "password": - passwordField := huh.NewInput(). - Title(field.Title). - Description(field.Description). 
- EchoMode(huh.EchoModePassword) - - if field.Validate != nil { - passwordField.Validate(field.Validate) - } - - // Type assert to *string - if strPtr, ok := field.Value.(*string); ok { - passwordField.Value(strPtr) - } else { - return fmt.Errorf("password field '%s' requires *string value", field.Title) - } - - huhFields = append(huhFields, passwordField) - - case "confirm": - confirmField := huh.NewConfirm(). - Title(field.Title) - - // Type assert to *bool - if boolPtr, ok := field.Value.(*bool); ok { - confirmField.Value(boolPtr) - } else { - return fmt.Errorf("confirm field '%s' requires *bool value", field.Title) - } - - huhFields = append(huhFields, confirmField) - - case "select": - selectField := huh.NewSelect[string](). - Title(field.Title). - Description(field.Description) - - // Convert options to huh.Option format - huhOptions := make([]huh.Option[string], len(field.Options)) - for i, opt := range field.Options { - huhOptions[i] = huh.NewOption(opt.Label, opt.Value) - } - selectField.Options(huhOptions...) 
- - // Type assert to *string - if strPtr, ok := field.Value.(*string); ok { - selectField.Value(strPtr) - } else { - return fmt.Errorf("select field '%s' requires *string value", field.Title) - } - - huhFields = append(huhFields, selectField) - - default: - } - } - - // Create and run the form - form := huh.NewForm( - huh.NewGroup(huhFields...), - ).WithAccessible(IsAccessibleMode()) - - return form.Run() -} diff --git a/pkg/console/form_test.go b/pkg/console/form_test.go deleted file mode 100644 index 64efed30b9..0000000000 --- a/pkg/console/form_test.go +++ /dev/null @@ -1,169 +0,0 @@ -//go:build !integration - -package console - -import ( - "errors" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestRunForm(t *testing.T) { - t.Run("function signature", func(t *testing.T) { - // Verify the function exists and has the right signature - _ = RunForm - }) - - t.Run("requires fields", func(t *testing.T) { - fields := []FormField{} - - err := RunForm(fields) - require.Error(t, err, "Should error with no fields") - assert.Contains(t, err.Error(), "no form fields", "Error should mention missing fields") - }) - - t.Run("validates input field", func(t *testing.T) { - var name string - fields := []FormField{ - { - Type: "input", - Title: "Name", - Description: "Enter your name", - Value: &name, - }, - } - - err := RunForm(fields) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) - - t.Run("validates password field", func(t *testing.T) { - var password string - fields := []FormField{ - { - Type: "password", - Title: "Password", - Description: "Enter password", - Value: &password, - }, - } - - err := RunForm(fields) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), 
"not a TTY", "Error should mention TTY") - }) - - t.Run("validates confirm field", func(t *testing.T) { - var confirmed bool - fields := []FormField{ - { - Type: "confirm", - Title: "Confirm action", - Value: &confirmed, - }, - } - - err := RunForm(fields) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) - - t.Run("validates select field with options", func(t *testing.T) { - var selected string - fields := []FormField{ - { - Type: "select", - Title: "Choose option", - Description: "Select one", - Value: &selected, - Options: []SelectOption{ - {Label: "Option 1", Value: "opt1"}, - {Label: "Option 2", Value: "opt2"}, - }, - }, - } - - err := RunForm(fields) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) - - t.Run("rejects select field without options", func(t *testing.T) { - var selected string - fields := []FormField{ - { - Type: "select", - Title: "Choose option", - Value: &selected, - Options: []SelectOption{}, - }, - } - - err := RunForm(fields) - require.Error(t, err, "Should error with no options") - assert.Contains(t, err.Error(), "requires options", "Error should mention missing options") - }) - - t.Run("rejects unknown field type", func(t *testing.T) { - var value string - fields := []FormField{ - { - Type: "unknown", - Title: "Test", - Value: &value, - }, - } - - err := RunForm(fields) - require.Error(t, err, "Should error with unknown field type") - assert.Contains(t, err.Error(), "unknown field type", "Error should mention unknown type") - }) - - t.Run("validates input field with custom validator", func(t *testing.T) { - var name string - fields := []FormField{ - { - Type: "input", - Title: "Name", - Description: "Enter your name", - Value: &name, - 
Validate: func(s string) error { - if len(s) < 3 { - return errors.New("must be at least 3 characters") - } - return nil - }, - }, - } - - err := RunForm(fields) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) -} - -func TestFormField(t *testing.T) { - t.Run("struct creation", func(t *testing.T) { - var value string - field := FormField{ - Type: "input", - Title: "Test Field", - Description: "Test Description", - Placeholder: "Enter value", - Value: &value, - } - - assert.Equal(t, "input", field.Type, "Type should match") - assert.Equal(t, "Test Field", field.Title, "Title should match") - assert.Equal(t, "Test Description", field.Description, "Description should match") - assert.Equal(t, "Enter value", field.Placeholder, "Placeholder should match") - }) -} diff --git a/pkg/console/golden_test.go b/pkg/console/golden_test.go index 0c2acc0413..648da3cf65 100644 --- a/pkg/console/golden_test.go +++ b/pkg/console/golden_test.go @@ -7,9 +7,7 @@ import ( "strings" "testing" - "github.com/charmbracelet/lipgloss" "github.com/charmbracelet/x/exp/golden" - "github.com/github/gh-aw/pkg/styles" ) // TestGolden_TableRendering tests table rendering with different configurations @@ -132,36 +130,6 @@ func TestGolden_BoxRendering(t *testing.T) { } // TestGolden_LayoutBoxRendering tests layout box rendering (returns string) -func TestGolden_LayoutBoxRendering(t *testing.T) { - tests := []struct { - name string - title string - width int - }{ - { - name: "layout_narrow", - title: "Test", - width: 30, - }, - { - name: "layout_medium", - title: "Trial Execution Plan", - width: 60, - }, - { - name: "layout_wide", - title: "GitHub Agentic Workflows Compilation Report", - width: 100, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutTitleBox(tt.title, tt.width) - golden.RequireEqual(t, 
[]byte(output)) - }) - } -} // TestGolden_TreeRendering tests tree rendering with different hierarchies func TestGolden_TreeRendering(t *testing.T) { @@ -467,94 +435,8 @@ func TestGolden_MessageFormatting(t *testing.T) { } // TestGolden_LayoutComposition tests composing multiple layout elements -func TestGolden_LayoutComposition(t *testing.T) { - tests := []struct { - name string - sections func() []string - }{ - { - name: "title_and_info", - sections: func() []string { - return []string{ - LayoutTitleBox("Trial Execution Plan", 60), - "", - LayoutInfoSection("Workflow", "test-workflow"), - LayoutInfoSection("Status", "Ready"), - } - }, - }, - { - name: "complete_composition", - sections: func() []string { - return []string{ - LayoutTitleBox("Trial Execution Plan", 60), - "", - LayoutInfoSection("Workflow", "test-workflow"), - LayoutInfoSection("Status", "Ready"), - "", - LayoutEmphasisBox("⚠️ WARNING: Large workflow file", styles.ColorWarning), - } - }, - }, - { - name: "multiple_emphasis_boxes", - sections: func() []string { - return []string{ - LayoutEmphasisBox("✓ Success", styles.ColorSuccess), - "", - LayoutEmphasisBox("⚠️ Warning", styles.ColorWarning), - "", - LayoutEmphasisBox("✗ Error", styles.ColorError), - } - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - sections := tt.sections() - output := LayoutJoinVertical(sections...) 
- golden.RequireEqual(t, []byte(output)) - }) - } -} // TestGolden_LayoutEmphasisBox tests emphasis boxes with different colors -func TestGolden_LayoutEmphasisBox(t *testing.T) { - tests := []struct { - name string - content string - color lipgloss.AdaptiveColor - }{ - { - name: "error_box", - content: "✗ ERROR: Compilation failed", - color: styles.ColorError, - }, - { - name: "warning_box", - content: "⚠️ WARNING: Deprecated syntax", - color: styles.ColorWarning, - }, - { - name: "success_box", - content: "✓ SUCCESS: All tests passed", - color: styles.ColorSuccess, - }, - { - name: "info_box", - content: "ℹ INFO: Processing workflow", - color: styles.ColorInfo, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutEmphasisBox(tt.content, tt.color) - golden.RequireEqual(t, []byte(output)) - }) - } -} // TestGolden_InfoSection tests info section rendering func TestGolden_InfoSection(t *testing.T) { diff --git a/pkg/console/layout.go b/pkg/console/layout.go deleted file mode 100644 index 3ea85a5597..0000000000 --- a/pkg/console/layout.go +++ /dev/null @@ -1,162 +0,0 @@ -//go:build !js && !wasm - -// Package console provides layout composition helpers for creating styled CLI output with Lipgloss. -// -// # Layout Composition Helpers -// -// The layout package provides reusable helper functions for common Lipgloss layout patterns. -// These helpers automatically respect TTY detection and provide both styled (TTY) and plain text -// (non-TTY) output modes. 
-// -// # Usage Example -// -// Here's a complete example showing how to compose a styled CLI output: -// -// import ( -// "fmt" -// "os" -// "github.com/github/gh-aw/pkg/console" -// "github.com/github/gh-aw/pkg/styles" -// ) -// -// // Create layout elements -// title := console.LayoutTitleBox("Trial Execution Plan", 60) -// info1 := console.LayoutInfoSection("Workflow", "test-workflow") -// info2 := console.LayoutInfoSection("Status", "Ready") -// warning := console.LayoutEmphasisBox("⚠️ WARNING: Large workflow file", styles.ColorWarning) -// -// // Compose sections vertically with spacing -// output := console.LayoutJoinVertical(title, "", info1, info2, "", warning) -// fmt.Fprintln(os.Stderr, output) -// -// # TTY Detection -// -// All layout helpers automatically detect whether output is going to a terminal (TTY) or being -// piped/redirected. In TTY mode, they use Lipgloss styling with colors and borders. In non-TTY -// mode, they output plain text suitable for parsing or logging. -// -// # Available Helpers -// -// - LayoutTitleBox: Centered title with double border -// - LayoutInfoSection: Info section with left border emphasis -// - LayoutEmphasisBox: Thick-bordered box with custom color -// - LayoutJoinVertical: Composes sections with automatic spacing -// -// # Comparison with Existing Functions -// -// These helpers complement the existing RenderTitleBox, RenderInfoSection, and -// RenderComposedSections functions in console.go. 
The key differences: -// -// - Layout helpers return strings instead of []string for simpler composition -// - LayoutInfoSection takes separate label and value parameters -// - LayoutEmphasisBox provides custom color support with thick borders -// - Layout helpers are designed for inline composition and chaining -package console - -import ( - "strings" - - "github.com/charmbracelet/lipgloss" - "github.com/github/gh-aw/pkg/styles" - "github.com/github/gh-aw/pkg/tty" -) - -// LayoutTitleBox renders a title with a double border box as a single string. -// In TTY mode, uses Lipgloss styled box centered with the Info color scheme. -// In non-TTY mode, renders plain text with separator lines. -// This is a simpler alternative to RenderTitleBox that returns a string instead of []string. -// -// Example: -// -// title := console.LayoutTitleBox("Trial Execution Plan", 60) -// fmt.Fprintln(os.Stderr, title) -func LayoutTitleBox(title string, width int) string { - if tty.IsStderrTerminal() { - // TTY mode: Use Lipgloss styled box - box := lipgloss.NewStyle(). - Bold(true). - Foreground(styles.ColorInfo). - Border(lipgloss.DoubleBorder(), true, false). - Padding(0, 2). - Width(width). - Align(lipgloss.Center). - Render(title) - return box - } - - // Non-TTY mode: Plain text with separators - separator := strings.Repeat("=", width) - return separator + "\n " + title + "\n" + separator -} - -// LayoutInfoSection renders an info section with left border emphasis as a single string. -// In TTY mode, uses Lipgloss styled section with left border and padding. -// In non-TTY mode, adds manual indentation. -// This is a simpler alternative to RenderInfoSection that returns a string and takes label/value. 
-// -// Example: -// -// info := console.LayoutInfoSection("Workflow", "test-workflow") -// fmt.Fprintln(os.Stderr, info) -func LayoutInfoSection(label, value string) string { - content := label + ": " + value - - if tty.IsStderrTerminal() { - // TTY mode: Use Lipgloss styled section with left border and padding - section := lipgloss.NewStyle(). - Border(lipgloss.NormalBorder(), false, false, false, true). - BorderForeground(styles.ColorInfo). - PaddingLeft(2). - Render(content) - return section - } - - // Non-TTY mode: Add manual indentation - return " " + content -} - -// LayoutEmphasisBox renders content in a rounded-bordered box with custom color. -// In TTY mode, uses Lipgloss styled box with rounded border for a polished appearance. -// In non-TTY mode, renders content with surrounding marker lines. -// -// Example: -// -// warning := console.LayoutEmphasisBox("⚠️ WARNING: Large workflow", styles.ColorWarning) -// fmt.Fprintln(os.Stderr, warning) -func LayoutEmphasisBox(content string, color lipgloss.AdaptiveColor) string { - if tty.IsStderrTerminal() { - // TTY mode: Use Lipgloss styled box with rounded border for a softer appearance - box := lipgloss.NewStyle(). - Bold(true). - Foreground(color). - Border(styles.RoundedBorder). - BorderForeground(color). - Padding(0, 2). - Render(content) - return box - } - - // Non-TTY mode: Content with marker lines - marker := strings.Repeat("!", len(content)+4) - return marker + "\n " + content + "\n" + marker -} - -// LayoutJoinVertical composes sections vertically with automatic spacing. -// In TTY mode, uses lipgloss.JoinVertical for proper composition. -// In non-TTY mode, joins sections with newlines. 
-// -// Example: -// -// title := console.LayoutTitleBox("Plan", 60) -// info := console.LayoutInfoSection("Status", "Ready") -// output := console.LayoutJoinVertical(title, info) -// fmt.Fprintln(os.Stderr, output) -func LayoutJoinVertical(sections ...string) string { - if tty.IsStderrTerminal() { - // TTY mode: Use Lipgloss to compose sections vertically - return lipgloss.JoinVertical(lipgloss.Left, sections...) - } - - // Non-TTY mode: Join with newlines - return strings.Join(sections, "\n") -} diff --git a/pkg/console/layout_test.go b/pkg/console/layout_test.go deleted file mode 100644 index 6360c99d06..0000000000 --- a/pkg/console/layout_test.go +++ /dev/null @@ -1,383 +0,0 @@ -//go:build !integration - -package console - -import ( - "strings" - "testing" - - "github.com/charmbracelet/lipgloss" - "github.com/github/gh-aw/pkg/styles" -) - -func TestLayoutTitleBox(t *testing.T) { - tests := []struct { - name string - title string - width int - expected []string // Substrings that should be present in output - }{ - { - name: "basic title", - title: "Test Title", - width: 40, - expected: []string{ - "Test Title", - }, - }, - { - name: "longer title", - title: "Trial Execution Plan", - width: 80, - expected: []string{ - "Trial Execution Plan", - }, - }, - { - name: "title with special characters", - title: "⚠️ Important Notice", - width: 60, - expected: []string{ - "⚠️ Important Notice", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutTitleBox(tt.title, tt.width) - - // Check that output is not empty - if output == "" { - t.Error("LayoutTitleBox() returned empty string") - } - - // Check that title appears in output - for _, expected := range tt.expected { - if !strings.Contains(output, expected) { - t.Errorf("LayoutTitleBox() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - }) - } -} - -func TestLayoutInfoSection(t *testing.T) { - tests := []struct { - name string - label string - 
value string - expected []string // Substrings that should be present in output - }{ - { - name: "simple label and value", - label: "Workflow", - value: "test-workflow", - expected: []string{ - "Workflow", - "test-workflow", - }, - }, - { - name: "status label", - label: "Status", - value: "Active", - expected: []string{ - "Status", - "Active", - }, - }, - { - name: "file path value", - label: "Location", - value: "/path/to/file", - expected: []string{ - "Location", - "/path/to/file", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutInfoSection(tt.label, tt.value) - - // Check that output is not empty - if output == "" { - t.Error("LayoutInfoSection() returned empty string") - } - - // Check that expected strings appear in output - for _, expected := range tt.expected { - if !strings.Contains(output, expected) { - t.Errorf("LayoutInfoSection() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - }) - } -} - -func TestLayoutEmphasisBox(t *testing.T) { - tests := []struct { - name string - content string - color lipgloss.AdaptiveColor - expected []string // Substrings that should be present in output - }{ - { - name: "warning message", - content: "⚠️ WARNING", - color: styles.ColorWarning, - expected: []string{ - "⚠️ WARNING", - }, - }, - { - name: "error message", - content: "✗ ERROR: Failed", - color: styles.ColorError, - expected: []string{ - "✗ ERROR: Failed", - }, - }, - { - name: "success message", - content: "✓ Success", - color: styles.ColorSuccess, - expected: []string{ - "✓ Success", - }, - }, - { - name: "info message", - content: "ℹ Information", - color: styles.ColorInfo, - expected: []string{ - "ℹ Information", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutEmphasisBox(tt.content, tt.color) - - // Check that output is not empty - if output == "" { - t.Error("LayoutEmphasisBox() returned empty string") - } - - // Check that 
content appears in output - for _, expected := range tt.expected { - if !strings.Contains(output, expected) { - t.Errorf("LayoutEmphasisBox() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - }) - } -} - -func TestLayoutJoinVertical(t *testing.T) { - tests := []struct { - name string - sections []string - expected []string // Substrings that should be present in output - }{ - { - name: "single section", - sections: []string{"Section 1"}, - expected: []string{"Section 1"}, - }, - { - name: "multiple sections", - sections: []string{"Section 1", "Section 2", "Section 3"}, - expected: []string{ - "Section 1", - "Section 2", - "Section 3", - }, - }, - { - name: "sections with empty strings", - sections: []string{"Section 1", "", "Section 2"}, - expected: []string{ - "Section 1", - "Section 2", - }, - }, - { - name: "empty sections", - sections: []string{}, - expected: []string{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutJoinVertical(tt.sections...) 
- - // For empty sections, output should be empty - if len(tt.sections) == 0 { - if output != "" { - t.Errorf("LayoutJoinVertical() expected empty string, got: %s", output) - } - return - } - - // Check that expected strings appear in output - for _, expected := range tt.expected { - if expected == "" { - continue - } - if !strings.Contains(output, expected) { - t.Errorf("LayoutJoinVertical() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - }) - } -} - -func TestLayoutCompositionAPI(t *testing.T) { - t.Run("compose multiple layout elements", func(t *testing.T) { - // Test the API example from the documentation - title := LayoutTitleBox("Trial Execution Plan", 60) - info := LayoutInfoSection("Workflow", "test-workflow") - warning := LayoutEmphasisBox("⚠️ WARNING", styles.ColorWarning) - - // Compose sections vertically with spacing - output := LayoutJoinVertical(title, "", info, "", warning) - - // Verify all elements are present in output - expected := []string{ - "Trial Execution Plan", - "Workflow", - "test-workflow", - "⚠️ WARNING", - } - - for _, exp := range expected { - if !strings.Contains(output, exp) { - t.Errorf("Composed output missing expected string '%s'\nGot:\n%s", exp, output) - } - } - }) -} - -func TestLayoutWidthConstraints(t *testing.T) { - tests := []struct { - name string - width int - }{ - {"narrow width", 40}, - {"medium width", 60}, - {"wide width", 80}, - {"very wide width", 120}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := LayoutTitleBox("Test", tt.width) - - // Output should not be empty - if output == "" { - t.Error("LayoutTitleBox() returned empty string") - } - - // In non-TTY mode, separator length should match width - // We can't test TTY mode easily, but we can check non-TTY - lines := strings.Split(output, "\n") - if len(lines) > 0 { - // First line should contain separators or styled content - if len(lines[0]) == 0 { - t.Error("LayoutTitleBox() first line is 
empty") - } - } - }) - } -} - -func TestLayoutWithDifferentColors(t *testing.T) { - colors := []struct { - name string - color lipgloss.AdaptiveColor - }{ - {"error color", styles.ColorError}, - {"warning color", styles.ColorWarning}, - {"success color", styles.ColorSuccess}, - {"info color", styles.ColorInfo}, - {"purple color", styles.ColorPurple}, - {"yellow color", styles.ColorYellow}, - } - - for _, c := range colors { - t.Run(c.name, func(t *testing.T) { - output := LayoutEmphasisBox("Test Content", c.color) - - // Output should not be empty - if output == "" { - t.Error("LayoutEmphasisBox() returned empty string") - } - - // Content should be present - if !strings.Contains(output, "Test Content") { - t.Errorf("LayoutEmphasisBox() missing content, got: %s", output) - } - }) - } -} - -func TestLayoutNonTTYOutput(t *testing.T) { - // These tests verify that non-TTY output is plain text - // In actual non-TTY environment, output should be plain without ANSI codes - - t.Run("title box non-tty format", func(t *testing.T) { - output := LayoutTitleBox("Test", 40) - // Should contain the title - if !strings.Contains(output, "Test") { - t.Errorf("Expected title in output, got: %s", output) - } - }) - - t.Run("info section non-tty format", func(t *testing.T) { - output := LayoutInfoSection("Label", "Value") - // Should contain label and value - if !strings.Contains(output, "Label") || !strings.Contains(output, "Value") { - t.Errorf("Expected label and value in output, got: %s", output) - } - }) - - t.Run("emphasis box non-tty format", func(t *testing.T) { - output := LayoutEmphasisBox("Content", styles.ColorWarning) - // Should contain content - if !strings.Contains(output, "Content") { - t.Errorf("Expected content in output, got: %s", output) - } - }) -} - -// Example demonstrates how to compose a styled CLI output -// using the layout helper functions. 
-func Example() { - // Create layout elements - title := LayoutTitleBox("Trial Execution Plan", 60) - info1 := LayoutInfoSection("Workflow", "test-workflow") - info2 := LayoutInfoSection("Status", "Ready") - warning := LayoutEmphasisBox("⚠️ WARNING: Large workflow file", styles.ColorWarning) - - // Compose sections vertically with spacing - output := LayoutJoinVertical(title, "", info1, info2, "", warning) - - // In a real application, you would output to stderr: - // fmt.Fprintln(os.Stderr, output) - - // For test purposes, just verify the output contains expected content - if !strings.Contains(output, "Trial Execution Plan") { - panic("missing title") - } - if !strings.Contains(output, "test-workflow") { - panic("missing workflow name") - } - if !strings.Contains(output, "WARNING") { - panic("missing warning") - } -} diff --git a/pkg/console/select.go b/pkg/console/select.go deleted file mode 100644 index 0d2a94a0ba..0000000000 --- a/pkg/console/select.go +++ /dev/null @@ -1,91 +0,0 @@ -//go:build !js && !wasm - -package console - -import ( - "errors" - - "github.com/charmbracelet/huh" - "github.com/github/gh-aw/pkg/tty" -) - -// PromptSelect shows an interactive single-select menu -// Returns the selected value or an error -func PromptSelect(title, description string, options []SelectOption) (string, error) { - // Validate inputs first - if len(options) == 0 { - return "", errors.New("no options provided") - } - - // Check if stdin is a TTY - if not, we can't show interactive forms - if !tty.IsStderrTerminal() { - return "", errors.New("interactive selection not available (not a TTY)") - } - - var selected string - - // Convert options to huh.Option format - huhOptions := make([]huh.Option[string], len(options)) - for i, opt := range options { - huhOptions[i] = huh.NewOption(opt.Label, opt.Value) - } - - form := huh.NewForm( - huh.NewGroup( - huh.NewSelect[string](). - Title(title). - Description(description). - Options(huhOptions...). 
- Value(&selected), - ), - ).WithAccessible(IsAccessibleMode()) - - if err := form.Run(); err != nil { - return "", err - } - - return selected, nil -} - -// PromptMultiSelect shows an interactive multi-select menu -// Returns the selected values or an error -func PromptMultiSelect(title, description string, options []SelectOption, limit int) ([]string, error) { - // Validate inputs first - if len(options) == 0 { - return nil, errors.New("no options provided") - } - - // Check if stdin is a TTY - if not, we can't show interactive forms - if !tty.IsStderrTerminal() { - return nil, errors.New("interactive selection not available (not a TTY)") - } - - var selected []string - - // Convert options to huh.Option format - huhOptions := make([]huh.Option[string], len(options)) - for i, opt := range options { - huhOptions[i] = huh.NewOption(opt.Label, opt.Value) - } - - multiSelect := huh.NewMultiSelect[string](). - Title(title). - Description(description). - Options(huhOptions...). - Value(&selected) - - // Set limit if specified (0 means no limit) - if limit > 0 { - multiSelect.Limit(limit) - } - - form := huh.NewForm( - huh.NewGroup(multiSelect), - ).WithAccessible(IsAccessibleMode()) - - if err := form.Run(); err != nil { - return nil, err - } - - return selected, nil -} diff --git a/pkg/console/select_test.go b/pkg/console/select_test.go deleted file mode 100644 index 9e513e8ac3..0000000000 --- a/pkg/console/select_test.go +++ /dev/null @@ -1,87 +0,0 @@ -//go:build !integration - -package console - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestPromptSelect(t *testing.T) { - t.Run("function signature", func(t *testing.T) { - // Verify the function exists and has the right signature - _ = PromptSelect - }) - - t.Run("requires options", func(t *testing.T) { - title := "Select an option" - description := "Choose one" - options := []SelectOption{} - - _, err := PromptSelect(title, description, options) 
- require.Error(t, err, "Should error with no options") - assert.Contains(t, err.Error(), "no options", "Error should mention missing options") - }) - - t.Run("validates parameters with options", func(t *testing.T) { - title := "Select an option" - description := "Choose one" - options := []SelectOption{ - {Label: "Option 1", Value: "opt1"}, - {Label: "Option 2", Value: "opt2"}, - } - - _, err := PromptSelect(title, description, options) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) -} - -func TestPromptMultiSelect(t *testing.T) { - t.Run("function signature", func(t *testing.T) { - // Verify the function exists and has the right signature - _ = PromptMultiSelect - }) - - t.Run("requires options", func(t *testing.T) { - title := "Select options" - description := "Choose multiple" - options := []SelectOption{} - limit := 0 - - _, err := PromptMultiSelect(title, description, options, limit) - require.Error(t, err, "Should error with no options") - assert.Contains(t, err.Error(), "no options", "Error should mention missing options") - }) - - t.Run("validates parameters with options", func(t *testing.T) { - title := "Select options" - description := "Choose multiple" - options := []SelectOption{ - {Label: "Option 1", Value: "opt1"}, - {Label: "Option 2", Value: "opt2"}, - {Label: "Option 3", Value: "opt3"}, - } - limit := 10 - - _, err := PromptMultiSelect(title, description, options, limit) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) -} - -func TestSelectOption(t *testing.T) { - t.Run("struct creation", func(t *testing.T) { - opt := SelectOption{ - Label: "Test Label", - Value: "test-value", - } - - assert.Equal(t, "Test Label", opt.Label, "Label should 
match") - assert.Equal(t, "test-value", opt.Value, "Value should match") - }) -} diff --git a/pkg/constants/constants.go b/pkg/constants/constants.go index 692f7bd770..fd773518cb 100644 --- a/pkg/constants/constants.go +++ b/pkg/constants/constants.go @@ -422,6 +422,13 @@ const DefaultMCPGatewayContainer = "ghcr.io/github/gh-aw-mcpg" // This directory is shared between the agent container and MCP gateway for large payload exchange const DefaultMCPGatewayPayloadDir = "/tmp/gh-aw/mcp-payloads" +// DefaultMCPGatewayPayloadSizeThreshold is the default size threshold (in bytes) for storing payloads to disk. +// Payloads larger than this threshold are stored to disk, smaller ones are returned inline. +// Default: 524288 bytes (512KB) - chosen to accommodate typical MCP tool responses including +// GitHub API queries (list_commits, list_issues, etc.) without triggering disk storage. +// This prevents agent looping issues when payloadPath is not accessible in agent containers. +const DefaultMCPGatewayPayloadSizeThreshold = 524288 + // DefaultFirewallRegistry is the container image registry for AWF (gh-aw-firewall) Docker images const DefaultFirewallRegistry = "ghcr.io/github/gh-aw-firewall" diff --git a/pkg/logger/error_formatting.go b/pkg/logger/error_formatting.go deleted file mode 100644 index 6ba2d55086..0000000000 --- a/pkg/logger/error_formatting.go +++ /dev/null @@ -1,47 +0,0 @@ -package logger - -import ( - "regexp" - "strings" -) - -// Pre-compiled regexes for performance (avoid recompiling in hot paths). -var ( - // Timestamp patterns for log cleanup - // Pattern 1: ISO 8601 with T or space separator (e.g., "2024-01-01T12:00:00.123Z " or "2024-01-01 12:00:00 "). - timestampPattern1 = regexp.MustCompile(`^\d{4}-\d{2}-\d{2}[T\s]\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}:\d{2}|Z)?\s*`) - // Pattern 2: Bracketed date-time (e.g., "[2024-01-01 12:00:00] "). 
- timestampPattern2 = regexp.MustCompile(`^\[\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}\]\s*`) - // Pattern 3: Bracketed time only (e.g., "[12:00:00] "). - timestampPattern3 = regexp.MustCompile(`^\[\d{2}:\d{2}:\d{2}\]\s+`) - // Pattern 4: Time only with optional milliseconds (e.g., "12:00:00.123 "). - timestampPattern4 = regexp.MustCompile(`^\d{2}:\d{2}:\d{2}(\.\d+)?\s+`) - - // Log level pattern for message cleanup (case-insensitive). - logLevelPattern = regexp.MustCompile(`(?i)^\[?(ERROR|WARNING|WARN|INFO|DEBUG)\]?\s*[:-]?\s*`) -) - -// ExtractErrorMessage extracts a clean error message from a log line. -// It removes timestamps, log level prefixes, and other common noise. -// If the message is longer than 200 characters, it will be truncated. -func ExtractErrorMessage(line string) string { - // Remove common timestamp patterns using pre-compiled regexes - cleanedLine := line - cleanedLine = timestampPattern1.ReplaceAllString(cleanedLine, "") - cleanedLine = timestampPattern2.ReplaceAllString(cleanedLine, "") - cleanedLine = timestampPattern3.ReplaceAllString(cleanedLine, "") - cleanedLine = timestampPattern4.ReplaceAllString(cleanedLine, "") - - // Remove common log level prefixes using pre-compiled regex - cleanedLine = logLevelPattern.ReplaceAllString(cleanedLine, "") - - // Trim whitespace - cleanedLine = strings.TrimSpace(cleanedLine) - - // If the line is too long (>200 chars), truncate it - if len(cleanedLine) > 200 { - cleanedLine = cleanedLine[:197] + "..." 
- } - - return cleanedLine -} diff --git a/pkg/logger/error_formatting_test.go b/pkg/logger/error_formatting_test.go deleted file mode 100644 index c07856146a..0000000000 --- a/pkg/logger/error_formatting_test.go +++ /dev/null @@ -1,177 +0,0 @@ -//go:build !integration - -package logger - -import ( - "strings" - "testing" -) - -func TestExtractErrorMessage(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "ISO 8601 timestamp with T separator and Z", - input: "2024-01-01T12:00:00.123Z Error: connection failed", - expected: "connection failed", - }, - { - name: "ISO 8601 timestamp with T separator and timezone offset", - input: "2024-01-01T12:00:00.123+00:00 Error: connection failed", - expected: "connection failed", - }, - { - name: "Date-time with space separator", - input: "2024-01-01 12:00:00 Error: connection failed", - expected: "connection failed", - }, - { - name: "Date-time with space separator and milliseconds", - input: "2024-01-01 12:00:00.456 Error: connection failed", - expected: "connection failed", - }, - { - name: "Bracketed date-time", - input: "[2024-01-01 12:00:00] Error: connection failed", - expected: "connection failed", - }, - { - name: "Bracketed time only", - input: "[12:00:00] Error: connection failed", - expected: "connection failed", - }, - { - name: "Time only with milliseconds", - input: "12:00:00.123 Error: connection failed", - expected: "connection failed", - }, - { - name: "Time only without milliseconds", - input: "12:00:00 Error: connection failed", - expected: "connection failed", - }, - { - name: "ERROR prefix with colon", - input: "ERROR: connection failed", - expected: "connection failed", - }, - { - name: "ERROR prefix without colon", - input: "ERROR connection failed", - expected: "connection failed", - }, - { - name: "Bracketed ERROR prefix", - input: "[ERROR] connection failed", - expected: "connection failed", - }, - { - name: "Bracketed ERROR prefix with colon", - 
input: "[ERROR]: connection failed", - expected: "connection failed", - }, - { - name: "WARNING prefix", - input: "WARNING: disk space low", - expected: "disk space low", - }, - { - name: "WARN prefix", - input: "WARN: deprecated API used", - expected: "deprecated API used", - }, - { - name: "INFO prefix", - input: "INFO: service started", - expected: "service started", - }, - { - name: "DEBUG prefix", - input: "DEBUG: processing request", - expected: "processing request", - }, - { - name: "Case insensitive log level", - input: "error: connection failed", - expected: "connection failed", - }, - { - name: "Combined timestamp and log level", - input: "2024-01-01 12:00:00 ERROR: connection failed", - expected: "connection failed", - }, - { - name: "Combined ISO timestamp with Z and log level", - input: "2024-01-01T12:00:00Z ERROR: connection failed", - expected: "connection failed", - }, - { - name: "Multiple timestamps - only first is removed", - input: "[12:00:00] 2024-01-01 12:00:00 ERROR: connection failed", - expected: "2024-01-01 12:00:00 ERROR: connection failed", - }, - { - name: "No timestamp or log level", - input: "connection failed", - expected: "connection failed", - }, - { - name: "Empty string", - input: "", - expected: "", - }, - { - name: "Only whitespace", - input: " ", - expected: "", - }, - { - name: "Truncation at 200 chars", - input: "ERROR: " + strings.Repeat("a", 250), - expected: strings.Repeat("a", 197) + "...", - }, - { - name: "Exactly 200 chars - no truncation", - input: "ERROR: " + strings.Repeat("a", 193), - expected: strings.Repeat("a", 193), - }, - { - name: "Real world example from metrics.go", - input: "2024-01-15 14:30:22 ERROR: Failed to connect to database", - expected: "Failed to connect to database", - }, - { - name: "Real world example from copilot_agent.go", - input: "2024-01-15T14:30:22.123Z ERROR: API request failed", - expected: "API request failed", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t 
*testing.T) { - result := ExtractErrorMessage(tt.input) - if result != tt.expected { - t.Errorf("ExtractErrorMessage(%q) = %q, want %q", tt.input, result, tt.expected) - } - }) - } -} - -func BenchmarkExtractErrorMessage(b *testing.B) { - testLine := "2024-01-01T12:00:00.123Z ERROR: connection failed to remote server" - - for b.Loop() { - ExtractErrorMessage(testLine) - } -} - -func BenchmarkExtractErrorMessageLong(b *testing.B) { - testLine := "2024-01-01T12:00:00.123Z ERROR: " + strings.Repeat("very long error message ", 20) - - for b.Loop() { - ExtractErrorMessage(testLine) - } -} diff --git a/pkg/parser/ansi_strip.go b/pkg/parser/ansi_strip.go deleted file mode 100644 index a8d911ca9f..0000000000 --- a/pkg/parser/ansi_strip.go +++ /dev/null @@ -1,12 +0,0 @@ -package parser - -import ( - "github.com/github/gh-aw/pkg/stringutil" -) - -// StripANSI removes ANSI escape codes from a string. -// This is a thin wrapper around stringutil.StripANSI for backward compatibility. -// The comprehensive implementation lives in pkg/stringutil/ansi.go. 
-func StripANSI(s string) string { - return stringutil.StripANSI(s) -} diff --git a/pkg/parser/frontmatter_merge_test.go b/pkg/parser/frontmatter_merge_test.go index e0c23c3ef5..7af8849902 100644 --- a/pkg/parser/frontmatter_merge_test.go +++ b/pkg/parser/frontmatter_merge_test.go @@ -259,5 +259,3 @@ func TestMergeToolsFromJSON(t *testing.T) { }) } } - -// Test StripANSI function diff --git a/pkg/parser/frontmatter_utils_test.go b/pkg/parser/frontmatter_utils_test.go index c0142145d9..b9dba2eb71 100644 --- a/pkg/parser/frontmatter_utils_test.go +++ b/pkg/parser/frontmatter_utils_test.go @@ -6,7 +6,6 @@ import ( "encoding/json" "os" "path/filepath" - "strings" "testing" "github.com/github/gh-aw/pkg/testutil" @@ -385,220 +384,8 @@ name: Test } // Test mergeToolsFromJSON function -func TestStripANSI(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "empty string", - input: "", - expected: "", - }, - { - name: "plain text without ANSI", - input: "Hello World", - expected: "Hello World", - }, - { - name: "simple CSI color sequence", - input: "\x1b[31mRed Text\x1b[0m", - expected: "Red Text", - }, - { - name: "multiple CSI sequences", - input: "\x1b[1m\x1b[31mBold Red\x1b[0m\x1b[32mGreen\x1b[0m", - expected: "Bold RedGreen", - }, - { - name: "CSI cursor movement", - input: "Line 1\x1b[2;1HLine 2", - expected: "Line 1Line 2", - }, - { - name: "CSI erase sequences", - input: "Text\x1b[2JCleared\x1b[K", - expected: "TextCleared", - }, - { - name: "OSC sequence with BEL terminator", - input: "\x1b]0;Window Title\x07Content", - expected: "Content", - }, - { - name: "OSC sequence with ST terminator", - input: "\x1b]2;Terminal Title\x1b\\More content", - expected: "More content", - }, - { - name: "character set selection G0", - input: "\x1b(0Hello\x1b(B", - expected: "Hello", - }, - { - name: "character set selection G1", - input: "\x1b)0World\x1b)B", - expected: "World", - }, - { - name: "keypad mode sequences", - input: 
"\x1b=Keypad\x1b>Normal", - expected: "KeypadNormal", - }, - { - name: "reset sequence", - input: "Before\x1bcAfter", - expected: "BeforeAfter", - }, - { - name: "save and restore cursor", - input: "Start\x1b7Middle\x1b8End", - expected: "StartMiddleEnd", - }, - { - name: "index and reverse index", - input: "Text\x1bDDown\x1bMUp", - expected: "TextDownUp", - }, - { - name: "next line and horizontal tab set", - input: "Line\x1bENext\x1bHTab", - expected: "LineNextTab", - }, - { - name: "complex CSI with parameters", - input: "\x1b[38;5;196mBright Red\x1b[48;5;21mBlue BG\x1b[0m", - expected: "Bright RedBlue BG", - }, - { - name: "CSI with semicolon parameters", - input: "\x1b[1;31;42mBold red on green\x1b[0m", - expected: "Bold red on green", - }, - { - name: "malformed escape at end", - input: "Text\x1b", - expected: "Text", - }, - { - name: "malformed CSI at end", - input: "Text\x1b[31", - expected: "Text", - }, - { - name: "malformed OSC at end", - input: "Text\x1b]0;Title", - expected: "Text", - }, - { - name: "escape followed by invalid character", - input: "Text\x1bXInvalid", - expected: "TextInvalid", - }, - { - name: "consecutive escapes", - input: "\x1b[31m\x1b[1m\x1b[4mText\x1b[0m", - expected: "Text", - }, - { - name: "mixed content with newlines", - input: "Line 1\n\x1b[31mRed Line 2\x1b[0m\nLine 3", - expected: "Line 1\nRed Line 2\nLine 3", - }, - { - name: "common terminal output", - input: "\x1b[?25l\x1b[2J\x1b[H\x1b[32m✓\x1b[0m Success", - expected: "✓ Success", - }, - { - name: "git diff style colors", - input: "\x1b[32m+Added line\x1b[0m\n\x1b[31m-Removed line\x1b[0m", - expected: "+Added line\n-Removed line", - }, - { - name: "unicode content with ANSI", - input: "\x1b[33m🎉 Success! 测试\x1b[0m", - expected: "🎉 Success! 
测试", - }, - { - name: "very long CSI sequence", - input: "\x1b[1;2;3;4;5;6;7;8;9;10;11;12;13;14;15mLong params\x1b[0m", - expected: "Long params", - }, - { - name: "CSI with question mark private parameter", - input: "\x1b[?25hCursor visible\x1b[?25l", - expected: "Cursor visible", - }, - { - name: "CSI with greater than private parameter", - input: "\x1b[>0cDevice attributes\x1b[>1c", - expected: "Device attributes", - }, - { - name: "all final CSI characters test", - input: "\x1b[@\x1b[A\x1b[B\x1b[C\x1b[D\x1b[E\x1b[F\x1b[G\x1b[H\x1b[I\x1b[J\x1b[K\x1b[L\x1b[M\x1b[N\x1b[O\x1b[P\x1b[Q\x1b[R\x1b[S\x1b[T\x1b[U\x1b[V\x1b[W\x1b[X\x1b[Y\x1b[Z\x1b[[\x1b[\\\x1b[]\x1b[^\x1b[_\x1b[`\x1b[a\x1b[b\x1b[c\x1b[d\x1b[e\x1b[f\x1b[g\x1b[h\x1b[i\x1b[j\x1b[k\x1b[l\x1b[m\x1b[n\x1b[o\x1b[p\x1b[q\x1b[r\x1b[s\x1b[t\x1b[u\x1b[v\x1b[w\x1b[x\x1b[y\x1b[z\x1b[{\x1b[|\x1b[}\x1b[~Text", - expected: "Text", - }, - { - name: "CSI with invalid final character", - input: "Before\x1b[31Text after", - expected: "Beforeext after", - }, - { - name: "real world lipgloss output", - input: "\x1b[1;38;2;80;250;123m✓\x1b[0;38;2;248;248;242m Success message\x1b[0m", - expected: "✓ Success message", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := StripANSI(tt.input) - if result != tt.expected { - t.Errorf("StripANSI(%q) = %q, want %q", tt.input, result, tt.expected) - } - }) - } -} // Benchmark StripANSI function for performance -func BenchmarkStripANSI(b *testing.B) { - testCases := []struct { - name string - input string - }{ - { - name: "plain text", - input: "This is plain text without any ANSI codes", - }, - { - name: "simple color", - input: "\x1b[31mRed text\x1b[0m", - }, - { - name: "complex formatting", - input: "\x1b[1;38;2;255;0;0m\x1b[48;2;0;255;0mComplex formatting\x1b[0m", - }, - { - name: "mixed content", - input: "Normal \x1b[31mred\x1b[0m normal \x1b[32mgreen\x1b[0m normal \x1b[34mblue\x1b[0m text", - }, - { - name: "long text with ANSI", - input: 
strings.Repeat("\x1b[31mRed \x1b[32mGreen \x1b[34mBlue\x1b[0m ", 100), - }, - } - - for _, tc := range testCases { - b.Run(tc.name, func(b *testing.B) { - for range b.N { - StripANSI(tc.input) - } - }) - } -} func TestIsWorkflowSpec(t *testing.T) { tests := []struct { diff --git a/pkg/parser/virtual_fs_test_helpers.go b/pkg/parser/virtual_fs_test_helpers.go deleted file mode 100644 index 72e1095b67..0000000000 --- a/pkg/parser/virtual_fs_test_helpers.go +++ /dev/null @@ -1,12 +0,0 @@ -package parser - -// SetReadFileFuncForTest overrides the file reading function for testing. -// This enables testing virtual filesystem behavior in native (non-wasm) builds. -// Returns a cleanup function that restores the original. -func SetReadFileFuncForTest(fn func(string) ([]byte, error)) func() { - original := readFileFunc - readFileFunc = fn - return func() { - readFileFunc = original - } -} diff --git a/pkg/stringutil/paths.go b/pkg/stringutil/paths.go deleted file mode 100644 index e63f0bc176..0000000000 --- a/pkg/stringutil/paths.go +++ /dev/null @@ -1,42 +0,0 @@ -package stringutil - -import "strings" - -// NormalizePath normalizes a file path by resolving . and .. components. -// It splits the path on "/" and processes each component: -// - Empty parts and "." are skipped -// - ".." moves up one directory (if possible) -// - Other parts are added to the result -// -// This is useful for resolving relative paths in bundler operations and -// other file path manipulations where . and .. components need to be resolved. -// -// Examples: -// -// NormalizePath("a/b/../c") // returns "a/c" -// NormalizePath("./a/./b") // returns "a/b" -// NormalizePath("a/b/../../c") // returns "c" -// NormalizePath("../a/b") // returns "a/b" (leading .. 
is ignored) -// NormalizePath("a//b") // returns "a/b" (empty parts removed) -func NormalizePath(path string) string { - // Split path into parts - parts := strings.Split(path, "/") - var result []string - - for _, part := range parts { - if part == "" || part == "." { - // Skip empty parts and current directory references - continue - } - if part == ".." { - // Go up one directory - if len(result) > 0 { - result = result[:len(result)-1] - } - } else { - result = append(result, part) - } - } - - return strings.Join(result, "/") -} diff --git a/pkg/stringutil/paths_test.go b/pkg/stringutil/paths_test.go deleted file mode 100644 index caf718d464..0000000000 --- a/pkg/stringutil/paths_test.go +++ /dev/null @@ -1,129 +0,0 @@ -//go:build !integration - -package stringutil - -import "testing" - -func TestNormalizePath(t *testing.T) { - tests := []struct { - name string - path string - expected string - }{ - { - name: "simple path", - path: "a/b/c", - expected: "a/b/c", - }, - { - name: "path with single dot", - path: "a/./b", - expected: "a/b", - }, - { - name: "path with multiple dots", - path: "./a/./b/./c", - expected: "a/b/c", - }, - { - name: "path with double dot", - path: "a/b/../c", - expected: "a/c", - }, - { - name: "path with multiple double dots", - path: "a/b/../../c", - expected: "c", - }, - { - name: "path with leading double dot", - path: "../a/b", - expected: "a/b", - }, - { - name: "path with trailing double dot", - path: "a/b/..", - expected: "a", - }, - { - name: "path with empty parts", - path: "a//b///c", - expected: "a/b/c", - }, - { - name: "complex path", - path: "a/./b/../c/d/../../e", - expected: "a/e", - }, - { - name: "empty path", - path: "", - expected: "", - }, - { - name: "single dot", - path: ".", - expected: "", - }, - { - name: "double dot only", - path: "..", - expected: "", - }, - { - name: "multiple double dots beyond root", - path: "../../a", - expected: "a", - }, - { - name: "mixed slashes and dots", - path: "a/b/./c/../d", - 
expected: "a/b/d", - }, - { - name: "path with only dots and slashes", - path: "./../.", - expected: "", - }, - { - name: "real-world bundler path", - path: "./lib/utils/../../helpers/common", - expected: "helpers/common", - }, - { - name: "deeply nested path with parent refs", - path: "a/b/c/d/../../../e/f", - expected: "a/e/f", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := NormalizePath(tt.path) - if result != tt.expected { - t.Errorf("NormalizePath(%q) = %q; want %q", tt.path, result, tt.expected) - } - }) - } -} - -func BenchmarkNormalizePath(b *testing.B) { - path := "a/b/c/./d/../e/f/../../g" - for b.Loop() { - NormalizePath(path) - } -} - -func BenchmarkNormalizePath_Simple(b *testing.B) { - path := "a/b/c/d/e" - for b.Loop() { - NormalizePath(path) - } -} - -func BenchmarkNormalizePath_Complex(b *testing.B) { - path := "./a/./b/../c/d/../../e/f/g/h/../../../i" - for b.Loop() { - NormalizePath(path) - } -} diff --git a/pkg/workflow/action_pins_test.go b/pkg/workflow/action_pins_test.go index 09844f3d40..9a85bf9152 100644 --- a/pkg/workflow/action_pins_test.go +++ b/pkg/workflow/action_pins_test.go @@ -297,9 +297,9 @@ func TestApplyActionPinToStep(t *testing.T) { func TestGetActionPinsSorting(t *testing.T) { pins := getActionPins() - // Verify we got all the pins (39 as of February 2026) - if len(pins) != 39 { - t.Errorf("getActionPins() returned %d pins, expected 39", len(pins)) + // Verify we got all the pins (33 as of February 2026) + if len(pins) != 33 { + t.Errorf("getActionPins() returned %d pins, expected 33", len(pins)) } // Verify they are sorted by version (descending) then by repository name (ascending) diff --git a/pkg/workflow/add_labels.go b/pkg/workflow/add_labels.go index 113a261b43..cb2adcb654 100644 --- a/pkg/workflow/add_labels.go +++ b/pkg/workflow/add_labels.go @@ -1,8 +1,6 @@ package workflow import ( - "errors" - "github.com/github/gh-aw/pkg/logger" ) @@ -38,33 +36,3 @@ func (c *Compiler) 
parseAddLabelsConfig(outputMap map[string]any) *AddLabelsConf return &config } - -// buildAddLabelsJob creates the add_labels job -func (c *Compiler) buildAddLabelsJob(data *WorkflowData, mainJobName string) (*Job, error) { - addLabelsLog.Printf("Building add_labels job for workflow: %s, main_job: %s", data.Name, mainJobName) - - if data.SafeOutputs == nil || data.SafeOutputs.AddLabels == nil { - return nil, errors.New("safe-outputs configuration is required") - } - - cfg := data.SafeOutputs.AddLabels - - // Build list job config - listJobConfig := ListJobConfig{ - SafeOutputTargetConfig: cfg.SafeOutputTargetConfig, - Allowed: cfg.Allowed, - Blocked: cfg.Blocked, - } - - // Use shared builder for list-based safe-output jobs - return c.BuildListSafeOutputJob(data, mainJobName, listJobConfig, cfg.BaseSafeOutputConfig, ListJobBuilderConfig{ - JobName: "add_labels", - StepName: "Add Labels", - StepID: "add_labels", - EnvPrefix: "GH_AW_LABELS", - OutputName: "labels_added", - Script: getAddLabelsScript(), - Permissions: NewPermissionsContentsReadIssuesWritePRWrite(), - DefaultMax: 3, - }) -} diff --git a/pkg/workflow/bundler.go b/pkg/workflow/bundler.go deleted file mode 100644 index 8c2f9c84f3..0000000000 --- a/pkg/workflow/bundler.go +++ /dev/null @@ -1,589 +0,0 @@ -// This file provides JavaScript bundling for agentic workflows. -// -// # JavaScript Bundler with Runtime Mode Support -// -// The bundler supports two runtime environments: -// -// 1. GitHub Script Mode (RuntimeModeGitHubScript) -// - Used for JavaScript embedded in GitHub Actions YAML via actions/github-script -// - No module system available (no require() or module.exports at runtime) -// - All local requires must be bundled inline -// - All module.exports statements are removed -// - Validation ensures no local requires or module references remain -// -// 2. 
Node.js Mode (RuntimeModeNodeJS) -// - Used for standalone Node.js scripts that run on filesystem -// - Full CommonJS module system available -// - module.exports statements are preserved -// - Local requires can remain if modules are available on filesystem -// - Less aggressive bundling and validation -// -// # Usage -// -// For GitHub Script mode (default for backward compatibility): -// -// bundled, err := BundleJavaScriptFromSources(mainContent, sources, "") -// // or explicitly: -// bundled, err := BundleJavaScriptWithMode(mainContent, sources, "", RuntimeModeGitHubScript) -// -// For Node.js mode: -// -// bundled, err := BundleJavaScriptWithMode(mainContent, sources, "", RuntimeModeNodeJS) -// -// # Guardrails and Validation -// -// The bundler includes several guardrails based on runtime mode: -// -// - validateNoLocalRequires: Ensures all local requires (./... or ../...) are bundled (GitHub Script mode only) -// - validateNoModuleReferences: Ensures no module.exports or exports.* remain (GitHub Script mode only) -// - removeExports: Strips module.exports from bundled code (GitHub Script mode only) -// -// These validations prevent runtime errors when JavaScript is executed in environments -// without a module system. 
- -package workflow - -import ( - "fmt" - "path/filepath" - "regexp" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var bundlerLog = logger.New("workflow:bundler") - -// RuntimeMode represents the JavaScript runtime environment -type RuntimeMode int - -const ( - // RuntimeModeGitHubScript indicates JavaScript running in actions/github-script - // In this mode: - // - All local requires must be bundled (no module system) - // - module.exports statements must be removed - // - No module object references allowed - RuntimeModeGitHubScript RuntimeMode = iota - - // RuntimeModeNodeJS indicates JavaScript running as a Node.js script - // In this mode: - // - module.exports can be preserved - // - Local requires can be kept if modules are available on filesystem - // - Full Node.js module system is available - RuntimeModeNodeJS -) - -// String returns a string representation of the RuntimeMode -func (r RuntimeMode) String() string { - switch r { - case RuntimeModeGitHubScript: - return "github-script" - case RuntimeModeNodeJS: - return "nodejs" - default: - return "unknown" - } -} - -// BundleJavaScriptFromSources bundles JavaScript from in-memory sources -// sources is a map where keys are file paths (e.g., "sanitize.cjs") and values are the content -// mainContent is the main JavaScript content that may contain require() calls -// basePath is the base directory path for resolving relative imports (e.g., "js") -// -// DEPRECATED: Use BundleJavaScriptWithMode instead to specify runtime mode explicitly. -// This function defaults to RuntimeModeGitHubScript for backward compatibility. 
-// -// Migration guide: -// - For GitHub Script action (inline in YAML): use BundleJavaScriptWithMode(content, sources, basePath, RuntimeModeGitHubScript) -// - For Node.js scripts (filesystem-based): use BundleJavaScriptWithMode(content, sources, basePath, RuntimeModeNodeJS) -// -// This function will be maintained for backward compatibility but new code should use BundleJavaScriptWithMode. -func BundleJavaScriptFromSources(mainContent string, sources map[string]string, basePath string) (string, error) { - return BundleJavaScriptWithMode(mainContent, sources, basePath, RuntimeModeGitHubScript) -} - -// BundleJavaScriptWithMode bundles JavaScript from in-memory sources with specified runtime mode -// sources is a map where keys are file paths (e.g., "sanitize.cjs") and values are the content -// mainContent is the main JavaScript content that may contain require() calls -// basePath is the base directory path for resolving relative imports (e.g., "js") -// mode specifies the target runtime environment (GitHub script action vs Node.js) -func BundleJavaScriptWithMode(mainContent string, sources map[string]string, basePath string, mode RuntimeMode) (string, error) { - bundlerLog.Printf("Bundling JavaScript: source_count=%d, base_path=%s, main_content_size=%d bytes, runtime_mode=%s", - len(sources), basePath, len(mainContent), mode) - - // Validate that no runtime mode mixing occurs - if err := validateNoRuntimeMixing(mainContent, sources, mode); err != nil { - bundlerLog.Printf("Runtime mode validation failed: %v", err) - return "", err - } - - // Track already processed files to avoid circular dependencies - processed := make(map[string]bool) - - // Bundle the main content recursively - bundled, err := bundleFromSources(mainContent, basePath, sources, processed, mode) - if err != nil { - bundlerLog.Printf("Bundling failed: %v", err) - return "", err - } - - // Deduplicate require statements (keep only the first occurrence) - bundled = deduplicateRequires(bundled) - 
- // Mode-specific processing and validations - switch mode { - case RuntimeModeGitHubScript: - // GitHub Script mode: remove module.exports from final output - bundled = removeExports(bundled) - - // Inject await main() call for inline execution - // This allows scripts to export main when used with require(), but still execute - // when inlined directly in github-script action - if strings.Contains(bundled, "async function main()") || strings.Contains(bundled, "async function main ()") { - bundled = bundled + "\nawait main();\n" - bundlerLog.Print("Injected 'await main()' call for GitHub Script inline execution") - } - - // Validate all local requires are bundled and module references removed - if err := validateNoLocalRequires(bundled); err != nil { - bundlerLog.Printf("Validation failed: %v", err) - return "", err - } - if err := validateNoModuleReferences(bundled); err != nil { - bundlerLog.Printf("Module reference validation failed: %v", err) - return "", err - } - - case RuntimeModeNodeJS: - // Node.js mode: more permissive, allows module.exports and may allow local requires - // Local requires are OK if modules will be available on filesystem - bundlerLog.Print("Node.js mode: module.exports preserved, local requires allowed") - // Note: We still bundle what we can, but don't fail on remaining requires - } - - // Log size information about the bundled output - lines := strings.Split(bundled, "\n") - var maxLineLength int - for _, line := range lines { - if len(line) > maxLineLength { - maxLineLength = len(line) - } - } - - bundlerLog.Printf("Bundling completed: processed_files=%d, output_size=%d bytes, output_lines=%d, max_line_length=%d chars", - len(processed), len(bundled), len(lines), maxLineLength) - return bundled, nil -} - -// bundleFromSources processes content and recursively bundles its dependencies from the sources map -// The mode parameter controls how module.exports statements are handled -func bundleFromSources(content string, currentPath 
string, sources map[string]string, processed map[string]bool, mode RuntimeMode) (string, error) { - bundlerLog.Printf("Processing file for bundling: current_path=%s, content_size=%d bytes, runtime_mode=%s", currentPath, len(content), mode) - - // Regular expression to match require('./...') or require("./...") - // This matches both single-line and multi-line destructuring: - // const { x } = require("./file.cjs"); - // const { - // x, - // y - // } = require("./file.cjs"); - // Captures the require path where it starts with ./ or ../ - requireRegex := regexp.MustCompile(`(?s)(?:const|let|var)\s+(?:\{[^}]*\}|\w+)\s*=\s*require\(['"](\.\.?/[^'"]+)['"]\);?`) - - // Find all requires and their positions - matches := requireRegex.FindAllStringSubmatchIndex(content, -1) - - if len(matches) == 0 { - bundlerLog.Print("No requires found in content") - // No requires found, return content as-is - return content, nil - } - - bundlerLog.Printf("Found %d require statements to process", len(matches)) - - var result strings.Builder - lastEnd := 0 - - for _, match := range matches { - // match[0], match[1] are the start and end of the full match - // match[2], match[3] are the start and end of the captured group (the path) - matchStart := match[0] - matchEnd := match[1] - pathStart := match[2] - pathEnd := match[3] - - // Write content before this require - result.WriteString(content[lastEnd:matchStart]) - - // Extract the require path - requirePath := content[pathStart:pathEnd] - - // Resolve the full path relative to current path - var fullPath string - if currentPath == "" { - fullPath = requirePath - } else { - fullPath = filepath.Join(currentPath, requirePath) - } - - // Ensure .cjs extension - if !strings.HasSuffix(fullPath, ".cjs") && !strings.HasSuffix(fullPath, ".js") { - fullPath += ".cjs" - } - - // Normalize the path (clean up ./ and ../) - fullPath = filepath.Clean(fullPath) - - // Convert Windows path separators to forward slashes for consistency - fullPath = 
filepath.ToSlash(fullPath) - - // Check if we've already processed this file - if processed[fullPath] { - bundlerLog.Printf("Skipping already processed file: %s", fullPath) - // Skip - already inlined - result.WriteString("// Already inlined: " + requirePath + "\n") - } else { - // Mark as processed - processed[fullPath] = true - - // Look up the required file in sources - requiredContent, ok := sources[fullPath] - if !ok { - bundlerLog.Printf("Required file not found in sources: %s", fullPath) - return "", fmt.Errorf("required file not found in sources: %s", fullPath) - } - - bundlerLog.Printf("Inlining file: %s (size: %d bytes)", fullPath, len(requiredContent)) - - // Recursively bundle the required file - requiredDir := filepath.Dir(fullPath) - bundledRequired, err := bundleFromSources(requiredContent, requiredDir, sources, processed, mode) - if err != nil { - return "", err - } - - // Remove exports from the bundled content based on runtime mode - var cleanedRequired string - if mode == RuntimeModeGitHubScript { - // GitHub Script mode: remove all module.exports - cleanedRequired = removeExports(bundledRequired) - bundlerLog.Printf("Processed %s (github-script mode): original_size=%d, after_export_removal=%d", - fullPath, len(bundledRequired), len(cleanedRequired)) - } else { - // Node.js mode: preserve module.exports - cleanedRequired = bundledRequired - bundlerLog.Printf("Processed %s (nodejs mode): size=%d, module.exports preserved", - fullPath, len(bundledRequired)) - } - - // Add a comment indicating the inlined file - fmt.Fprintf(&result, "// === Inlined from %s ===\n", requirePath) - result.WriteString(cleanedRequired) - fmt.Fprintf(&result, "// === End of %s ===\n", requirePath) - } - - lastEnd = matchEnd - } - - // Write any remaining content after the last require - result.WriteString(content[lastEnd:]) - - return result.String(), nil -} - -// removeExports removes module.exports and exports statements from JavaScript code -// This function removes 
ALL exports, including conditional ones, because GitHub Script -// mode does not support any form of module.exports -func removeExports(content string) string { - lines := strings.Split(content, "\n") - var result strings.Builder - - // Regular expressions for export patterns - moduleExportsRegex := regexp.MustCompile(`^\s*module\.exports\s*=`) - exportsRegex := regexp.MustCompile(`^\s*exports\.\w+\s*=`) - - // Pattern for inline conditional exports like: - // ("undefined" != typeof module && module.exports && (module.exports = {...}), - // This pattern is used by minified code - inlineConditionalExportRegex := regexp.MustCompile(`\(\s*["']undefined["']\s*!=\s*typeof\s+module\s*&&\s*module\.exports`) - - // Track if we're inside a conditional export block that should be removed - inConditionalExport := false - conditionalDepth := 0 - - // Track if we're inside an unconditional module.exports block - inModuleExports := false - moduleExportsDepth := 0 - - for i, line := range lines { - trimmed := strings.TrimSpace(line) - - // Check for inline conditional export pattern (minified style) - // These lines should be entirely removed as they only contain the conditional export - if inlineConditionalExportRegex.MatchString(trimmed) { - // Skip the entire line - it's an inline conditional export - continue - } - - // Check if this starts a conditional export block - // Pattern: if (typeof module !== "undefined" && module.exports) { - // These need to be REMOVED for GitHub Script mode - if strings.Contains(trimmed, "if") && - strings.Contains(trimmed, "module") && - strings.Contains(trimmed, "exports") && - strings.Contains(trimmed, "{") { - inConditionalExport = true - conditionalDepth = 1 - // Skip this line - we're removing conditional exports for GitHub Script mode - continue - } - - // Track braces if we're in a conditional export - skip all lines until it closes - if inConditionalExport { - for _, ch := range trimmed { - if ch == '{' { - conditionalDepth++ - } else if 
ch == '}' { - conditionalDepth-- - if conditionalDepth == 0 { - inConditionalExport = false - // Skip this closing line and continue - continue - } - } - } - // Skip all lines inside the conditional export block - continue - } - - // Check if this line starts an unconditional module.exports assignment - if moduleExportsRegex.MatchString(line) { - // Check if it's a multi-line object export (ends with {) - if strings.Contains(trimmed, "{") && !strings.Contains(trimmed, "}") { - // This is a multi-line module.exports = { ... } - inModuleExports = true - moduleExportsDepth = 1 - // Skip this line and start tracking the export block - continue - } else { - // Single-line export, skip just this line - continue - } - } - - // Track braces if we're in an unconditional module.exports block - if inModuleExports { - // Count braces to track when the export block ends - for _, ch := range trimmed { - if ch == '{' { - moduleExportsDepth++ - } else if ch == '}' { - moduleExportsDepth-- - if moduleExportsDepth == 0 { - inModuleExports = false - // Skip this closing line and continue - continue - } - } - } - // Skip all lines inside the export block - continue - } - - // Skip lines that are unconditional exports.* assignments - if exportsRegex.MatchString(line) { - // Skip this line - it's an unconditional export - continue - } - - result.WriteString(line) - if i < len(lines)-1 { - result.WriteString("\n") - } - } - - return result.String() -} - -// deduplicateRequires removes duplicate require() statements from bundled JavaScript -// For destructured imports from the same module, it merges them into a single require statement -// keeping only the first occurrence of each unique require for non-destructured imports. 
-// IMPORTANT: Only merges requires that have the same indentation level to avoid moving -// requires across scope boundaries (which would cause "X is not defined" errors) -func deduplicateRequires(content string) string { - lines := strings.Split(content, "\n") - - // Helper to get indentation level of a line - getIndentation := func(line string) int { - count := 0 - for _, ch := range line { - //nolint:staticcheck // switch would require label for break; if-else is clearer here - if ch == ' ' { - count++ - } else if ch == '\t' { - count += 2 // Treat tab as 2 spaces for comparison - } else { - break - } - } - return count - } - - // Track module imports per indentation level: map[indent]map[moduleName][]names - moduleImportsByIndent := make(map[int]map[string][]string) - // Track which lines are require statements to skip during first pass - requireLines := make(map[int]bool) - // Track order of first appearance of each module per indentation: map[indent][]moduleName - moduleOrderByIndent := make(map[int][]string) - // Track the first line number where we see a require at each indentation - firstRequireLineByIndent := make(map[int]int) - - // Regular expression to match destructured require statements - // Matches: const/let/var { name1, name2 } = require('module'); - destructuredRequireRegex := regexp.MustCompile(`^\s*(?:const|let|var)\s+\{\s*([^}]+)\s*\}\s*=\s*require\(['"]([^'"]+)['"]\);?\s*$`) - // Regular expression to match non-destructured require statements - // Matches: const/let/var name = require('module'); - simpleRequireRegex := regexp.MustCompile(`^\s*(?:const|let|var)\s+(\w+)\s*=\s*require\(['"]([^'"]+)['"]\);?\s*$`) - - // First pass: collect all require statements grouped by indentation level - for i, line := range lines { - indent := getIndentation(line) - - // Try destructured require first - destructuredMatches := destructuredRequireRegex.FindStringSubmatch(line) - if len(destructuredMatches) > 2 { - moduleName := destructuredMatches[2] - 
destructuredNames := destructuredMatches[1] - - requireLines[i] = true - - // Initialize map for this indentation level if needed - if moduleImportsByIndent[indent] == nil { - moduleImportsByIndent[indent] = make(map[string][]string) - firstRequireLineByIndent[indent] = i - } - - // Parse the destructured names (split by comma and trim whitespace) - names := strings.Split(destructuredNames, ",") - for _, name := range names { - name = strings.TrimSpace(name) - if name != "" { - moduleImportsByIndent[indent][moduleName] = append(moduleImportsByIndent[indent][moduleName], name) - } - } - - // Track order of first appearance at this indentation - if len(moduleImportsByIndent[indent][moduleName]) == len(names) { - moduleOrderByIndent[indent] = append(moduleOrderByIndent[indent], moduleName) - } - continue - } - - // Try simple require - simpleMatches := simpleRequireRegex.FindStringSubmatch(line) - if len(simpleMatches) > 2 { - moduleName := simpleMatches[2] - varName := simpleMatches[1] - - requireLines[i] = true - - // Initialize map for this indentation level if needed - if moduleImportsByIndent[indent] == nil { - moduleImportsByIndent[indent] = make(map[string][]string) - firstRequireLineByIndent[indent] = i - } - - // For simple requires, store the variable name with a marker - if _, exists := moduleImportsByIndent[indent][moduleName]; !exists { - moduleOrderByIndent[indent] = append(moduleOrderByIndent[indent], moduleName) - } - moduleImportsByIndent[indent][moduleName] = append(moduleImportsByIndent[indent][moduleName], "VAR:"+varName) - } - } - - // Second pass: write output - var result strings.Builder - // Track which indentation levels have had their merged requires written - wroteRequiresByIndent := make(map[int]bool) - - for i, line := range lines { - indent := getIndentation(line) - - // Skip original require lines, we'll write merged ones at the first require position for each indent level - if requireLines[i] { - // Check if this is the first require at 
this indentation level - if firstRequireLineByIndent[indent] == i && !wroteRequiresByIndent[indent] { - // Write all merged require statements for this indentation level - moduleImports := moduleImportsByIndent[indent] - moduleOrder := moduleOrderByIndent[indent] - - indentStr := strings.Repeat(" ", indent) - - for _, moduleName := range moduleOrder { - imports := moduleImports[moduleName] - if len(imports) == 0 { - continue - } - - // Separate VAR: prefixed (simple requires) from destructured imports - var varNames []string - var destructuredNames []string - for _, imp := range imports { - if after, ok := strings.CutPrefix(imp, "VAR:"); ok { - varNames = append(varNames, after) - } else { - destructuredNames = append(destructuredNames, imp) - } - } - - // Deduplicate variable names for simple requires - if len(varNames) > 0 { - seen := make(map[string]bool) - var uniqueVarNames []string - for _, varName := range varNames { - if !seen[varName] { - seen[varName] = true - uniqueVarNames = append(uniqueVarNames, varName) - } - } - - // Write simple require(s) - use the first unique variable name - if len(uniqueVarNames) > 0 { - varName := uniqueVarNames[0] - fmt.Fprintf(&result, "%sconst %s = require(\"%s\");\n", indentStr, varName, moduleName) - bundlerLog.Printf("Keeping simple require: %s at indent %d", moduleName, indent) - } - } - - // Handle destructured imports - if len(destructuredNames) > 0 { - // Remove duplicates while preserving order - seen := make(map[string]bool) - var uniqueImports []string - for _, imp := range destructuredNames { - if !seen[imp] { - seen[imp] = true - uniqueImports = append(uniqueImports, imp) - } - } - - fmt.Fprintf(&result, "%sconst { %s } = require(\"%s\");\n", - indentStr, strings.Join(uniqueImports, ", "), moduleName) - bundlerLog.Printf("Merged destructured require for %s at indent %d: %v", moduleName, indent, uniqueImports) - } - } - wroteRequiresByIndent[indent] = true - } - // Skip this require line (it's been merged or will 
be merged) - continue - } - - // Keep non-require lines - result.WriteString(line) - if i < len(lines)-1 { - result.WriteString("\n") - } - } - - return result.String() -} diff --git a/pkg/workflow/bundler_deduplicate_test.go b/pkg/workflow/bundler_deduplicate_test.go deleted file mode 100644 index cecf32640b..0000000000 --- a/pkg/workflow/bundler_deduplicate_test.go +++ /dev/null @@ -1,44 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -// TestDeduplicateRequiresPreservesIndentation tests that deduplicateRequires -// preserves the indentation level of requires -func TestDeduplicateRequiresPreservesIndentation(t *testing.T) { - input := `async function main() { - const fs = require("fs"); - - if (fs.existsSync("/tmp/test.txt")) { - console.log("exists"); - } -} - -const path = require("path"); -console.log(path.basename("/tmp/file.txt")); -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Check that fs require is at indent 2 - if !strings.Contains(output, " const fs = require(\"fs\");") { - t.Error("fs require should have 2 spaces of indentation") - } - - // Check that path require is at indent 0 - if !strings.Contains(output, "const path = require(\"path\");") { - t.Error("path require should have 0 spaces of indentation") - - // Check if it was incorrectly indented - if strings.Contains(output, " const path = require(\"path\");") { - t.Error("path require was incorrectly indented with 2 spaces") - } - } -} diff --git a/pkg/workflow/bundler_duplicate_modules_test.go b/pkg/workflow/bundler_duplicate_modules_test.go deleted file mode 100644 index 9d3387606e..0000000000 --- a/pkg/workflow/bundler_duplicate_modules_test.go +++ /dev/null @@ -1,65 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -// TestDeduplicateRequiresDuplicateModules tests that when multiple files require -// the same module with the same 
variable name, only one require statement is kept -func TestDeduplicateRequiresDuplicateModules(t *testing.T) { - // Simulates what happens when multiple inlined files all require "fs" - input := `const fs = require("fs"); -const path = require("path"); -// Inlined from file1.cjs -const fs = require("fs"); -// Inlined from file2.cjs -const fs = require("fs"); -const path = require("path"); -// Inlined from file3.cjs -const fs = require("fs"); - -function useModules() { - fs.existsSync("/tmp"); - path.join("/tmp", "test"); -} -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Should have exactly 1 fs require - fsCount := strings.Count(output, `const fs = require`) - if fsCount != 1 { - t.Errorf("Expected 1 fs require, got %d", fsCount) - } - - // Should have exactly 1 path require - pathCount := strings.Count(output, `const path = require`) - if pathCount != 1 { - t.Errorf("Expected 1 path require, got %d", pathCount) - } - - // Both requires should come before their usage - fsRequireIndex := strings.Index(output, `require("fs")`) - fsUsageIndex := strings.Index(output, "fs.existsSync") - pathRequireIndex := strings.Index(output, `require("path")`) - pathUsageIndex := strings.Index(output, "path.join") - - if fsRequireIndex == -1 { - t.Error("fs require not found") - } - if pathRequireIndex == -1 { - t.Error("path require not found") - } - if fsUsageIndex != -1 && fsRequireIndex > fsUsageIndex { - t.Errorf("fs require should come before fs.existsSync usage") - } - if pathUsageIndex != -1 && pathRequireIndex > pathUsageIndex { - t.Errorf("path require should come before path.join usage") - } -} diff --git a/pkg/workflow/bundler_file_mode.go b/pkg/workflow/bundler_file_mode.go deleted file mode 100644 index 9c96e5a5cb..0000000000 --- a/pkg/workflow/bundler_file_mode.go +++ /dev/null @@ -1,529 +0,0 @@ -// This file provides JavaScript bundling for agentic workflows. 
-// -// # File Mode Bundler -// -// This file implements a file-based bundling mode for GitHub Script actions that writes -// JavaScript files to disk instead of inlining them in YAML. This approach maximizes -// reuse of helper modules within the same job. -// -// # How it works -// -// 1. CollectScriptFiles - Recursively collects all JavaScript files used by a script -// 2. GenerateWriteScriptsStep - Creates a step that writes all files to /opt/gh-aw/scripts/ -// 3. GenerateRequireScript - Converts a script to require from the local filesystem -// -// # Benefits -// -// - Reduces YAML size by avoiding duplicate inlined code -// - Maximizes reuse of helper modules within the same job -// - Makes debugging easier (files exist on disk during execution) -// - Reduces memory pressure from large bundled strings - -package workflow - -import ( - "crypto/sha256" - "encoding/hex" - "fmt" - "path/filepath" - "regexp" - "sort" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var fileModeLog = logger.New("workflow:bundler_file_mode") - -// ScriptsBasePath is the directory where JavaScript files are written at runtime -// This must match SetupActionDestination since files are copied there by the setup action -const ScriptsBasePath = "/opt/gh-aw/actions" - -// SetupActionDestination is the directory where the setup action writes activation scripts -const SetupActionDestination = "/opt/gh-aw/actions" - -// ScriptFile represents a JavaScript file to be written to disk -type ScriptFile struct { - // Path is the relative path within ScriptsBasePath (e.g., "create_issue.cjs") - Path string - // Content is the JavaScript content to write - Content string - // Hash is a short hash of the content for cache invalidation - Hash string -} - -// ScriptFilesResult contains the collected script files and metadata -type ScriptFilesResult struct { - // Files is the list of files to write, deduplicated and sorted - Files []ScriptFile - // MainScriptPath is the path to the main entry 
point script - MainScriptPath string - // TotalSize is the total size of all files in bytes - TotalSize int -} - -// CollectScriptFiles recursively collects all JavaScript files used by a script. -// It starts from the main script and follows all local require() statements. -// Top-level await patterns (like `await main();`) are patched to work in CommonJS. -// -// Parameters: -// - scriptName: Name of the main script (e.g., "create_issue") -// - mainContent: The main script content -// - sources: Map of all available JavaScript sources (from GetJavaScriptSources()) -// -// Returns a ScriptFilesResult with all files needed, or an error if a required file is missing. -// -// Note: This includes the main script in the output. Use CollectScriptDependencies if you -// only want the dependencies (for when the main script is inlined in github-script). -func CollectScriptFiles(scriptName string, mainContent string, sources map[string]string) (*ScriptFilesResult, error) { - fileModeLog.Printf("Collecting script files for: %s (%d bytes)", scriptName, len(mainContent)) - - // Track collected files and avoid duplicates - collected := make(map[string]*ScriptFile) - processed := make(map[string]bool) - - // The main script path - mainPath := scriptName + ".cjs" - - // Patch top-level await patterns to work in CommonJS - patchedContent := patchTopLevelAwaitForFileMode(mainContent) - - // Add the main script first - hash := computeShortHash(patchedContent) - collected[mainPath] = &ScriptFile{ - Path: mainPath, - Content: patchedContent, - Hash: hash, - } - processed[mainPath] = true - - // Recursively collect dependencies - if err := collectDependencies(mainContent, "", sources, collected, processed); err != nil { - return nil, err - } - - // Convert to sorted slice for deterministic output - var files []ScriptFile - totalSize := 0 - for _, file := range collected { - files = append(files, *file) - totalSize += len(file.Content) - } - - // Sort by path for consistent output - 
sort.Slice(files, func(i, j int) bool { - return files[i].Path < files[j].Path - }) - - fileModeLog.Printf("Collected %d files, total size: %d bytes", len(files), totalSize) - - return &ScriptFilesResult{ - Files: files, - MainScriptPath: mainPath, - TotalSize: totalSize, - }, nil -} - -// CollectScriptDependencies collects only the dependencies of a script (not the main script itself). -// This is used when the main script is inlined in github-script but its dependencies -// need to be written to disk. -// -// Parameters: -// - scriptName: Name of the main script (e.g., "create_issue") -// - mainContent: The main script content -// - sources: Map of all available JavaScript sources (from GetJavaScriptSources()) -// -// Returns a ScriptFilesResult with only the dependency files, or an error if a required file is missing. -func CollectScriptDependencies(scriptName string, mainContent string, sources map[string]string) (*ScriptFilesResult, error) { - fileModeLog.Printf("Collecting dependencies for: %s (%d bytes)", scriptName, len(mainContent)) - - // Track collected files and avoid duplicates - collected := make(map[string]*ScriptFile) - processed := make(map[string]bool) - - // Mark the main script as processed so we don't include it - mainPath := scriptName + ".cjs" - processed[mainPath] = true - - // Recursively collect dependencies (but not the main script) - if err := collectDependencies(mainContent, "", sources, collected, processed); err != nil { - return nil, err - } - - // Convert to sorted slice for deterministic output - var files []ScriptFile - totalSize := 0 - for _, file := range collected { - files = append(files, *file) - totalSize += len(file.Content) - } - - // Sort by path for consistent output - sort.Slice(files, func(i, j int) bool { - return files[i].Path < files[j].Path - }) - - fileModeLog.Printf("Collected %d dependency files, total size: %d bytes", len(files), totalSize) - - return &ScriptFilesResult{ - Files: files, - MainScriptPath: 
mainPath, - TotalSize: totalSize, - }, nil -} - -// collectDependencies recursively collects all files required by the given content -func collectDependencies(content string, currentDir string, sources map[string]string, collected map[string]*ScriptFile, processed map[string]bool) error { - // Regular expression to match require('./...') or require("./...") - requireRegex := regexp.MustCompile(`require\(['"](\.\.?/[^'"]+)['"]\)`) - - matches := requireRegex.FindAllStringSubmatch(content, -1) - for _, match := range matches { - if len(match) <= 1 { - continue - } - - requirePath := match[1] - - // Resolve the full path - var fullPath string - if currentDir == "" { - fullPath = requirePath - } else { - fullPath = filepath.Join(currentDir, requirePath) - } - - // Ensure .cjs extension - if !strings.HasSuffix(fullPath, ".cjs") && !strings.HasSuffix(fullPath, ".js") { - fullPath += ".cjs" - } - - // Normalize the path - fullPath = filepath.Clean(fullPath) - fullPath = filepath.ToSlash(fullPath) - - // Skip if already processed - if processed[fullPath] { - continue - } - processed[fullPath] = true - - // Look up in sources - requiredContent, ok := sources[fullPath] - if !ok { - return fmt.Errorf("required file not found in sources: %s", fullPath) - } - - // Add to collected - hash := computeShortHash(requiredContent) - collected[fullPath] = &ScriptFile{ - Path: fullPath, - Content: requiredContent, - Hash: hash, - } - - fileModeLog.Printf("Collected dependency: %s (%d bytes)", fullPath, len(requiredContent)) - - // Recursively process this file's dependencies - requiredDir := filepath.Dir(fullPath) - if err := collectDependencies(requiredContent, requiredDir, sources, collected, processed); err != nil { - return err - } - } - - return nil -} - -// computeShortHash computes a short SHA256 hash of the content (first 8 characters) -func computeShortHash(content string) string { - hash := sha256.Sum256([]byte(content)) - return hex.EncodeToString(hash[:])[:8] -} - -// 
patchTopLevelAwaitForFileMode wraps top-level `await main();` calls in an async IIFE. -// CommonJS modules don't support top-level await, so we need to wrap it. -// -// This transforms: -// -// await main(); -// -// Into: -// -// (async () => { await main(); })(); -func patchTopLevelAwaitForFileMode(content string) string { - // Match `await main();` at the end of the file (with optional whitespace/newlines) - // This pattern is used in safe output scripts as the entry point - awaitMainRegex := regexp.MustCompile(`(?m)^await\s+main\s*\(\s*\)\s*;?\s*$`) - - return awaitMainRegex.ReplaceAllString(content, "(async () => { await main(); })();") -} - -// GenerateWriteScriptsStep generates the YAML for a step that writes all collected -// JavaScript files to /opt/gh-aw/scripts/. This step should be added once at the -// beginning of the safe_outputs job. -// -// The generated step uses a heredoc to write each file efficiently. -func GenerateWriteScriptsStep(files []ScriptFile) []string { - if len(files) == 0 { - return nil - } - - fileModeLog.Printf("Generating write scripts step for %d files", len(files)) - - var steps []string - - steps = append(steps, " - name: Setup JavaScript files\n") - steps = append(steps, " id: setup_scripts\n") - steps = append(steps, " shell: bash\n") - steps = append(steps, " run: |\n") - steps = append(steps, fmt.Sprintf(" mkdir -p %s\n", ScriptsBasePath)) - - // Write each file using cat with heredoc - for _, file := range files { - filePath := fmt.Sprintf("%s/%s", ScriptsBasePath, file.Path) - - // Ensure parent directory exists - dir := filepath.Dir(filePath) - if dir != ScriptsBasePath { - steps = append(steps, fmt.Sprintf(" mkdir -p %s\n", dir)) - } - - // Use heredoc to write file content safely - // Generate unique delimiter using file hash to avoid conflicts - delimiter := GenerateHeredocDelimiter("FILE_" + file.Hash) - steps = append(steps, fmt.Sprintf(" cat > %s << '%s'\n", filePath, delimiter)) - - // Write content line by line - 
lines := strings.SplitSeq(file.Content, "\n") - for line := range lines { - steps = append(steps, fmt.Sprintf(" %s\n", line)) - } - - steps = append(steps, fmt.Sprintf(" %s\n", delimiter)) - } - - return steps -} - -// GenerateRequireScript generates the JavaScript code that requires the main script -// from the filesystem instead of inlining the bundled code. -// -// For GitHub Script mode, the script is wrapped in an async IIFE to support -// top-level await patterns used in the JavaScript files (e.g., `await main();`). -// The globals (github, context, core, exec, io) are automatically available -// in the GitHub Script execution context. -func GenerateRequireScript(mainScriptPath string) string { - fullPath := fmt.Sprintf("%s/%s", ScriptsBasePath, mainScriptPath) - // Wrap in async IIFE to support top-level await in the required module - return fmt.Sprintf(`(async () => { await require('%s'); })();`, fullPath) -} - -// GitHubScriptGlobalsPreamble is JavaScript code that exposes the github-script -// built-in objects (github, context, core, exec, io) on the global JavaScript object. -// This allows required modules to access these globals via globalThis. -const GitHubScriptGlobalsPreamble = `// Expose github-script globals to required modules -globalThis.github = github; -globalThis.context = context; -globalThis.core = core; -globalThis.exec = exec; -globalThis.io = io; - -` - -// GetInlinedScriptForFileMode gets the main script content and transforms it for inlining -// in the github-script action while using file mode for dependencies. -// -// This function: -// 1. Adds a preamble to expose github-script globals (github, context, core, exec, io) on globalThis -// 2. Gets the script content from the registry -// 3. Transforms relative require() calls to absolute paths (e.g., './helper.cjs' -> '/opt/gh-aw/scripts/helper.cjs') -// 4. 
Patches top-level await patterns to work in the execution context -// -// This is different from GenerateRequireScript which just generates a require() call. -// Inlining the main script is necessary because: -// - require() runs in a separate module context without the GitHub Script globals -// - The main script needs access to github, context, core, etc. in its top-level scope -// -// Dependencies are still loaded from files using require() and can access the globals -// via globalThis (e.g., globalThis.github, globalThis.core). -func GetInlinedScriptForFileMode(scriptName string) (string, error) { - // Get script content from registry - content := DefaultScriptRegistry.GetSource(scriptName) - if content == "" { - return "", fmt.Errorf("script not found in registry: %s", scriptName) - } - - // Transform relative requires to absolute paths pointing to /opt/gh-aw/scripts/ - transformed := TransformRequiresToAbsolutePath(content, ScriptsBasePath) - - // Patch top-level await patterns - patched := patchTopLevelAwaitForFileMode(transformed) - - // Add preamble to expose globals to required modules - result := GitHubScriptGlobalsPreamble + patched - - fileModeLog.Printf("Inlined script %s: %d bytes (transformed from %d)", scriptName, len(result), len(content)) - - return result, nil -} - -// RewriteScriptForFileMode rewrites a script's require statements to use absolute -// paths from /tmp/gh-aw/scripts/ instead of relative paths. 
-// -// This transforms: -// -// const { helper } = require('./helper.cjs'); -// -// Into: -// -// const { helper } = require('/opt/gh-aw/scripts/helper.cjs'); -func RewriteScriptForFileMode(content string, currentPath string) string { - // Regular expression to match local require statements - requireRegex := regexp.MustCompile(`require\(['"](\.\.?/)([^'"]+)['"]\)`) - - return requireRegex.ReplaceAllStringFunc(content, func(match string) string { - // Extract the path - submatches := requireRegex.FindStringSubmatch(match) - if len(submatches) < 3 { - return match - } - - relativePrefix := submatches[1] - requirePath := submatches[2] - - // Resolve the full path - var fullPath string - currentDir := filepath.Dir(currentPath) - switch relativePrefix { - case "./": - if currentDir == "." || currentDir == "" { - fullPath = requirePath - } else { - fullPath = filepath.Join(currentDir, requirePath) - } - case "../": - parentDir := filepath.Dir(currentDir) - fullPath = filepath.Join(parentDir, requirePath) - } - - // Normalize - fullPath = filepath.Clean(fullPath) - fullPath = filepath.ToSlash(fullPath) - - // Return the rewritten require - return fmt.Sprintf("require('%s/%s')", ScriptsBasePath, fullPath) - }) -} - -// TransformRequiresToAbsolutePath rewrites all relative require statements in content -// to use the specified absolute base path. 
-// -// This transforms: -// -// const { helper } = require('./helper.cjs'); -// -// Into: -// -// const { helper } = require('/base/path/helper.cjs'); -// -// Parameters: -// - content: The JavaScript content to transform -// - basePath: The absolute path to use for requires (e.g., "/opt/gh-aw/safeoutputs") -func TransformRequiresToAbsolutePath(content string, basePath string) string { - // Regular expression to match local require statements - requireRegex := regexp.MustCompile(`require\(['"](\.\.?/)([^'"]+)['"]\)`) - - return requireRegex.ReplaceAllStringFunc(content, func(match string) string { - // Extract the path - submatches := requireRegex.FindStringSubmatch(match) - if len(submatches) < 3 { - return match - } - - requirePath := submatches[2] - - // Return the rewritten require with the base path - return fmt.Sprintf("require('%s/%s')", basePath, requirePath) - }) -} - -// PrepareFilesForFileMode prepares all collected files for file mode by rewriting -// their require statements to use absolute paths. -func PrepareFilesForFileMode(files []ScriptFile) []ScriptFile { - result := make([]ScriptFile, len(files)) - for i, file := range files { - rewritten := RewriteScriptForFileMode(file.Content, file.Path) - result[i] = ScriptFile{ - Path: file.Path, - Content: rewritten, - Hash: computeShortHash(rewritten), - } - } - return result -} - -// CollectAllJobScriptFiles collects all JavaScript files needed by multiple scripts -// in a single job. This deduplicates common helper files across different safe output types. -// -// Parameters: -// - scriptNames: List of script names to collect (e.g., ["create_issue", "add_comment"]) -// - sources: Map of all available JavaScript sources -// -// Returns a combined ScriptFilesResult with all deduplicated files. 
-func CollectAllJobScriptFiles(scriptNames []string, sources map[string]string) (*ScriptFilesResult, error) { - fileModeLog.Printf("Collecting files for %d scripts: %v", len(scriptNames), scriptNames) - - // Track all collected files across all scripts - allFiles := make(map[string]*ScriptFile) - - for _, name := range scriptNames { - // Get the script content from the registry - content := DefaultScriptRegistry.GetSource(name) - if content == "" { - fileModeLog.Printf("Script not found in registry: %s, skipping", name) - continue - } - - // Collect only this script's dependencies (not the main script itself) - // The main script is inlined in the github-script action - result, err := CollectScriptDependencies(name, content, sources) - if err != nil { - return nil, fmt.Errorf("failed to collect dependencies for script %s: %w", name, err) - } - - // Merge into allFiles - for _, file := range result.Files { - if existing, ok := allFiles[file.Path]; ok { - // Already have this file - verify content matches - if existing.Hash != file.Hash { - fileModeLog.Printf("WARNING: File %s has different content from different scripts", file.Path) - } - } else { - allFiles[file.Path] = &ScriptFile{ - Path: file.Path, - Content: file.Content, - Hash: file.Hash, - } - } - } - } - - // Convert to sorted slice - var files []ScriptFile - totalSize := 0 - for _, file := range allFiles { - files = append(files, *file) - totalSize += len(file.Content) - } - - sort.Slice(files, func(i, j int) bool { - return files[i].Path < files[j].Path - }) - - fileModeLog.Printf("Total collected: %d unique dependency files, %d bytes", len(files), totalSize) - - return &ScriptFilesResult{ - Files: files, - TotalSize: totalSize, - }, nil -} diff --git a/pkg/workflow/bundler_file_mode_test.go b/pkg/workflow/bundler_file_mode_test.go deleted file mode 100644 index 12df187178..0000000000 --- a/pkg/workflow/bundler_file_mode_test.go +++ /dev/null @@ -1,255 +0,0 @@ -//go:build !integration - -package workflow 
- -import ( - "strings" - "testing" -) - -func TestCollectScriptFiles(t *testing.T) { - // Create mock sources with dependencies - sources := map[string]string{ - "main.cjs": ` -const { helper } = require('./helper.cjs'); -const { util } = require('./utils/util.cjs'); -helper(); -util(); -`, - "helper.cjs": ` -const { shared } = require('./shared.cjs'); -function helper() { - shared(); - console.log("helper"); -} -module.exports = { helper }; -`, - "shared.cjs": ` -function shared() { - console.log("shared"); -} -module.exports = { shared }; -`, - "utils/util.cjs": ` -function util() { - console.log("util"); -} -module.exports = { util }; -`, - } - - result, err := CollectScriptFiles("main", sources["main.cjs"], sources) - if err != nil { - t.Fatalf("CollectScriptFiles failed: %v", err) - } - - // Should collect all 4 files - if len(result.Files) != 4 { - t.Errorf("Expected 4 files, got %d", len(result.Files)) - for _, f := range result.Files { - t.Logf(" - %s", f.Path) - } - } - - // Check that main script path is set - if result.MainScriptPath != "main.cjs" { - t.Errorf("Expected MainScriptPath to be 'main.cjs', got '%s'", result.MainScriptPath) - } - - // Check total size is > 0 - if result.TotalSize == 0 { - t.Error("Expected TotalSize > 0") - } -} - -func TestCollectScriptFiles_MissingDependency(t *testing.T) { - sources := map[string]string{ - "main.cjs": ` -const { missing } = require('./missing.cjs'); -missing(); -`, - } - - _, err := CollectScriptFiles("main", sources["main.cjs"], sources) - if err == nil { - t.Fatal("Expected error for missing dependency, got nil") - } - if !strings.Contains(err.Error(), "missing.cjs") { - t.Errorf("Expected error to mention 'missing.cjs', got: %v", err) - } -} - -func TestCollectScriptFiles_CircularDependency(t *testing.T) { - // Circular dependencies should be handled (file only processed once) - sources := map[string]string{ - "a.cjs": ` -const { b } = require('./b.cjs'); -module.exports = { a: () => b() }; -`, - 
"b.cjs": ` -const { a } = require('./a.cjs'); -module.exports = { b: () => console.log("b") }; -`, - } - - result, err := CollectScriptFiles("a", sources["a.cjs"], sources) - if err != nil { - t.Fatalf("CollectScriptFiles failed with circular dependency: %v", err) - } - - // Should collect both files without infinite loop - if len(result.Files) != 2 { - t.Errorf("Expected 2 files, got %d", len(result.Files)) - } -} - -func TestGenerateWriteScriptsStep(t *testing.T) { - files := []ScriptFile{ - { - Path: "test.cjs", - Content: "console.log('hello');", - Hash: "abc12345", - }, - } - - steps := GenerateWriteScriptsStep(files) - if len(steps) == 0 { - t.Fatal("Expected steps to be generated") - } - - // Check that the step includes the mkdir command - stepsStr := strings.Join(steps, "") - if !strings.Contains(stepsStr, "mkdir -p /opt/gh-aw/actions") { - t.Error("Expected mkdir command for actions directory") - } - - // Check that the file is written - if !strings.Contains(stepsStr, "cat > /opt/gh-aw/actions/test.cjs") { - t.Error("Expected cat command for writing file") - } - - // Check that content is included - if !strings.Contains(stepsStr, "console.log") { - t.Error("Expected file content to be included") - } -} - -func TestGenerateRequireScript(t *testing.T) { - script := GenerateRequireScript("create_issue.cjs") - - if !strings.Contains(script, "/opt/gh-aw/actions/create_issue.cjs") { - t.Errorf("Expected script to require from /opt/gh-aw/actions/, got: %s", script) - } - - if !strings.Contains(script, "require(") { - t.Error("Expected script to contain require()") - } - - // Should be wrapped in async IIFE to support top-level await - if !strings.Contains(script, "(async () =>") { - t.Error("Should be wrapped in async IIFE to support top-level await") - } - - // Should have the closing IIFE parentheses - if !strings.Contains(script, ")()") { - t.Error("Should have IIFE invocation") - } -} - -func TestRewriteScriptForFileMode(t *testing.T) { - tests := []struct { 
- name string - content string - currentPath string - wantContain string - }{ - { - name: "simple relative require", - content: "const { helper } = require('./helper.cjs');", - currentPath: "main.cjs", - wantContain: "/opt/gh-aw/actions/helper.cjs", - }, - { - name: "nested relative require", - content: "const { util } = require('./utils/util.cjs');", - currentPath: "main.cjs", - wantContain: "/opt/gh-aw/actions/utils/util.cjs", - }, - { - name: "parent directory require", - content: "const { shared } = require('../shared.cjs');", - currentPath: "utils/util.cjs", - wantContain: "/opt/gh-aw/actions/shared.cjs", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := RewriteScriptForFileMode(tt.content, tt.currentPath) - if !strings.Contains(result, tt.wantContain) { - t.Errorf("Expected result to contain %q, got: %s", tt.wantContain, result) - } - }) - } -} - -func TestPrepareFilesForFileMode(t *testing.T) { - files := []ScriptFile{ - { - Path: "main.cjs", - Content: "const { helper } = require('./helper.cjs'); helper();", - Hash: "abc123", - }, - { - Path: "helper.cjs", - Content: "module.exports = { helper: () => {} };", - Hash: "def456", - }, - } - - prepared := PrepareFilesForFileMode(files) - if len(prepared) != 2 { - t.Fatalf("Expected 2 prepared files, got %d", len(prepared)) - } - - // Check that require paths are rewritten - mainFile := prepared[0] - if !strings.Contains(mainFile.Content, "/opt/gh-aw/actions/helper.cjs") { - t.Errorf("Expected main file to have rewritten require path, got: %s", mainFile.Content) - } - - // Check that hash is updated - if mainFile.Hash == files[0].Hash { - t.Error("Expected hash to be updated after rewriting") - } -} - -func TestCollectAllJobScriptFiles(t *testing.T) { - // This test uses the actual script registry - // Skip if registry is empty (shouldn't happen in normal runs) - if !DefaultScriptRegistry.Has("create_issue") { - t.Skip("Script registry not populated") - } - - scriptNames := 
[]string{"create_issue", "add_comment"} - sources := GetJavaScriptSources() - - result, err := CollectAllJobScriptFiles(scriptNames, sources) - if err != nil { - t.Fatalf("CollectAllJobScriptFiles failed: %v", err) - } - - // Should collect at least the 2 main scripts plus shared dependencies - if len(result.Files) < 2 { - t.Errorf("Expected at least 2 files, got %d", len(result.Files)) - } - - // Check that helpers are deduplicated (shared files should appear only once) - pathCounts := make(map[string]int) - for _, f := range result.Files { - pathCounts[f.Path]++ - if pathCounts[f.Path] > 1 { - t.Errorf("File %s appears multiple times", f.Path) - } - } -} diff --git a/pkg/workflow/bundler_fs_undefined_test.go b/pkg/workflow/bundler_fs_undefined_test.go deleted file mode 100644 index 991b24f944..0000000000 --- a/pkg/workflow/bundler_fs_undefined_test.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestBundleJavaScriptFsInsideFunctionWithMultilineDestructure tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptFsInsideFunctionWithMultilineDestructure(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/bundler_function_scope_test.go b/pkg/workflow/bundler_function_scope_test.go deleted file mode 100644 index c00a186c8d..0000000000 --- a/pkg/workflow/bundler_function_scope_test.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestBundleJavaScriptWithRequireInsideFunction tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithRequireInsideFunction(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") 
-} diff --git a/pkg/workflow/bundler_indentation_test.go b/pkg/workflow/bundler_indentation_test.go deleted file mode 100644 index 58a0d4e3c8..0000000000 --- a/pkg/workflow/bundler_indentation_test.go +++ /dev/null @@ -1,58 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -// TestDeduplicateRequiresWithMixedIndentation tests what happens when requires have different indentation -func TestDeduplicateRequiresWithMixedIndentation(t *testing.T) { - // This simulates the real scenario where some code has no indentation - // but other inlined code has indentation - input := `const { execFile } = require("child_process"); -const os = require("os"); - -function someFunction() { - const fs = require("fs"); - const path = require("path"); - - fs.existsSync("/tmp"); - path.join("/tmp", "test"); -} -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Count requires at each indentation level - lines := strings.Split(output, "\n") - indent0Requires := 0 - indent2Requires := 0 - - for _, line := range lines { - if strings.Contains(line, "require(") { - // Count leading spaces - spaces := len(line) - len(strings.TrimLeft(line, " ")) - switch spaces { - case 0: - indent0Requires++ - t.Logf("Indent 0: %s", line) - case 2: - indent2Requires++ - t.Logf("Indent 2: %s", line) - } - } - } - - t.Logf("Requires at indent 0: %d", indent0Requires) - t.Logf("Requires at indent 2: %d", indent2Requires) - - // fs and path should stay at indent 2 (inside the function scope) - if indent2Requires != 2 { - t.Errorf("Expected 2 requires at indent 2 (fs and path inside function), got %d", indent2Requires) - } -} diff --git a/pkg/workflow/bundler_inline_test.go b/pkg/workflow/bundler_inline_test.go deleted file mode 100644 index fd243ba7a8..0000000000 --- a/pkg/workflow/bundler_inline_test.go +++ /dev/null @@ -1,59 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - 
"testing" -) - -// TestDeduplicateRequiresWithInlinedContent tests deduplication with comment markers -func TestDeduplicateRequiresWithInlinedContent(t *testing.T) { - input := `// === Inlined from ./safe_outputs_mcp_server.cjs === -const { execFile, execSync } = require("child_process"); -const os = require("os"); -// === Inlined from ./read_buffer.cjs === -class ReadBuffer { -} -// === End of ./read_buffer.cjs === -// === Inlined from ./mcp_server_core.cjs === -const fs = require("fs"); -const path = require("path"); -function initLogFile(server) { - if (!fs.existsSync(server.logDir)) { - fs.mkdirSync(server.logDir, { recursive: true }); - } -} -// === End of ./mcp_server_core.cjs === -// === End of ./safe_outputs_mcp_server.cjs === -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Check that fs and path requires are present - if !strings.Contains(output, `require("fs")`) { - t.Error("fs require should be present in output") - } - - if !strings.Contains(output, `require("path")`) { - t.Error("path require should be present in output") - } - - // Check that they come before fs.existsSync usage - fsRequireIndex := strings.Index(output, `require("fs")`) - fsUsageIndex := strings.Index(output, "fs.existsSync") - found := strings.Contains(output, `require("path")`) - - if fsRequireIndex == -1 { - t.Error("fs require not found") - } - if !found { - t.Error("path require not found") - } - if fsUsageIndex != -1 && fsRequireIndex > fsUsageIndex { - t.Errorf("fs require should come before fs.existsSync usage (require at %d, usage at %d)", fsRequireIndex, fsUsageIndex) - } -} diff --git a/pkg/workflow/bundler_integration_test.go b/pkg/workflow/bundler_integration_test.go deleted file mode 100644 index a3419fe355..0000000000 --- a/pkg/workflow/bundler_integration_test.go +++ /dev/null @@ -1,55 +0,0 @@ -//go:build integration - -package workflow - -import ( - "testing" -) - -// TestBundlerIntegration tests the 
integration of bundler with embedded scripts -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundlerIntegration(t *testing.T) { - t.Skip("Bundler integration tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundlerCaching tests that bundling is cached and only happens once -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundlerCaching(t *testing.T) { - t.Skip("Bundler caching tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundlerConcurrency tests that the bundler works correctly under concurrent access -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundlerConcurrency(t *testing.T) { - t.Skip("Bundler concurrency tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundledScriptsContainHelperFunctions verifies that helper functions are properly bundled -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundledScriptsContainHelperFunctions(t *testing.T) { - t.Skip("Bundled scripts helper function tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundledScriptsDoNotContainExports verifies that exports are removed from bundled scripts -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundledScriptsDoNotContainExports(t *testing.T) { - t.Skip("Bundled scripts exports tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundledScriptsHaveCorrectStructure verifies the structure of bundled scripts -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundledScriptsHaveCorrectStructure(t *testing.T) { - t.Skip("Bundled scripts 
structure tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestSourceFilesAreSmaller verifies that source files are smaller than bundled scripts -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestSourceFilesAreSmaller(t *testing.T) { - t.Skip("Source file size comparison tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestGetJavaScriptSources verifies that GetJavaScriptSources returns all embedded sources -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestGetJavaScriptSources(t *testing.T) { - t.Skip("JavaScript sources tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/bundler_quotes_test.go b/pkg/workflow/bundler_quotes_test.go deleted file mode 100644 index 4ecce1e775..0000000000 --- a/pkg/workflow/bundler_quotes_test.go +++ /dev/null @@ -1,103 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -// TestDeduplicateRequiresWithSingleAndDoubleQuotes tests that deduplicateRequires -// handles both single and double quoted require statements correctly -func TestDeduplicateRequiresWithSingleAndDoubleQuotes(t *testing.T) { - input := `const fs = require("fs"); -const path = require('path'); - -function test() { - const result = path.join("/tmp", "test"); - return fs.readFileSync(result); -} -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Check that both requires are present - if !strings.Contains(output, `const fs = require("fs");`) { - t.Error("fs require with double quotes should be present") - } - - if !strings.Contains(output, `const path = require('path');`) && - !strings.Contains(output, `const path = require("path");`) { - t.Error("path require should be present (with single or double quotes)") - } - - 
// Check that path is defined before its use - found := strings.Contains(output, "const fs") - pathIndex := strings.Index(output, "const path") - joinIndex := strings.Index(output, "path.join") - - if pathIndex == -1 { - t.Error("path require is missing") - } - if joinIndex == -1 { - t.Error("path.join usage is missing") - } - if pathIndex > joinIndex { - t.Errorf("path require appears after path.join usage (path at %d, join at %d)", pathIndex, joinIndex) - } - if !found { - t.Error("fs require is missing") - } -} - -// TestDeduplicateRequiresMixedQuotesMultiple tests that the regex correctly -// handles multiple requires with mixed quote styles -func TestDeduplicateRequiresMixedQuotesMultiple(t *testing.T) { - input := `const fs = require("fs"); -const path = require('path'); -const os = require("os"); - -function useModules() { - console.log(fs.readFileSync("/tmp/test")); - console.log(path.join("/tmp", "test")); - console.log(os.tmpdir()); -} -` - - output := deduplicateRequires(input) - - t.Logf("Input:\n%s", input) - t.Logf("Output:\n%s", output) - - // Should have exactly one fs require - fsCount := strings.Count(output, `const fs = require`) - if fsCount != 1 { - t.Errorf("Expected 1 fs require, got %d", fsCount) - } - - // Should have exactly one path require - pathCount := strings.Count(output, `const path = require`) - if pathCount != 1 { - t.Errorf("Expected 1 path require, got %d", pathCount) - } - - // Should have exactly one os require - osCount := strings.Count(output, `const os = require`) - if osCount != 1 { - t.Errorf("Expected 1 os require, got %d", osCount) - } - - // All three modules should be present - if !strings.Contains(output, `require("fs")`) && !strings.Contains(output, `require('fs')`) { - t.Error("fs module should be required") - } - if !strings.Contains(output, `require("path")`) && !strings.Contains(output, `require('path')`) { - t.Error("path module should be required") - } - if !strings.Contains(output, `require("os")`) && 
!strings.Contains(output, `require('os')`) { - t.Error("os module should be required") - } -} diff --git a/pkg/workflow/bundler_runtime_mode_test.go b/pkg/workflow/bundler_runtime_mode_test.go deleted file mode 100644 index 43c9e8f9d9..0000000000 --- a/pkg/workflow/bundler_runtime_mode_test.go +++ /dev/null @@ -1,79 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestRuntimeModeString tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestRuntimeModeString(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptWithMode_GitHubScript tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithMode_GitHubScript(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptWithMode_NodeJS tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithMode_NodeJS(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptWithMode_GitHubScriptValidation tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithMode_GitHubScriptValidation(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoModuleReferences tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoModuleReferences(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to 
load external files at runtime") -} - -// TestBundleJavaScriptFromSources_BackwardCompatibility tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptFromSources_BackwardCompatibility(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptWithMode_MultipleFiles_NodeJS tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithMode_MultipleFiles_NodeJS(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoRuntimeMixing_GitHubScriptWithNodeJsHelper tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoRuntimeMixing_GitHubScriptWithNodeJsHelper(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoRuntimeMixing_NodeJsWithNodeJsHelper tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoRuntimeMixing_NodeJsWithNodeJsHelper(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoRuntimeMixing_GitHubScriptWithCompatibleHelper tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoRuntimeMixing_GitHubScriptWithCompatibleHelper(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoRuntimeMixing_GitHubScriptWithGitHubScriptAPIs tests bundler functionality -// SKIPPED: Scripts are now loaded from external 
files at runtime using require() pattern -func TestValidateNoRuntimeMixing_GitHubScriptWithGitHubScriptAPIs(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoRuntimeMixing_TransitiveDependency tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoRuntimeMixing_TransitiveDependency(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/bundler_runtime_validation.go b/pkg/workflow/bundler_runtime_validation.go deleted file mode 100644 index bad1ac0d05..0000000000 --- a/pkg/workflow/bundler_runtime_validation.go +++ /dev/null @@ -1,176 +0,0 @@ -// This file provides JavaScript runtime mode validation for agentic workflows. -// -// # Runtime Mode Validation -// -// This file validates that JavaScript scripts are compatible with their target runtime mode -// and that different runtime modes are not mixed in a bundling operation. This prevents -// runtime errors from incompatible API usage. -// -// # Runtime Modes -// -// GitHub Script Mode: -// - Used for JavaScript embedded in GitHub Actions YAML via actions/github-script -// - No module system available (no require() or module.exports at runtime) -// - GitHub Actions globals available (core.*, exec.*, github.*) -// -// Node.js Mode: -// - Used for standalone Node.js scripts that run on filesystem -// - Full CommonJS module system available -// - Standard Node.js APIs available (child_process, fs, etc.) 
-// - No GitHub Actions globals -// -// # Validation Functions -// -// - validateNoRuntimeMixing() - Ensures all files being bundled are compatible with target mode -// - validateRuntimeModeRecursive() - Recursively validates runtime compatibility -// - detectRuntimeMode() - Detects the intended runtime mode of a JavaScript file -// -// # When to Add Validation Here -// -// Add validation to this file when: -// - It validates runtime mode compatibility -// - It checks for mixing of incompatible scripts -// - It detects runtime-specific APIs -// -// For bundling functions, see bundler.go. -// For bundle safety validation, see bundler_safety_validation.go. -// For script content validation, see bundler_script_validation.go. -// For general validation, see validation.go. -// For detailed documentation, see scratchpad/validation-architecture.md - -package workflow - -import ( - "fmt" - "regexp" - "strings" - - "github.com/github/gh-aw/pkg/logger" - "github.com/github/gh-aw/pkg/stringutil" -) - -var bundlerRuntimeLog = logger.New("workflow:bundler_runtime_validation") - -// validateNoRuntimeMixing checks that all files being bundled are compatible with the target runtime mode -// This prevents mixing nodejs-only scripts (that use child_process) with github-script scripts -// Returns an error if incompatible runtime modes are detected -// Note: This function uses fail-fast error handling because runtime mode conflicts in dependencies -// need to be resolved one at a time, and showing multiple conflicting dependency chains would be confusing -func validateNoRuntimeMixing(mainScript string, sources map[string]string, targetMode RuntimeMode) error { - bundlerRuntimeLog.Printf("Validating runtime mode compatibility: target_mode=%s", targetMode) - - // Track which files have been checked to avoid redundant checks - checked := make(map[string]bool) - - // Recursively validate the main script and its dependencies - // This uses fail-fast error handling because runtime conflicts 
need sequential resolution - return validateRuntimeModeRecursive(mainScript, "", sources, targetMode, checked) -} - -// validateRuntimeModeRecursive recursively validates that all required files are compatible with the target runtime mode -func validateRuntimeModeRecursive(content string, currentPath string, sources map[string]string, targetMode RuntimeMode, checked map[string]bool) error { - // Extract all local require statements - requireRegex := regexp.MustCompile(`require\(['"](\.\.?/[^'"]+)['"]\)`) - matches := requireRegex.FindAllStringSubmatch(content, -1) - - for _, match := range matches { - if len(match) <= 1 { - continue - } - - requirePath := match[1] - - // Resolve the full path - var fullPath string - if currentPath == "" { - fullPath = requirePath - } else { - fullPath = currentPath + "/" + requirePath - } - - // Ensure .cjs extension - if !strings.HasSuffix(fullPath, ".cjs") && !strings.HasSuffix(fullPath, ".js") { - fullPath += ".cjs" - } - - // Normalize the path - fullPath = stringutil.NormalizePath(fullPath) - - // Skip if already checked - if checked[fullPath] { - continue - } - checked[fullPath] = true - - // Get the required file content - requiredContent, ok := sources[fullPath] - if !ok { - // File not found - this will be caught by other validation - continue - } - - // Detect the runtime mode of the required file - detectedMode := detectRuntimeMode(requiredContent) - - // Check for incompatibility - if detectedMode != RuntimeModeGitHubScript && targetMode != detectedMode { - return fmt.Errorf("runtime mode conflict: script requires '%s' which is a %s script, but the main script is compiled for %s mode.\n\nNode.js scripts cannot be bundled with GitHub Script mode scripts because they use incompatible APIs (e.g., child_process, fs).\n\nTo fix this:\n- Use only GitHub Script compatible scripts (core.*, exec.*, github.*) for GitHub Script mode\n- Or change the main script to Node.js mode if it needs Node.js APIs", - fullPath, detectedMode, 
targetMode) - } - - // Recursively check the required file's dependencies - requiredDir := "" - if strings.Contains(fullPath, "/") { - parts := strings.Split(fullPath, "/") - requiredDir = strings.Join(parts[:len(parts)-1], "/") - } - - if err := validateRuntimeModeRecursive(requiredContent, requiredDir, sources, targetMode, checked); err != nil { - return err - } - } - - return nil -} - -// detectRuntimeMode attempts to detect the intended runtime mode of a JavaScript file -// by analyzing its content for runtime-specific patterns. -// This is used to detect if a LOCAL file being bundled is incompatible with the target mode. -func detectRuntimeMode(content string) RuntimeMode { - // Check for Node.js-specific APIs that are CALLED in the code - // These indicate the script uses Node.js-only functionality - // Note: We only check for APIs that are fundamentally incompatible with github-script, - // specifically child_process APIs like execSync/spawnSync - nodeOnlyPatterns := []string{ - `\bexecSync\s*\(`, // execSync function call - `\bspawnSync\s*\(`, // spawnSync function call - } - - for _, pattern := range nodeOnlyPatterns { - matched, _ := regexp.MatchString(pattern, content) - if matched { - bundlerRuntimeLog.Printf("Detected Node.js mode: pattern '%s' found", pattern) - return RuntimeModeNodeJS - } - } - - // Check for github-script specific APIs - // These indicate the script is intended for GitHub Script mode - githubScriptPatterns := []string{ - `\bcore\.\w+`, // @actions/core - `\bgithub\.\w+`, // github context - } - - for _, pattern := range githubScriptPatterns { - matched, _ := regexp.MatchString(pattern, content) - if matched { - bundlerRuntimeLog.Printf("Detected GitHub Script mode: pattern '%s' found", pattern) - return RuntimeModeGitHubScript - } - } - - // If no specific patterns found, assume it's compatible with both (utility/helper functions) - // and return GitHub Script mode as the default/most restrictive - bundlerRuntimeLog.Print("No 
runtime-specific patterns found, assuming GitHub Script compatible") - return RuntimeModeGitHubScript -} diff --git a/pkg/workflow/bundler_safety_validation.go b/pkg/workflow/bundler_safety_validation.go deleted file mode 100644 index 1c235ddd99..0000000000 --- a/pkg/workflow/bundler_safety_validation.go +++ /dev/null @@ -1,223 +0,0 @@ -// This file provides JavaScript bundler safety validation for agentic workflows. -// -// # Bundle Safety Validation -// -// This file validates bundled JavaScript to ensure safe module dependencies and prevent -// runtime errors from missing modules. Validation ensures compatibility with target runtime mode. -// -// # Validation Functions -// -// - validateNoLocalRequires() - Validates bundled JavaScript has no local require() statements -// - validateNoModuleReferences() - Validates no module.exports or exports references remain -// - ValidateEmbeddedResourceRequires() - Validates embedded JavaScript dependencies exist -// -// # Validation Pattern: Bundling Verification -// -// Bundle safety validation ensures that local require() statements are inlined and -// module references are removed when required: -// - Scans bundled JavaScript for require('./...') or require('../...') patterns -// - Ignores require statements inside string literals -// - Returns hard errors if local requires are found (indicates bundling failure) -// - Helps prevent runtime module-not-found errors -// -// # When to Add Validation Here -// -// Add validation to this file when: -// - It validates JavaScript bundling correctness -// - It checks for missing module dependencies -// - It validates CommonJS require() statement resolution -// -// For bundling functions, see bundler.go. -// For runtime mode validation, see bundler_runtime_validation.go. -// For script content validation, see bundler_script_validation.go. -// For general validation, see validation.go. 
-// For detailed documentation, see scratchpad/validation-architecture.md - -package workflow - -import ( - "fmt" - "regexp" - "strings" - - "github.com/github/gh-aw/pkg/logger" - "github.com/github/gh-aw/pkg/stringutil" -) - -var bundlerSafetyLog = logger.New("workflow:bundler_safety_validation") - -// Pre-compiled regular expressions for validation (compiled once at package initialization for performance) -var ( - // moduleExportsRegex matches module.exports references - moduleExportsRegex = regexp.MustCompile(`\bmodule\.exports\b`) - // exportsRegex matches exports.property references - exportsRegex = regexp.MustCompile(`\bexports\.\w+`) -) - -// validateNoLocalRequires checks that the bundled JavaScript contains no local require() statements -// that weren't inlined during bundling. This prevents runtime errors from missing local modules. -// Returns an error if any local requires are found, otherwise returns nil -func validateNoLocalRequires(bundledContent string) error { - bundlerSafetyLog.Printf("Validating bundled JavaScript: %d bytes, %d lines", len(bundledContent), strings.Count(bundledContent, "\n")+1) - - // Regular expression to match local require statements - // Matches: require('./...') or require("../...") - localRequireRegex := regexp.MustCompile(`require\(['"](\.\.?/[^'"]+)['"]\)`) - - lines := strings.Split(bundledContent, "\n") - var foundRequires []string - - for lineNum, line := range lines { - // Check for local requires - matches := localRequireRegex.FindAllStringSubmatch(line, -1) - for _, match := range matches { - if len(match) > 1 { - requirePath := match[1] - foundRequires = append(foundRequires, fmt.Sprintf("line %d: require('%s')", lineNum+1, requirePath)) - } - } - } - - if len(foundRequires) > 0 { - bundlerSafetyLog.Printf("Validation failed: found %d un-inlined local require statements", len(foundRequires)) - return NewValidationError( - "bundled-javascript", - fmt.Sprintf("%d un-inlined requires", len(foundRequires)), - "bundled 
JavaScript contains local require() statements that were not inlined during bundling", - fmt.Sprintf("Found un-inlined requires:\n\n%s\n\nThis indicates a bundling failure. Check:\n1. All required files are in actions/setup/js/\n2. Bundler configuration includes all dependencies\n3. No circular dependencies exist\n\nRun 'make build' to regenerate bundles", strings.Join(foundRequires, "\n")), - ) - } - - bundlerSafetyLog.Print("Validation successful: no local require statements found") - return nil -} - -// validateNoModuleReferences checks that the bundled JavaScript contains no module.exports or exports references -// This is required for GitHub Script mode where no module system exists. -// Returns an error if any module references are found, otherwise returns nil -func validateNoModuleReferences(bundledContent string) error { - bundlerSafetyLog.Printf("Validating no module references: %d bytes", len(bundledContent)) - - lines := strings.Split(bundledContent, "\n") - var foundReferences []string - - for lineNum, line := range lines { - trimmed := strings.TrimSpace(line) - - // Skip comment lines - if strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "/*") || strings.HasPrefix(trimmed, "*") { - continue - } - - // Check for module.exports - if moduleExportsRegex.MatchString(line) { - foundReferences = append(foundReferences, fmt.Sprintf("line %d: module.exports reference", lineNum+1)) - } - - // Check for exports. 
- if exportsRegex.MatchString(line) { - foundReferences = append(foundReferences, fmt.Sprintf("line %d: exports reference", lineNum+1)) - } - } - - if len(foundReferences) > 0 { - bundlerSafetyLog.Printf("Validation failed: found %d module references", len(foundReferences)) - return NewValidationError( - "bundled-javascript", - fmt.Sprintf("%d module references", len(foundReferences)), - "bundled JavaScript for GitHub Script mode contains module.exports or exports references", - fmt.Sprintf("Found module references:\n\n%s\n\nGitHub Script mode does not support CommonJS module system. Check:\n1. Bundle configuration removes module references\n2. Code doesn't use module.exports or exports\n3. Using appropriate runtime mode (consider 'nodejs' mode if module system is needed)\n\nRun 'make build' to regenerate bundles", strings.Join(foundReferences, "\n")), - ) - } - - bundlerSafetyLog.Print("Validation successful: no module references found") - return nil -} - -// ValidateEmbeddedResourceRequires checks that all embedded JavaScript files in the sources map -// have their local require() dependencies available in the sources map. This prevents bundling failures -// when a file requires a local module that isn't embedded. -// -// This validation helps catch missing files in GetJavaScriptSources() at build/test time rather than -// at runtime when bundling fails. 
-// -// Parameters: -// - sources: map of file paths to their content (from GetJavaScriptSources()) -// -// Returns an error if any embedded file has local requires that reference files not in sources -func ValidateEmbeddedResourceRequires(sources map[string]string) error { - bundlerSafetyLog.Printf("Validating embedded resources: checking %d files for missing local requires", len(sources)) - - // Regular expression to match local require statements - // Matches: require('./...') or require("../...") - localRequireRegex := regexp.MustCompile(`require\(['"](\.\.?/[^'"]+)['"]\)`) - - var missingDeps []string - - // Check each file in sources - for filePath, content := range sources { - bundlerSafetyLog.Printf("Checking file: %s (%d bytes)", filePath, len(content)) - - // Find all local requires in this file - matches := localRequireRegex.FindAllStringSubmatch(content, -1) - if len(matches) == 0 { - continue - } - - bundlerSafetyLog.Printf("Found %d require statements in %s", len(matches), filePath) - - // Check each require - for _, match := range matches { - if len(match) <= 1 { - continue - } - - requirePath := match[1] - - // Resolve the required file path relative to the current file - currentDir := "" - if strings.Contains(filePath, "/") { - parts := strings.Split(filePath, "/") - currentDir = strings.Join(parts[:len(parts)-1], "/") - } - - var resolvedPath string - if currentDir == "" { - resolvedPath = requirePath - } else { - resolvedPath = currentDir + "/" + requirePath - } - - // Ensure .cjs extension - if !strings.HasSuffix(resolvedPath, ".cjs") && !strings.HasSuffix(resolvedPath, ".js") { - resolvedPath += ".cjs" - } - - // Normalize the path (remove ./ and ../) - resolvedPath = stringutil.NormalizePath(resolvedPath) - - // Check if the required file exists in sources - if _, ok := sources[resolvedPath]; !ok { - missingDep := fmt.Sprintf("%s requires '%s' (resolved to '%s') but it's not in sources map", - filePath, requirePath, resolvedPath) - missingDeps 
= append(missingDeps, missingDep) - bundlerSafetyLog.Printf("Missing dependency: %s", missingDep) - } else { - bundlerSafetyLog.Printf("Dependency OK: %s -> %s", filePath, resolvedPath) - } - } - } - - if len(missingDeps) > 0 { - bundlerSafetyLog.Printf("Validation failed: found %d missing dependencies", len(missingDeps)) - return NewValidationError( - "embedded-javascript", - fmt.Sprintf("%d missing dependencies", len(missingDeps)), - "embedded JavaScript files have missing local require() dependencies", - fmt.Sprintf("Missing dependencies:\n\n%s\n\nTo fix:\n1. Add missing .cjs files to actions/setup/js/\n2. Update GetJavaScriptSources() in pkg/workflow/js.go to include them\n3. Ensure file paths match require() statements\n4. Run 'make build' to regenerate bundles\n\nExample:\n//go:embed actions/setup/js/missing-file.cjs\nvar missingFileSource string", strings.Join(missingDeps, "\n")), - ) - } - - bundlerSafetyLog.Printf("Validation successful: all local requires are available in sources") - return nil -} diff --git a/pkg/workflow/bundler_scope_mixing_test.go b/pkg/workflow/bundler_scope_mixing_test.go deleted file mode 100644 index bc404d7aee..0000000000 --- a/pkg/workflow/bundler_scope_mixing_test.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestBundleJavaScriptWithMixedScopeRequires tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithMixedScopeRequires(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/bundler_scope_narrowing_test.go b/pkg/workflow/bundler_scope_narrowing_test.go deleted file mode 100644 index e03b010ea1..0000000000 --- a/pkg/workflow/bundler_scope_narrowing_test.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// 
TestBundleJavaScriptScopeNarrowing tests bundler functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptScopeNarrowing(t *testing.T) { - t.Skip("Bundler tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/bundler_script_validation.go b/pkg/workflow/bundler_script_validation.go deleted file mode 100644 index 17211e686b..0000000000 --- a/pkg/workflow/bundler_script_validation.go +++ /dev/null @@ -1,149 +0,0 @@ -// This file provides JavaScript script content validation for agentic workflows. -// -// # Script Content Validation -// -// This file validates JavaScript script content to ensure compatibility with runtime modes -// and adherence to platform conventions. Validation enforces proper API usage patterns -// for GitHub Script mode vs Node.js mode. -// -// # Validation Functions -// -// - validateNoExecSync() - Ensures GitHub Script mode scripts use exec instead of execSync -// - validateNoGitHubScriptGlobals() - Ensures Node.js scripts don't use GitHub Actions globals -// -// # Design Rationale -// -// The script content validation enforces two key constraints: -// 1. GitHub Script mode: Should not use execSync (use async exec from @actions/exec instead) -// 2. Node.js mode: Should not use GitHub Actions globals (core.*, exec.*, github.*) -// -// These rules ensure that scripts follow platform conventions: -// - GitHub Script mode runs inline in GitHub Actions YAML with GitHub-specific globals available -// - Node.js mode runs as standalone scripts with standard Node.js APIs only -// -// Validation happens at registration time (via panic) to catch errors during development/testing -// rather than at runtime. 
-// -// # When to Add Validation Here -// -// Add validation to this file when: -// - It validates JavaScript code content based on runtime mode -// - It checks for API usage patterns (execSync, GitHub Actions globals) -// - It validates script content for compatibility with execution environment -// -// For bundling functions, see bundler.go. -// For bundle safety validation, see bundler_safety_validation.go. -// For runtime mode validation, see bundler_runtime_validation.go. -// For general validation, see validation.go. -// For detailed documentation, see scratchpad/validation-architecture.md - -package workflow - -import ( - "fmt" - "regexp" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var bundlerScriptLog = logger.New("workflow:bundler_script_validation") - -// validateNoExecSync checks that GitHub Script mode scripts do not use execSync -// GitHub Script mode should use exec instead for better async/await handling -// Returns an error if execSync is found, otherwise returns nil -func validateNoExecSync(scriptName string, content string, mode RuntimeMode) error { - // Only validate GitHub Script mode - if mode != RuntimeModeGitHubScript { - return nil - } - - bundlerScriptLog.Printf("Validating no execSync in GitHub Script: %s (%d bytes)", scriptName, len(content)) - - // Regular expression to match execSync usage - // Matches: execSync(...) 
with various patterns - execSyncRegex := regexp.MustCompile(`\bexecSync\s*\(`) - - lines := strings.Split(content, "\n") - var foundUsages []string - - for lineNum, line := range lines { - trimmed := strings.TrimSpace(line) - - // Skip comment lines - if strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "/*") || strings.HasPrefix(trimmed, "*") { - continue - } - - // Check for execSync usage - if execSyncRegex.MatchString(line) { - foundUsages = append(foundUsages, fmt.Sprintf("line %d: %s", lineNum+1, strings.TrimSpace(line))) - } - } - - if len(foundUsages) > 0 { - bundlerScriptLog.Printf("Validation failed: found %d execSync usage(s) in %s", len(foundUsages), scriptName) - return fmt.Errorf("GitHub Script mode script '%s' contains %d execSync usage(s):\n %s\n\nGitHub Script mode should use exec instead of execSync for better async/await handling", - scriptName, len(foundUsages), strings.Join(foundUsages, "\n ")) - } - - bundlerScriptLog.Printf("Validation successful: no execSync usage found in %s", scriptName) - return nil -} - -// validateNoGitHubScriptGlobals checks that Node.js mode scripts do not use GitHub Actions globals -// Node.js scripts should not rely on actions/github-script globals like core.*, exec.*, or github.* -// Returns an error if GitHub Actions globals are found, otherwise returns nil -func validateNoGitHubScriptGlobals(scriptName string, content string, mode RuntimeMode) error { - // Only validate Node.js mode - if mode != RuntimeModeNodeJS { - return nil - } - - bundlerScriptLog.Printf("Validating no GitHub Actions globals in Node.js script: %s (%d bytes)", scriptName, len(content)) - - // Regular expressions to match GitHub Actions globals - // Matches: core.method, exec.method, github.property - coreGlobalRegex := regexp.MustCompile(`\bcore\.\w+`) - execGlobalRegex := regexp.MustCompile(`\bexec\.\w+`) - githubGlobalRegex := regexp.MustCompile(`\bgithub\.\w+`) - - lines := strings.Split(content, "\n") - var foundUsages 
[]string - - for lineNum, line := range lines { - trimmed := strings.TrimSpace(line) - - // Skip comment lines and type references - if strings.HasPrefix(trimmed, "//") || strings.HasPrefix(trimmed, "/*") || strings.HasPrefix(trimmed, "*") { - continue - } - if strings.Contains(trimmed, "/// 0 { - bundlerScriptLog.Printf("Validation failed: found %d GitHub Actions global usage(s) in %s", len(foundUsages), scriptName) - return fmt.Errorf("node.js mode script '%s' contains %d GitHub Actions global usage(s):\n %s\n\nNode.js scripts should not use GitHub Actions globals (core.*, exec.*, github.*)", - scriptName, len(foundUsages), strings.Join(foundUsages, "\n ")) - } - - bundlerScriptLog.Printf("Validation successful: no GitHub Actions globals found in %s", scriptName) - return nil -} diff --git a/pkg/workflow/bundler_script_validation_test.go b/pkg/workflow/bundler_script_validation_test.go deleted file mode 100644 index e9f767f315..0000000000 --- a/pkg/workflow/bundler_script_validation_test.go +++ /dev/null @@ -1,244 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestValidateNoExecSync_GitHubScriptMode(t *testing.T) { - tests := []struct { - name string - scriptName string - content string - mode RuntimeMode - expectError bool - }{ - { - name: "GitHub Script mode with execSync should fail", - scriptName: "test_script", - content: ` -const { execSync } = require("child_process"); -const result = execSync("ls -la"); -`, - mode: RuntimeModeGitHubScript, - expectError: true, - }, - { - name: "GitHub Script mode with exec should pass", - scriptName: "test_script", - content: ` -const { exec } = require("@actions/exec"); -await exec.exec("ls -la"); -`, - mode: RuntimeModeGitHubScript, - expectError: false, - }, - { - name: "GitHub Script mode without exec should pass", - scriptName: "test_script", - content: ` -const fs = require("fs"); -const data = 
fs.readFileSync("file.txt"); -`, - mode: RuntimeModeGitHubScript, - expectError: false, - }, - { - name: "Node.js mode with execSync should pass (not checked)", - scriptName: "test_script", - content: ` -const { execSync } = require("child_process"); -const result = execSync("ls -la"); -`, - mode: RuntimeModeNodeJS, - expectError: false, - }, - { - name: "GitHub Script mode with execSync in comment should pass", - scriptName: "test_script", - content: ` -// Don't use execSync, use exec instead -const { exec } = require("@actions/exec"); -`, - mode: RuntimeModeGitHubScript, - expectError: false, - }, - { - name: "GitHub Script mode with multiple execSync calls should fail", - scriptName: "test_script", - content: ` -const { execSync } = require("child_process"); -execSync("git status"); -const output = execSync("git diff"); -`, - mode: RuntimeModeGitHubScript, - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := validateNoExecSync(tt.scriptName, tt.content, tt.mode) - if tt.expectError { - require.Error(t, err, "Expected validation to fail") - assert.Contains(t, err.Error(), "execSync", "Error should mention execSync") - } else { - assert.NoError(t, err, "Expected validation to pass") - } - }) - } -} - -func TestValidateNoGitHubScriptGlobals_NodeJSMode(t *testing.T) { - tests := []struct { - name string - scriptName string - content string - mode RuntimeMode - expectError bool - }{ - { - name: "Node.js mode with core.* should fail", - scriptName: "test_script", - content: ` -const fs = require("fs"); -core.info("This is a message"); -`, - mode: RuntimeModeNodeJS, - expectError: true, - }, - { - name: "Node.js mode with exec.* should fail", - scriptName: "test_script", - content: ` -const fs = require("fs"); -await exec.exec("ls -la"); -`, - mode: RuntimeModeNodeJS, - expectError: true, - }, - { - name: "Node.js mode with github.* should fail", - scriptName: "test_script", - content: ` -const fs = require("fs"); 
-const repo = github.context.repo; -`, - mode: RuntimeModeNodeJS, - expectError: true, - }, - { - name: "Node.js mode without GitHub Actions globals should pass", - scriptName: "test_script", - content: ` -const fs = require("fs"); -const data = fs.readFileSync("file.txt"); -console.log("Processing data"); -`, - mode: RuntimeModeNodeJS, - expectError: false, - }, - { - name: "GitHub Script mode with core.* should pass (not checked)", - scriptName: "test_script", - content: ` -core.info("This is a message"); -core.setOutput("result", "value"); -`, - mode: RuntimeModeGitHubScript, - expectError: false, - }, - { - name: "Node.js mode with GitHub Actions globals in comment should pass", - scriptName: "test_script", - content: ` -// Don't use core.info in Node.js scripts -console.log("Use console.log instead"); -`, - mode: RuntimeModeNodeJS, - expectError: false, - }, - { - name: "Node.js mode with type reference should pass", - scriptName: "test_script", - content: ` -/// -const fs = require("fs"); -`, - mode: RuntimeModeNodeJS, - expectError: false, - }, - { - name: "Node.js mode with multiple GitHub Actions globals should fail", - scriptName: "test_script", - content: ` -const fs = require("fs"); -core.info("Message"); -exec.exec("ls"); -const repo = github.context.repo; -`, - mode: RuntimeModeNodeJS, - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := validateNoGitHubScriptGlobals(tt.scriptName, tt.content, tt.mode) - if tt.expectError { - assert.Error(t, err, "Expected validation to fail") - } else { - assert.NoError(t, err, "Expected validation to pass") - } - }) - } -} - -func TestScriptRegistry_RegisterWithMode_Validation(t *testing.T) { - t.Run("GitHub Script mode with execSync should return error", func(t *testing.T) { - registry := NewScriptRegistry() - invalidScript := ` -const { execSync } = require("child_process"); -execSync("ls -la"); -` - err := registry.RegisterWithMode("invalid_script", 
invalidScript, RuntimeModeGitHubScript) - require.Error(t, err, "Should return error when registering GitHub Script with execSync") - assert.Contains(t, err.Error(), "execSync", "Error should mention execSync") - }) - - t.Run("Node.js mode with GitHub Actions globals should return error", func(t *testing.T) { - registry := NewScriptRegistry() - invalidScript := ` -const fs = require("fs"); -core.info("This should not be here"); -` - err := registry.RegisterWithMode("invalid_script", invalidScript, RuntimeModeNodeJS) - require.Error(t, err, "Should return error when registering Node.js script with GitHub Actions globals") - assert.Contains(t, err.Error(), "GitHub Actions global", "Error should mention GitHub Actions globals") - }) - - t.Run("Valid GitHub Script mode should not return error", func(t *testing.T) { - registry := NewScriptRegistry() - validScript := ` -const { exec } = require("@actions/exec"); -core.info("This is valid for GitHub Script mode"); -` - err := registry.RegisterWithMode("valid_script", validScript, RuntimeModeGitHubScript) - assert.NoError(t, err, "Should not return error with valid GitHub Script") - }) - - t.Run("Valid Node.js mode should not return error", func(t *testing.T) { - registry := NewScriptRegistry() - validScript := ` -const fs = require("fs"); -const { execSync } = require("child_process"); -console.log("This is valid for Node.js mode"); -` - err := registry.RegisterWithMode("valid_script", validScript, RuntimeModeNodeJS) - assert.NoError(t, err, "Should not return error with valid Node.js script") - }) -} diff --git a/pkg/workflow/bundler_test.go b/pkg/workflow/bundler_test.go deleted file mode 100644 index 7f11094316..0000000000 --- a/pkg/workflow/bundler_test.go +++ /dev/null @@ -1,79 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestBundleJavaScriptFromSources tests bundling JavaScript from source map -// SKIPPED: Scripts are now loaded from external files at runtime using require() 
pattern -func TestBundleJavaScriptFromSources(t *testing.T) { - t.Skip("JavaScript bundling tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptFromSourcesWithoutRequires tests bundling without requires -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptFromSourcesWithoutRequires(t *testing.T) { - t.Skip("JavaScript bundling without requires tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestRemoveExports tests removing exports from JavaScript -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestRemoveExports(t *testing.T) { - t.Skip("Remove exports tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptFromSourcesWithMultipleRequires tests bundling with multiple requires -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptFromSourcesWithMultipleRequires(t *testing.T) { - t.Skip("JavaScript bundling with multiple requires tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptFromSourcesWithNestedPath tests bundling with nested paths -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptFromSourcesWithNestedPath(t *testing.T) { - t.Skip("JavaScript bundling with nested paths tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestValidateNoLocalRequires tests validation that no local requires remain -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestValidateNoLocalRequires(t *testing.T) { - t.Skip("Validate no local requires tests skipped - scripts now use require() pattern to load external files at runtime") 
-} - -// TestBundleJavaScriptValidationSuccess tests successful validation -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptValidationSuccess(t *testing.T) { - t.Skip("JavaScript bundling validation success tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptValidationFailure tests validation failure handling -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptValidationFailure(t *testing.T) { - t.Skip("JavaScript bundling validation failure tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptWithNpmPackages tests bundling with npm packages -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptWithNpmPackages(t *testing.T) { - t.Skip("JavaScript bundling with npm packages tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestRemoveExportsMultiLine tests removing multi-line exports -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestRemoveExportsMultiLine(t *testing.T) { - t.Skip("Remove multi-line exports tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestRemoveExportsConditional tests removing conditional exports -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestRemoveExportsConditional(t *testing.T) { - t.Skip("Remove conditional exports tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBundleJavaScriptMergesDestructuredImports tests merging destructured imports -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBundleJavaScriptMergesDestructuredImports(t 
*testing.T) { - t.Skip("JavaScript bundling destructured imports tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/cache_memory_integration_test.go b/pkg/workflow/cache_memory_integration_test.go index 47778730de..b920405868 100644 --- a/pkg/workflow/cache_memory_integration_test.go +++ b/pkg/workflow/cache_memory_integration_test.go @@ -39,7 +39,7 @@ tools: "# Cache memory file share configuration from frontmatter processed below", "- name: Create cache-memory directory", "- name: Cache cache-memory file share data", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", "key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}", "path: /tmp/gh-aw/cache-memory", "cat \"/opt/gh-aw/prompts/cache_memory_prompt.md\"", diff --git a/pkg/workflow/cache_memory_restore_only_test.go b/pkg/workflow/cache_memory_restore_only_test.go index 8c15e8aeb2..bae2efe814 100644 --- a/pkg/workflow/cache_memory_restore_only_test.go +++ b/pkg/workflow/cache_memory_restore_only_test.go @@ -3,6 +3,7 @@ package workflow import ( + "fmt" "os" "path/filepath" "strings" @@ -35,13 +36,13 @@ tools: expectedInLock: []string{ "# Cache memory file share configuration from frontmatter processed below", "- name: Restore cache-memory file share data", - "actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", // SHA varies, just check action name "key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}", "path: /tmp/gh-aw/cache-memory", }, notExpectedInLock: []string{ "- name: Upload cache-memory data as artifact", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + // Note: We can't use "uses: actions/cache@" here because cache/restore also matches }, }, { @@ -65,10 +66,10 @@ tools: expectedInLock: []string{ "# Cache memory file share configuration from frontmatter processed below", "- name: Cache cache-memory 
file share data (default)", - "actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", // SHA varies "key: memory-default-${{ github.run_id }}", "- name: Restore cache-memory file share data (readonly)", - "actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", // SHA varies "key: memory-readonly-${{ github.run_id }}", }, notExpectedInLock: []string{ @@ -103,9 +104,9 @@ tools: ---`, expectedInLock: []string{ "- name: Cache cache-memory file share data (writeable)", - "actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", // SHA varies "- name: Restore cache-memory file share data (readonly1)", - "actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", // SHA varies "- name: Restore cache-memory file share data (readonly2)", }, notExpectedInLock: []string{ @@ -149,14 +150,29 @@ tools: // Check expected strings are present for _, expected := range tt.expectedInLock { if !strings.Contains(lockStr, expected) { - t.Errorf("Expected to find '%s' in lock file but it was missing.\nLock file content:\n%s", expected, lockStr) + // Show a snippet of the lock file for context (first 100 lines) + lines := strings.Split(lockStr, "\n") + snippet := strings.Join(lines[:min(100, len(lines))], "\n") + t.Errorf("Expected to find '%s' in lock file but it was missing.\nFirst 100 lines of lock file:\n%s\n...(truncated)", expected, snippet) } } // Check unexpected strings are NOT present for _, notExpected := range tt.notExpectedInLock { if strings.Contains(lockStr, notExpected) { - t.Errorf("Did not expect to find '%s' in lock file but it was present.\nLock file content:\n%s", notExpected, lockStr) + // Find the line containing the unexpected string for context + lines := strings.Split(lockStr, "\n") + var contextLines []string + for i, line := range lines { + if strings.Contains(line, notExpected) { + start := max(0, i-3) + end := 
min(len(lines), i+4) + contextLines = append(contextLines, fmt.Sprintf("Lines %d-%d:", start+1, end)) + contextLines = append(contextLines, lines[start:end]...) + break + } + } + t.Errorf("Did not expect to find '%s' in lock file but it was present.\nContext:\n%s", notExpected, strings.Join(contextLines, "\n")) } } }) diff --git a/pkg/workflow/cache_memory_threat_detection_test.go b/pkg/workflow/cache_memory_threat_detection_test.go index 3f334047ca..eec16ac379 100644 --- a/pkg/workflow/cache_memory_threat_detection_test.go +++ b/pkg/workflow/cache_memory_threat_detection_test.go @@ -3,6 +3,7 @@ package workflow import ( + "fmt" "os" "path/filepath" "strings" @@ -42,7 +43,7 @@ Test workflow with cache-memory and threat detection enabled.`, expectedInLock: []string{ // In agent job, should use actions/cache/restore instead of actions/cache "- name: Restore cache-memory file share data", - "uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", "key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}", // Should upload artifact with if: always() "- name: Upload cache-memory data as artifact", @@ -55,7 +56,7 @@ Test workflow with cache-memory and threat detection enabled.`, "if: always() && needs.agent.outputs.detection_success == 'true'", "- name: Download cache-memory artifact (default)", "- name: Save cache-memory to cache (default)", - "uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/save@", }, notExpectedInLock: []string{ // Should NOT use regular actions/cache in agent job @@ -82,7 +83,7 @@ Test workflow with cache-memory but no threat detection.`, expectedInLock: []string{ // Without threat detection, should use regular actions/cache "- name: Cache cache-memory file share data", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", "key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}", }, 
notExpectedInLock: []string{ @@ -121,7 +122,7 @@ Test workflow with multiple cache-memory and threat detection enabled.`, expectedInLock: []string{ // Both caches should use restore "- name: Restore cache-memory file share data (default)", - "uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", "key: memory-default-${{ github.run_id }}", "- name: Restore cache-memory file share data (session)", "key: memory-session-${{ github.run_id }}", @@ -169,7 +170,7 @@ Test workflow with restore-only cache-memory and threat detection enabled.`, expectedInLock: []string{ // Should use restore for restore-only cache (no ID suffix for single default cache) "- name: Restore cache-memory file share data", - "uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache/restore@", }, notExpectedInLock: []string{ // Should NOT upload artifact for restore-only @@ -204,18 +205,35 @@ Test workflow with restore-only cache-memory and threat detection enabled.`, t.Fatalf("Failed to read lock file: %v", err) } lockContent := string(lockYAML) + lines := strings.Split(lockContent, "\n") // Check expected strings for _, expected := range tt.expectedInLock { if !strings.Contains(lockContent, expected) { - t.Errorf("Expected lock YAML to contain %q, but it didn't.\nGenerated YAML:\n%s", expected, lockContent) + // Show first 100 lines for context (not entire file) + preview := strings.Join(lines[:min(100, len(lines))], "\n") + if len(lines) > 100 { + preview += fmt.Sprintf("\n... 
(%d more lines)", len(lines)-100) + } + t.Errorf("Expected lock YAML to contain %q, but it didn't.\nFirst 100 lines:\n%s", expected, preview) } } // Check not expected strings for _, notExpected := range tt.notExpectedInLock { if strings.Contains(lockContent, notExpected) { - t.Errorf("Expected lock YAML NOT to contain %q, but it did.\nGenerated YAML:\n%s", notExpected, lockContent) + // Find the matching line and show context + matchIdx := -1 + for i, line := range lines { + if strings.Contains(line, notExpected) || strings.Contains(strings.Join(lines[max(0, i-1):min(len(lines), i+2)], "\n"), notExpected) { + matchIdx = i + break + } + } + start := max(0, matchIdx-3) + end := min(len(lines), matchIdx+4) + context := strings.Join(lines[start:end], "\n") + t.Errorf("Expected lock YAML NOT to contain %q, but it did.\nContext around match (lines %d-%d):\n%s", notExpected, start+1, end, context) } } }) diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index a1861819cd..fd1dd9a158 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -419,17 +419,24 @@ func ParseCheckoutConfigs(raw any) ([]*CheckoutConfig, error) { return nil, fmt.Errorf("checkout must be an object or an array of objects, got %T", raw) } - // Validate that at most one checkout has current: true. - // Multiple current checkouts are not allowed since only one repo can be - // the logical primary target for the agent at a time. - currentCount := 0 + // Validate that at most one logical checkout target has current: true. + // Multiple current checkouts are not allowed since only one repo/path pair can be + // the primary target for the agent at a time. Multiple configs that merge into the + // same (repository, path) pair are treated as a single logical checkout. 
+ currentTargets := make(map[string]struct{}) for _, cfg := range configs { - if cfg.Current { - currentCount++ + if !cfg.Current { + continue } + + repo := strings.TrimSpace(cfg.Repository) + path := strings.TrimSpace(cfg.Path) + key := repo + "\x00" + path + + currentTargets[key] = struct{}{} } - if currentCount > 1 { - return nil, fmt.Errorf("only one checkout may have current: true, found %d", currentCount) + if len(currentTargets) > 1 { + return nil, fmt.Errorf("only one checkout target may have current: true, found %d", len(currentTargets)) } return configs, nil @@ -460,6 +467,11 @@ func checkoutConfigFromMap(m map[string]any) (*CheckoutConfig, error) { if !ok { return nil, errors.New("checkout.path must be a string") } + // Normalize "." to empty string: both mean the workspace root and + // are treated identically by the checkout step generator. + if s == "." { + s = "" + } cfg.Path = s } diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index a5fb7cf811..f5c520e5c6 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -243,7 +243,7 @@ func TestParseCheckoutConfigs(t *testing.T) { configs, err := ParseCheckoutConfigs(raw) require.NoError(t, err, "array should parse without error") require.Len(t, configs, 2, "should produce two configs") - assert.Equal(t, ".", configs[0].Path, "first path should be set") + assert.Empty(t, configs[0].Path, "first path should be normalized from '.' 
to empty") assert.Equal(t, "owner/repo", configs[1].Repository, "second repo should be set") }) @@ -372,7 +372,7 @@ func TestCheckoutCurrentFlag(t *testing.T) { } _, err := ParseCheckoutConfigs(raw) require.Error(t, err, "multiple current: true should return error") - assert.Contains(t, err.Error(), "only one checkout may have current: true", "error should mention the constraint") + assert.Contains(t, err.Error(), "only one checkout target may have current: true", "error should mention the constraint") }) t.Run("single current: true in array is valid", func(t *testing.T) { @@ -500,7 +500,7 @@ func TestBuildCheckoutsPromptContent(t *testing.T) { t.Run("multiple checkouts all listed", func(t *testing.T) { content := buildCheckoutsPromptContent([]*CheckoutConfig{ - {Path: "."}, + {Path: ""}, {Repository: "owner/target", Path: "./target", Current: true}, {Repository: "owner/libs", Path: "./libs"}, }) diff --git a/pkg/workflow/codex_engine_test.go b/pkg/workflow/codex_engine_test.go index b01db5b917..9b9bbcc2e2 100644 --- a/pkg/workflow/codex_engine_test.go +++ b/pkg/workflow/codex_engine_test.go @@ -325,7 +325,8 @@ func TestCodexEngineRenderMCPConfig(t *testing.T) { "\"port\": $MCP_GATEWAY_PORT,", "\"domain\": \"${MCP_GATEWAY_DOMAIN}\",", "\"apiKey\": \"${MCP_GATEWAY_API_KEY}\",", - "\"payloadDir\": \"${MCP_GATEWAY_PAYLOAD_DIR}\"", + "\"payloadDir\": \"${MCP_GATEWAY_PAYLOAD_DIR}\",", + fmt.Sprintf("\"payloadSizeThreshold\": %d", constants.DefaultMCPGatewayPayloadSizeThreshold), "}", "}", "GH_AW_MCP_CONFIG_EOF", diff --git a/pkg/workflow/compile_outputs_pr_test.go b/pkg/workflow/compile_outputs_pr_test.go index 0207007860..815cbbe1d6 100644 --- a/pkg/workflow/compile_outputs_pr_test.go +++ b/pkg/workflow/compile_outputs_pr_test.go @@ -154,7 +154,7 @@ This workflow tests the create_pull_request job generation. 
t.Error("Expected 'Download patch artifact' step in create_pull_request job") } - if !strings.Contains(lockContentStr, "actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53") { + if !strings.Contains(lockContentStr, "actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3") { t.Error("Expected download-artifact action to be used in create_pull_request job") } diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go index 900b60290b..5cff00d3c2 100644 --- a/pkg/workflow/compiler.go +++ b/pkg/workflow/compiler.go @@ -208,7 +208,7 @@ func (c *Compiler) validateWorkflowData(workflowData *WorkflowData, markdownPath // Validate: threat detection requires sandbox.agent to be enabled (detection runs inside AWF) if workflowData.SafeOutputs != nil && workflowData.SafeOutputs.ThreatDetection != nil && isAgentSandboxDisabled(workflowData) { - return formatCompilerError(markdownPath, "error", "threat detection requires sandbox.agent to be enabled. Threat detection runs inside the agent sandbox (AWF) with fully blocked network. Either enable sandbox.agent or remove the threat-detection configuration from safe-outputs.", errors.New("threat detection requires sandbox.agent")) + return formatCompilerError(markdownPath, "error", "threat detection requires sandbox.agent to be enabled. Threat detection runs inside the agent sandbox (AWF) with fully blocked network. 
Either enable sandbox.agent or use 'threat-detection: false' to disable the threat-detection configuration in safe-outputs.", errors.New("threat detection requires sandbox.agent")) } // Emit experimental warning for safe-inputs feature diff --git a/pkg/workflow/compiler_action_mode_test.go b/pkg/workflow/compiler_action_mode_test.go index fb0e3c7a77..5ebcf5fbf4 100644 --- a/pkg/workflow/compiler_action_mode_test.go +++ b/pkg/workflow/compiler_action_mode_test.go @@ -4,11 +4,7 @@ package workflow import ( "os" - "strings" "testing" - - "github.com/github/gh-aw/pkg/stringutil" - "github.com/stretchr/testify/require" ) // TestActionModeDetection tests the DetectActionMode function @@ -274,186 +270,3 @@ func TestActionModeDetectionWithReleaseFlag(t *testing.T) { }) } } - -// TestReleaseModeCompilation tests workflow compilation in release mode -// Note: This test uses create_issue which already has ScriptName set. -// Other safe outputs (add_labels, etc.) don't have ScriptName yet and will use inline mode. -func TestReleaseModeCompilation(t *testing.T) { - // Create a temporary directory for the test - tempDir := t.TempDir() - - // Save original environment - origSHA := os.Getenv("GITHUB_SHA") - origRef := os.Getenv("GITHUB_REF") - defer func() { - if origSHA != "" { - os.Setenv("GITHUB_SHA", origSHA) - } else { - os.Unsetenv("GITHUB_SHA") - } - if origRef != "" { - os.Setenv("GITHUB_REF", origRef) - } else { - os.Unsetenv("GITHUB_REF") - } - }() - - // Set release tag for testing - os.Setenv("GITHUB_REF", "refs/tags/v1.0.0") // Simulate release tag for auto-detection - - // Create a test workflow file - workflowContent := `--- -name: Test Release Mode -on: issues -safe-outputs: - create-issue: - max: 1 ---- - -Test workflow with release mode. 
-` - - workflowPath := tempDir + "/test-workflow.md" - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write test workflow: %v", err) - } - - // Save the original script to restore after test - origScript := DefaultScriptRegistry.Get("create_issue") - origActionPath := DefaultScriptRegistry.GetActionPath("create_issue") - - // Register test script with action path - testScript := `const { core } = require('@actions/core'); core.info('test');` - err := DefaultScriptRegistry.RegisterWithAction( - "create_issue", - testScript, - RuntimeModeGitHubScript, - "./actions/create-issue", - ) - require.NoError(t, err) - - // Restore after test - defer func() { - if origActionPath != "" { - _ = DefaultScriptRegistry.RegisterWithAction("create_issue", origScript, RuntimeModeGitHubScript, origActionPath) - } else { - _ = DefaultScriptRegistry.RegisterWithMode("create_issue", origScript, RuntimeModeGitHubScript) - } - }() - - // Compile - should auto-detect release mode from GITHUB_REF - compiler := NewCompilerWithVersion("1.0.0") - // Don't set action mode explicitly - let it auto-detect - compiler.SetActionMode(DetectActionMode("1.0.0")) - compiler.SetNoEmit(false) - - if compiler.GetActionMode() != ActionModeRelease { - t.Fatalf("Expected auto-detected release mode, got %s", compiler.GetActionMode()) - } - - if err := compiler.CompileWorkflow(workflowPath); err != nil { - t.Fatalf("Compilation failed: %v", err) - } - - // Read lock file - lockPath := stringutil.MarkdownToLockFile(workflowPath) - lockContent, err := os.ReadFile(lockPath) - if err != nil { - t.Fatalf("Failed to read lock file: %v", err) - } - - lockStr := string(lockContent) - - // Verify safe_outputs job exists (consolidated mode) - if !strings.Contains(lockStr, "safe_outputs:") { - t.Error("Expected safe_outputs job in compiled workflow") - } - - // Verify handler manager step is present (create_issue is now handled by handler manager) - if 
!strings.Contains(lockStr, "id: process_safe_outputs") { - t.Error("Expected process_safe_outputs step in compiled workflow (create-issue is now handled by handler manager)") - } - // Verify handler config contains create_issue - if !strings.Contains(lockStr, "create_issue") { - t.Error("Expected create_issue in handler config") - } -} - -// TestDevModeCompilation tests workflow compilation in dev mode -// Note: This test uses create_issue which already has ScriptName set. -func TestDevModeCompilation(t *testing.T) { - tempDir := t.TempDir() - - // Save original environment - origRef := os.Getenv("GITHUB_REF") - defer os.Setenv("GITHUB_REF", origRef) - - // Set environment for dev mode - os.Setenv("GITHUB_REF", "") // Local development (no GITHUB_REF) - - workflowContent := `--- -name: Test Dev Mode -on: issues -safe-outputs: - create-issue: - max: 1 ---- - -Test -` - - workflowPath := tempDir + "/test-workflow.md" - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write workflow: %v", err) - } - - // Save original script - origScript := DefaultScriptRegistry.Get("create_issue") - origActionPath := DefaultScriptRegistry.GetActionPath("create_issue") - - testScript := `const { core } = require('@actions/core'); core.info('test');` - err := DefaultScriptRegistry.RegisterWithAction("create_issue", testScript, RuntimeModeGitHubScript, "./actions/create-issue") - require.NoError(t, err) - - defer func() { - if origActionPath != "" { - _ = DefaultScriptRegistry.RegisterWithAction("create_issue", origScript, RuntimeModeGitHubScript, origActionPath) - } else { - _ = DefaultScriptRegistry.RegisterWithMode("create_issue", origScript, RuntimeModeGitHubScript) - } - }() - - compiler := NewCompilerWithVersion("1.0.0") - compiler.SetActionMode(DetectActionMode("dev")) - compiler.SetNoEmit(false) - - if compiler.GetActionMode() != ActionModeDev { - t.Fatalf("Expected auto-detected dev mode, got %s", compiler.GetActionMode()) 
- } - - if err := compiler.CompileWorkflow(workflowPath); err != nil { - t.Fatalf("Compilation failed: %v", err) - } - - lockPath := stringutil.MarkdownToLockFile(workflowPath) - lockContent, err := os.ReadFile(lockPath) - if err != nil { - t.Fatalf("Failed to read lock file: %v", err) - } - - lockStr := string(lockContent) - - // Verify safe_outputs job exists (consolidated mode) - if !strings.Contains(lockStr, "safe_outputs:") { - t.Error("Expected safe_outputs job in compiled workflow") - } - - // Verify handler manager step is present (create_issue is now handled by handler manager) - if !strings.Contains(lockStr, "id: process_safe_outputs") { - t.Error("Expected process_safe_outputs step in compiled workflow (create-issue is now handled by handler manager)") - } - // Verify handler config contains create_issue - if !strings.Contains(lockStr, "create_issue") { - t.Error("Expected create_issue in handler config") - } -} diff --git a/pkg/workflow/compiler_activation_jobs.go b/pkg/workflow/compiler_activation_jobs.go index 8f4b2b9898..8e75f1c5cc 100644 --- a/pkg/workflow/compiler_activation_jobs.go +++ b/pkg/workflow/compiler_activation_jobs.go @@ -1026,12 +1026,10 @@ func (c *Compiler) generateCheckoutGitHubFolderForActivation(data *WorkflowData) } } - // Check if we have contents permission - without it, checkout is not possible - permParser := NewPermissionsParser(data.Permissions) - if !permParser.HasContentsReadAccess() { - compilerActivationJobsLog.Print("Skipping .github checkout in activation: no contents read access") - return nil - } + // Note: We don't check data.Permissions for contents read access here because + // the activation job ALWAYS gets contents:read added to its permissions (see buildActivationJob + // around line 720). The workflow's original permissions may not include contents:read, + // but the activation job will always have it for GitHub API access and runtime imports. 
// For activation job, always add sparse checkout of .github and .agents folders // This is needed for runtime imports during prompt generation diff --git a/pkg/workflow/compiler_artifacts_test.go b/pkg/workflow/compiler_artifacts_test.go index d0f46b2e57..7b3b5e3486 100644 --- a/pkg/workflow/compiler_artifacts_test.go +++ b/pkg/workflow/compiler_artifacts_test.go @@ -118,7 +118,7 @@ post-steps: - name: First Post Step run: echo "first" - name: Second Post Step - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + uses: actions/upload-artifact@v4 # SHA will be pinned with: name: test-artifact path: test-file.txt @@ -272,8 +272,8 @@ This workflow should generate a unified artifact upload step that includes the p } // Verify the upload step uses the correct action - if !strings.Contains(lockYAML, "uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f") { - t.Error("Expected 'actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f' action to be used") + if !strings.Contains(lockYAML, "uses: actions/upload-artifact@") { // SHA varies + t.Error("Expected 'actions/upload-artifact' action to be used") } // Verify the unified artifact name diff --git a/pkg/workflow/compiler_cache_test.go b/pkg/workflow/compiler_cache_test.go index 65824476ae..c768d97347 100644 --- a/pkg/workflow/compiler_cache_test.go +++ b/pkg/workflow/compiler_cache_test.go @@ -3,6 +3,7 @@ package workflow import ( + "fmt" "os" "path/filepath" "strings" @@ -46,7 +47,7 @@ tools: "# Cache configuration from frontmatter was processed and added to the main job steps", "# Cache configuration from frontmatter processed below", "- name: Cache", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", // SHA varies "key: node-modules-${{ hashFiles('package-lock.json') }}", "path: node_modules", "restore-keys: node-modules-", @@ -89,7 +90,7 @@ tools: "# Cache configuration from frontmatter processed below", "- name: Cache 
(node-modules-${{ hashFiles('package-lock.json') }})", "- name: Cache (build-cache-${{ github.sha }})", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", // SHA varies "key: node-modules-${{ hashFiles('package-lock.json') }}", "key: build-cache-${{ github.sha }}", "path: node_modules", @@ -131,7 +132,7 @@ tools: expectedInLock: []string{ "# Cache configuration from frontmatter processed below", "- name: Cache", - "uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830", + "uses: actions/cache@", // SHA varies "key: full-cache-${{ github.sha }}", "path: dist", "restore-keys: |", @@ -180,7 +181,10 @@ tools: // Check that expected strings are present for _, expected := range tt.expectedInLock { if !strings.Contains(lockContent, expected) { - t.Errorf("Expected lock file to contain '%s' but it didn't.\nContent:\n%s", expected, lockContent) + // Show a snippet of the lock file for context (first 100 lines) + lines := strings.Split(lockContent, "\n") + snippet := strings.Join(lines[:min(100, len(lines))], "\n") + t.Errorf("Expected lock file to contain '%s' but it didn't.\nFirst 100 lines:\n%s\n...(truncated)", expected, snippet) } } @@ -188,7 +192,19 @@ tools: // (frontmatter is embedded as comments, so we need to exclude comment lines) for _, notExpected := range tt.notExpectedInLock { if containsInNonCommentLines(lockContent, notExpected) { - t.Errorf("Lock file should NOT contain '%s' in non-comment lines but it did.\nContent:\n%s", notExpected, lockContent) + // Find the line containing the unexpected string for context + lines := strings.Split(lockContent, "\n") + var contextLines []string + for i, line := range lines { + if strings.Contains(line, strings.TrimSpace(notExpected)) { + start := max(0, i-3) + end := min(len(lines), i+4) + contextLines = append(contextLines, fmt.Sprintf("Lines %d-%d:", start+1, end)) + contextLines = append(contextLines, lines[start:end]...) 
+ break + } + } + t.Errorf("Lock file should NOT contain '%s' in non-comment lines but it did.\nContext:\n%s", notExpected, strings.Join(contextLines, "\n")) } } }) @@ -247,7 +263,10 @@ This workflow should get default permissions applied automatically. for _, expectedPerm := range expectedDefaultPermissions { if !strings.Contains(lockContentStr, expectedPerm) { - t.Errorf("Expected default permission '%s' not found in generated workflow.\nGenerated content:\n%s", expectedPerm, lockContentStr) + // Show first 100 lines for context + lines := strings.Split(lockContentStr, "\n") + snippet := strings.Join(lines[:min(100, len(lines))], "\n") + t.Errorf("Expected default permission '%s' not found in generated workflow.\nFirst 100 lines:\n%s\n...(truncated)", expectedPerm, snippet) } } @@ -430,7 +449,19 @@ This workflow has custom permissions that should override defaults. for _, defaultPerm := range defaultOnlyPermissions { if strings.Contains(lockContentStr, defaultPerm) { - t.Errorf("Default permission '%s' should not be present when custom permissions are specified.\nGenerated content:\n%s", defaultPerm, lockContentStr) + // Find the line containing the unexpected permission for context + lines := strings.Split(lockContentStr, "\n") + var contextLines []string + for i, line := range lines { + if strings.Contains(line, defaultPerm) { + start := max(0, i-3) + end := min(len(lines), i+4) + contextLines = append(contextLines, fmt.Sprintf("Lines %d-%d:", start+1, end)) + contextLines = append(contextLines, lines[start:end]...) 
+ break + } + } + t.Errorf("Default permission '%s' should not be present when custom permissions are specified.\nContext:\n%s", defaultPerm, strings.Join(contextLines, "\n")) } } } diff --git a/pkg/workflow/compiler_custom_actions_test.go b/pkg/workflow/compiler_custom_actions_test.go index 33c61a5c73..b09c327e02 100644 --- a/pkg/workflow/compiler_custom_actions_test.go +++ b/pkg/workflow/compiler_custom_actions_test.go @@ -8,7 +8,6 @@ import ( "testing" "github.com/github/gh-aw/pkg/stringutil" - "github.com/stretchr/testify/require" ) // TestActionModeValidation tests the ActionMode type validation @@ -101,130 +100,6 @@ func TestActionModeIsScript(t *testing.T) { } } -// TestScriptRegistryWithAction tests registering scripts with action paths -func TestScriptRegistryWithAction(t *testing.T) { - registry := NewScriptRegistry() - - testScript := `console.log('test');` - actionPath := "./actions/test-action" - - err := registry.RegisterWithAction("test_script", testScript, RuntimeModeGitHubScript, actionPath) - require.NoError(t, err) - - if !registry.Has("test_script") { - t.Error("Script should be registered") - } - - if got := registry.GetActionPath("test_script"); got != actionPath { - t.Errorf("Expected action path %q, got %q", actionPath, got) - } - - if got := registry.GetSource("test_script"); got != testScript { - t.Errorf("Expected source %q, got %q", testScript, got) - } -} - -// TestScriptRegistryActionPathEmpty tests that scripts without action paths return empty string -func TestScriptRegistryActionPathEmpty(t *testing.T) { - registry := NewScriptRegistry() - - testScript := `console.log('test');` - registry.Register("test_script", testScript) - - if got := registry.GetActionPath("test_script"); got != "" { - t.Errorf("Expected empty action path, got %q", got) - } -} - -// TestCustomActionModeCompilation tests workflow compilation with custom action mode -func TestCustomActionModeCompilation(t *testing.T) { - // Create a temporary directory for the 
test - tempDir := t.TempDir() - - // Create a test workflow file - workflowContent := `--- -name: Test Custom Actions -on: issues -safe-outputs: - create-issue: - max: 1 ---- - -Test workflow with safe-outputs. -` - - workflowPath := tempDir + "/test-workflow.md" - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write test workflow: %v", err) - } - - // Register a test script with an action path - // Save original state first - origSource := DefaultScriptRegistry.GetSource("create_issue") - origActionPath := DefaultScriptRegistry.GetActionPath("create_issue") - - testScript := ` -const { core } = require('@actions/core'); -core.info('Creating issue'); -` - err := DefaultScriptRegistry.RegisterWithAction( - "create_issue", - testScript, - RuntimeModeGitHubScript, - "./actions/create-issue", - ) - require.NoError(t, err) - - // Restore after test - defer func() { - if origSource != "" { - if origActionPath != "" { - _ = DefaultScriptRegistry.RegisterWithAction("create_issue", origSource, RuntimeModeGitHubScript, origActionPath) - } else { - _ = DefaultScriptRegistry.RegisterWithMode("create_issue", origSource, RuntimeModeGitHubScript) - } - } - }() - - // Compile with dev action mode - compiler := NewCompilerWithVersion("1.0.0") - compiler.SetActionMode(ActionModeDev) - compiler.SetNoEmit(false) - - if err := compiler.CompileWorkflow(workflowPath); err != nil { - t.Fatalf("Compilation failed: %v", err) - } - - // Read the generated lock file - lockPath := stringutil.MarkdownToLockFile(workflowPath) - lockContent, err := os.ReadFile(lockPath) - if err != nil { - t.Fatalf("Failed to read lock file: %v", err) - } - - lockStr := string(lockContent) - - // Verify safe_outputs job exists (consolidated mode) - found := strings.Contains(lockStr, "safe_outputs:") - if !found { - t.Fatal("safe_outputs job not found in lock file") - } - - // Verify handler manager step is present (create_issue is now handled by handler 
manager) - if !strings.Contains(lockStr, "id: process_safe_outputs") { - t.Error("Expected process_safe_outputs step in compiled workflow (create-issue is now handled by handler manager)") - } - // Verify handler config contains create_issue - if !strings.Contains(lockStr, "create_issue") { - t.Error("Expected create_issue in handler config") - } - - // Verify the workflow compiles successfully with custom action mode - if !strings.Contains(lockStr, "actions/github-script") { - t.Error("Expected github-script action in compiled workflow") - } -} - // TestInlineActionModeCompilation tests workflow compilation with inline mode (default) func TestInlineActionModeCompilation(t *testing.T) { // Create a temporary directory for the test @@ -281,73 +156,6 @@ Test workflow with dev mode. } } -// TestCustomActionModeFallback tests that compilation falls back to inline mode -// when action path is not registered -func TestCustomActionModeFallback(t *testing.T) { - // Create a temporary directory for the test - tempDir := t.TempDir() - - // Create a test workflow file - workflowContent := `--- -name: Test Fallback -on: issues -safe-outputs: - create-issue: - max: 1 ---- - -Test fallback to inline mode. 
-` - - workflowPath := tempDir + "/test-workflow.md" - if err := os.WriteFile(workflowPath, []byte(workflowContent), 0644); err != nil { - t.Fatalf("Failed to write test workflow: %v", err) - } - - // Ensure create_issue is registered without an action path - // Save original state first - origSource := DefaultScriptRegistry.GetSource("create_issue") - origActionPath := DefaultScriptRegistry.GetActionPath("create_issue") - - testScript := `console.log('test');` - err := DefaultScriptRegistry.RegisterWithMode("create_issue", testScript, RuntimeModeGitHubScript) - require.NoError(t, err) - - // Restore after test - defer func() { - if origSource != "" { - if origActionPath != "" { - _ = DefaultScriptRegistry.RegisterWithAction("create_issue", origSource, RuntimeModeGitHubScript, origActionPath) - } else { - _ = DefaultScriptRegistry.RegisterWithMode("create_issue", origSource, RuntimeModeGitHubScript) - } - } - }() - - // Compile with dev action mode - compiler := NewCompilerWithVersion("1.0.0") - compiler.SetActionMode(ActionModeDev) - compiler.SetNoEmit(false) - - if err := compiler.CompileWorkflow(workflowPath); err != nil { - t.Fatalf("Compilation failed: %v", err) - } - - // Read the generated lock file - lockPath := stringutil.MarkdownToLockFile(workflowPath) - lockContent, err := os.ReadFile(lockPath) - if err != nil { - t.Fatalf("Failed to read lock file: %v", err) - } - - lockStr := string(lockContent) - - // Verify it falls back to actions/github-script when action path is not found - if !strings.Contains(lockStr, "actions/github-script@") { - t.Error("Expected fallback to 'actions/github-script@' when action path not found") - } -} - // TestScriptActionModeCompilation tests workflow compilation with script mode func TestScriptActionModeCompilation(t *testing.T) { // Create a temporary directory for the test diff --git a/pkg/workflow/compiler_customsteps_test.go b/pkg/workflow/compiler_customsteps_test.go index ad5e5c8e8e..d01c33637f 100644 --- 
a/pkg/workflow/compiler_customsteps_test.go +++ b/pkg/workflow/compiler_customsteps_test.go @@ -26,9 +26,9 @@ func TestCustomStepsIndentation(t *testing.T) { name: "standard_2_space_indentation", stepsYAML: `steps: - name: Checkout code - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + uses: actions/checkout@v5 - name: Set up Go - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c + uses: actions/setup-go@v5 with: go-version-file: go.mod cache: true`, @@ -38,7 +38,7 @@ func TestCustomStepsIndentation(t *testing.T) { name: "odd_3_space_indentation", stepsYAML: `steps: - name: Odd indent - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + uses: actions/checkout@v5 with: param: value`, description: "3-space indentation should be normalized to standard format", diff --git a/pkg/workflow/compiler_orchestrator_workflow.go b/pkg/workflow/compiler_orchestrator_workflow.go index 6d7b0653e7..289e43fb88 100644 --- a/pkg/workflow/compiler_orchestrator_workflow.go +++ b/pkg/workflow/compiler_orchestrator_workflow.go @@ -73,6 +73,11 @@ func (c *Compiler) ParseWorkflowFile(markdownPath string) (*WorkflowData, error) return nil, fmt.Errorf("%s: %w", cleanPath, err) } + // Validate GitHub guard policy configuration + if err := validateGitHubGuardPolicy(workflowData.ParsedTools, workflowData.Name); err != nil { + return nil, fmt.Errorf("%s: %w", cleanPath, err) + } + // Use shared action cache and resolver from the compiler actionCache, actionResolver := c.getSharedActionResolver() workflowData.ActionCache = actionCache diff --git a/pkg/workflow/compiler_string_api.go b/pkg/workflow/compiler_string_api.go index 30fe1c61c7..94f65371dc 100644 --- a/pkg/workflow/compiler_string_api.go +++ b/pkg/workflow/compiler_string_api.go @@ -130,6 +130,11 @@ func (c *Compiler) ParseWorkflowString(content string, virtualPath string) (*Wor return nil, fmt.Errorf("%s: %w", cleanPath, err) } + // Validate GitHub guard policy configuration + if err := 
validateGitHubGuardPolicy(workflowData.ParsedTools, workflowData.Name); err != nil { + return nil, fmt.Errorf("%s: %w", cleanPath, err) + } + // Setup action cache and resolver actionCache, actionResolver := c.getSharedActionResolver() workflowData.ActionCache = actionCache diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go index 7857b5245d..f96fae0b86 100644 --- a/pkg/workflow/compiler_types.go +++ b/pkg/workflow/compiler_types.go @@ -32,11 +32,6 @@ func WithVersion(version string) CompilerOption { return func(c *Compiler) { c.version = version } } -// WithActionMode overrides the auto-detected action mode -func WithActionMode(mode ActionMode) CompilerOption { - return func(c *Compiler) { c.actionMode = mode } -} - // WithSkipValidation configures whether to skip schema validation func WithSkipValidation(skip bool) CompilerOption { return func(c *Compiler) { c.skipValidation = skip } @@ -67,23 +62,6 @@ func WithWorkflowIdentifier(identifier string) CompilerOption { return func(c *Compiler) { c.workflowIdentifier = identifier } } -// WithRepositorySlug sets the repository slug for schedule scattering -func WithRepositorySlug(slug string) CompilerOption { - return func(c *Compiler) { c.repositorySlug = slug } -} - -// WithGitRoot sets the git repository root directory for action cache path -func WithGitRoot(gitRoot string) CompilerOption { - return func(c *Compiler) { c.gitRoot = gitRoot } -} - -// WithInlinePrompt configures whether to inline markdown content directly in the compiled YAML -// instead of using runtime-import macros. This is required for Wasm/browser builds where -// the filesystem is unavailable at runtime. 
-func WithInlinePrompt(inline bool) CompilerOption { - return func(c *Compiler) { c.inlinePrompt = inline } -} - // FileTracker interface for tracking files created during compilation type FileTracker interface { TrackCreated(filePath string) @@ -99,11 +77,6 @@ func SetDefaultVersion(version string) { defaultVersion = version } -// GetDefaultVersion returns the default version -func GetDefaultVersion() string { - return defaultVersion -} - // Compiler handles converting markdown workflows to GitHub Actions YAML type Compiler struct { verbose bool @@ -281,21 +254,11 @@ func (c *Compiler) SetWorkflowIdentifier(identifier string) { c.workflowIdentifier = identifier } -// GetWorkflowIdentifier returns the current workflow identifier -func (c *Compiler) GetWorkflowIdentifier() string { - return c.workflowIdentifier -} - // SetRepositorySlug sets the repository slug for schedule scattering func (c *Compiler) SetRepositorySlug(slug string) { c.repositorySlug = slug } -// GetRepositorySlug returns the repository slug -func (c *Compiler) GetRepositorySlug() string { - return c.repositorySlug -} - // GetScheduleWarnings returns all accumulated schedule warnings for this compiler instance func (c *Compiler) GetScheduleWarnings() []string { return c.scheduleWarnings diff --git a/pkg/workflow/copilot_participant_steps.go b/pkg/workflow/copilot_participant_steps.go deleted file mode 100644 index d7fa717f81..0000000000 --- a/pkg/workflow/copilot_participant_steps.go +++ /dev/null @@ -1,153 +0,0 @@ -package workflow - -import ( - "fmt" - "slices" - - "github.com/github/gh-aw/pkg/logger" -) - -var copilotParticipantLog = logger.New("workflow:copilot_participant_steps") - -// CopilotParticipantConfig holds configuration for generating Copilot participant steps -type CopilotParticipantConfig struct { - // Participants is the list of users/bots to assign/review - Participants []string - // ParticipantType is either "assignee" or "reviewer" - ParticipantType string - // CustomToken is 
the custom GitHub token from the safe output config - CustomToken string - // SafeOutputsToken is the GitHub token from the safe-outputs config - SafeOutputsToken string - // ConditionStepID is the step ID to check for output (e.g., "create_issue", "create_pull_request") - ConditionStepID string - // ConditionOutputKey is the output key to check (e.g., "issue_number", "pull_request_url") - ConditionOutputKey string -} - -// buildCopilotParticipantSteps generates steps for adding Copilot participants (assignees or reviewers) -// This function extracts the common logic between issue assignees and PR reviewers -func buildCopilotParticipantSteps(config CopilotParticipantConfig) []string { - copilotParticipantLog.Printf("Building Copilot participant steps: type=%s, count=%d", config.ParticipantType, len(config.Participants)) - - if len(config.Participants) == 0 { - copilotParticipantLog.Print("No participants to add, returning empty steps") - return nil - } - - var steps []string - - // Add checkout step for gh CLI to work - steps = append(steps, " - name: Checkout repository for gh CLI\n") - steps = append(steps, fmt.Sprintf(" if: steps.%s.outputs.%s != ''\n", config.ConditionStepID, config.ConditionOutputKey)) - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/checkout"))) - steps = append(steps, " with:\n") - steps = append(steps, " persist-credentials: false\n") - - // Check if any participant is "copilot" to determine token preference - hasCopilotParticipant := slices.Contains(config.Participants, "copilot") - - // Choose the first non-empty custom token for precedence - effectiveCustomToken := config.CustomToken - if effectiveCustomToken == "" { - effectiveCustomToken = config.SafeOutputsToken - } - - // Use agent token preference if adding copilot as participant, otherwise use regular token - var effectiveToken string - if hasCopilotParticipant { - copilotParticipantLog.Print("Using Copilot coding agent token preference") - effectiveToken = 
getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken) - } else { - copilotParticipantLog.Print("Using regular GitHub token") - effectiveToken = getEffectiveGitHubToken(effectiveCustomToken) - } - - // Generate participant-specific steps - switch config.ParticipantType { - case "assignee": - copilotParticipantLog.Printf("Generating issue assignee steps for %d participants", len(config.Participants)) - steps = append(steps, buildIssueAssigneeSteps(config, effectiveToken)...) - case "reviewer": - copilotParticipantLog.Printf("Generating PR reviewer steps for %d participants", len(config.Participants)) - steps = append(steps, buildPRReviewerSteps(config, effectiveToken)...) - } - - return steps -} - -// buildIssueAssigneeSteps generates steps for assigning issues -func buildIssueAssigneeSteps(config CopilotParticipantConfig, effectiveToken string) []string { - var steps []string - - for i, assignee := range config.Participants { - // Special handling: "copilot" should be passed as "@copilot" to gh CLI - actualAssignee := assignee - if assignee == "copilot" { - actualAssignee = "@copilot" - } - - steps = append(steps, fmt.Sprintf(" - name: Assign issue to %s\n", assignee)) - steps = append(steps, fmt.Sprintf(" if: steps.%s.outputs.%s != ''\n", config.ConditionStepID, config.ConditionOutputKey)) - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script"))) - steps = append(steps, " env:\n") - steps = append(steps, fmt.Sprintf(" GH_TOKEN: %s\n", effectiveToken)) - steps = append(steps, fmt.Sprintf(" ASSIGNEE: %q\n", actualAssignee)) - steps = append(steps, fmt.Sprintf(" ISSUE_NUMBER: ${{ steps.%s.outputs.%s }}\n", config.ConditionStepID, config.ConditionOutputKey)) - steps = append(steps, " with:\n") - steps = append(steps, " script: |\n") - steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n") - steps = append(steps, " setupGlobals(core, github, context, exec, io);\n") - // 
Load script from external file using require() - steps = append(steps, " const { main } = require('/opt/gh-aw/actions/assign_issue.cjs');\n") - steps = append(steps, " await main({ github, context, core, exec, io });\n") - - // Add a comment after each assignee step except the last - if i < len(config.Participants)-1 { - steps = append(steps, "\n") - } - } - - return steps -} - -// buildPRReviewerSteps generates steps for adding PR reviewers -func buildPRReviewerSteps(config CopilotParticipantConfig, effectiveToken string) []string { - var steps []string - - for i, reviewer := range config.Participants { - // Special handling: "copilot" uses the GitHub API with "copilot-pull-request-reviewer[bot]" - // because gh pr edit --add-reviewer does not support @copilot - if reviewer == "copilot" { - steps = append(steps, fmt.Sprintf(" - name: Add %s as reviewer\n", reviewer)) - steps = append(steps, " if: steps.create_pull_request.outputs.pull_request_number != ''\n") - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script"))) - steps = append(steps, " env:\n") - steps = append(steps, " PR_NUMBER: ${{ steps.create_pull_request.outputs.pull_request_number }}\n") - steps = append(steps, " with:\n") - steps = append(steps, fmt.Sprintf(" github-token: %s\n", effectiveToken)) - steps = append(steps, " script: |\n") - steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n") - steps = append(steps, " setupGlobals(core, github, context, exec, io);\n") - // Load script from external file using require() - steps = append(steps, " const { main } = require('/opt/gh-aw/actions/add_copilot_reviewer.cjs');\n") - steps = append(steps, " await main({ github, context, core, exec, io });\n") - } else { - steps = append(steps, fmt.Sprintf(" - name: Add %s as reviewer\n", reviewer)) - steps = append(steps, " if: steps.create_pull_request.outputs.pull_request_url != ''\n") - steps = append(steps, " env:\n") - 
steps = append(steps, fmt.Sprintf(" GH_TOKEN: %s\n", effectiveToken)) - steps = append(steps, fmt.Sprintf(" REVIEWER: %q\n", reviewer)) - steps = append(steps, " PR_URL: ${{ steps.create_pull_request.outputs.pull_request_url }}\n") - steps = append(steps, " run: |\n") - steps = append(steps, " gh pr edit \"$PR_URL\" --add-reviewer \"$REVIEWER\"\n") - } - - // Add a comment after each reviewer step except the last - if i < len(config.Participants)-1 { - steps = append(steps, "\n") - } - } - - return steps -} diff --git a/pkg/workflow/copilot_participant_steps_test.go b/pkg/workflow/copilot_participant_steps_test.go deleted file mode 100644 index 5cc8925d86..0000000000 --- a/pkg/workflow/copilot_participant_steps_test.go +++ /dev/null @@ -1,49 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -// TestBuildCopilotParticipantSteps_EmptyParticipants tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_EmptyParticipants(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_IssueAssignee tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_IssueAssignee(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_CopilotAssignee tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_CopilotAssignee(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_PRReviewer tests workflow functionality -// SKIPPED: Scripts are 
now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_PRReviewer(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_CopilotReviewer tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_CopilotReviewer(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_CustomToken tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_CustomToken(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} - -// TestBuildCopilotParticipantSteps_MixedParticipants tests workflow functionality -// SKIPPED: Scripts are now loaded from external files at runtime using require() pattern -func TestBuildCopilotParticipantSteps_MixedParticipants(t *testing.T) { - t.Skip("Workflow tests skipped - scripts now use require() pattern to load external files at runtime") -} diff --git a/pkg/workflow/create_issue.go b/pkg/workflow/create_issue.go index aaccca04ef..6e94d40aff 100644 --- a/pkg/workflow/create_issue.go +++ b/pkg/workflow/create_issue.go @@ -1,8 +1,6 @@ package workflow import ( - "errors" - "fmt" "slices" "github.com/github/gh-aw/pkg/logger" @@ -97,150 +95,3 @@ func (c *Compiler) parseIssuesConfig(outputMap map[string]any) *CreateIssuesConf func hasCopilotAssignee(assignees []string) bool { return slices.Contains(assignees, "copilot") } - -// filterNonCopilotAssignees returns assignees excluding "copilot" -func filterNonCopilotAssignees(assignees []string) []string { - var result []string - for _, a := range assignees { - if a != "copilot" { - 
result = append(result, a) - } - } - return result -} - -// buildCopilotCodingAgentAssignmentStep generates a post-step for assigning Copilot coding agent to created issues -// This step uses the agent token with full precedence chain -func buildCopilotCodingAgentAssignmentStep(configToken, safeOutputsToken string) []string { - var steps []string - - // Choose the first non-empty custom token for precedence - effectiveCustomToken := configToken - if effectiveCustomToken == "" { - effectiveCustomToken = safeOutputsToken - } - - // Get the effective agent token with full precedence chain - effectiveToken := getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken) - - steps = append(steps, " - name: Assign Copilot to created issues\n") - steps = append(steps, " if: steps.create_issue.outputs.issues_to_assign_copilot != ''\n") - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script"))) - steps = append(steps, " with:\n") - steps = append(steps, fmt.Sprintf(" github-token: %s\n", effectiveToken)) - steps = append(steps, " script: |\n") - steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n") - steps = append(steps, " setupGlobals(core, github, context, exec, io);\n") - // Load script from external file using require() - steps = append(steps, " const { main } = require('/opt/gh-aw/actions/assign_copilot_to_created_issues.cjs');\n") - steps = append(steps, " await main({ github, context, core, exec, io });\n") - - return steps -} - -// buildCreateOutputIssueJob creates the create_issue job -func (c *Compiler) buildCreateOutputIssueJob(data *WorkflowData, mainJobName string) (*Job, error) { - if data.SafeOutputs == nil || data.SafeOutputs.CreateIssues == nil { - return nil, errors.New("safe-outputs.create-issue configuration is required") - } - - if createIssueLog.Enabled() { - createIssueLog.Printf("Building create-issue job: workflow=%s, main_job=%s, assignees=%d, labels=%d", - 
data.Name, mainJobName, len(data.SafeOutputs.CreateIssues.Assignees), len(data.SafeOutputs.CreateIssues.Labels)) - } - - // Build custom environment variables specific to create-issue using shared helpers - var customEnvVars []string - customEnvVars = append(customEnvVars, buildTitlePrefixEnvVar("GH_AW_ISSUE_TITLE_PREFIX", data.SafeOutputs.CreateIssues.TitlePrefix)...) - customEnvVars = append(customEnvVars, buildLabelsEnvVar("GH_AW_ISSUE_LABELS", data.SafeOutputs.CreateIssues.Labels)...) - customEnvVars = append(customEnvVars, buildLabelsEnvVar("GH_AW_ISSUE_ALLOWED_LABELS", data.SafeOutputs.CreateIssues.AllowedLabels)...) - customEnvVars = append(customEnvVars, buildAllowedReposEnvVar("GH_AW_ALLOWED_REPOS", data.SafeOutputs.CreateIssues.AllowedRepos)...) - - // Add expires value if set - if data.SafeOutputs.CreateIssues.Expires > 0 { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ISSUE_EXPIRES: \"%d\"\n", data.SafeOutputs.CreateIssues.Expires)) - } - - // Add group flag if set - customEnvVars = append(customEnvVars, buildTemplatableBoolEnvVar("GH_AW_ISSUE_GROUP", data.SafeOutputs.CreateIssues.Group)...) - if data.SafeOutputs.CreateIssues.Group != nil { - createIssueLog.Print("Issue grouping flag set") - } - - // Add close-older-issues flag if enabled - customEnvVars = append(customEnvVars, buildTemplatableBoolEnvVar("GH_AW_CLOSE_OLDER_ISSUES", data.SafeOutputs.CreateIssues.CloseOlderIssues)...) 
- if data.SafeOutputs.CreateIssues.CloseOlderIssues != nil { - createIssueLog.Print("Close older issues flag set") - } - - // Add footer flag if explicitly set to false - if data.SafeOutputs.CreateIssues.Footer != nil && *data.SafeOutputs.CreateIssues.Footer == "false" { - customEnvVars = append(customEnvVars, " GH_AW_FOOTER: \"false\"\n") - createIssueLog.Print("Footer disabled - XML markers will be included but visible footer content will be omitted") - } - - // Add standard environment variables (metadata + staged/target repo) - customEnvVars = append(customEnvVars, c.buildStandardSafeOutputEnvVars(data, data.SafeOutputs.CreateIssues.TargetRepoSlug)...) - - // Check if copilot is in assignees - if so, we'll output issues for assign_to_agent job - assignCopilot := hasCopilotAssignee(data.SafeOutputs.CreateIssues.Assignees) - if assignCopilot { - customEnvVars = append(customEnvVars, " GH_AW_ASSIGN_COPILOT: \"true\"\n") - createIssueLog.Print("Copilot assignment requested - will output issues_to_assign_copilot for assign_to_agent job") - } - - // Build post-steps for non-copilot assignees only - // Copilot assignment must be done in a separate step with the agent token - var postSteps []string - - // Get the effective GitHub token to use for gh CLI - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - - nonCopilotAssignees := filterNonCopilotAssignees(data.SafeOutputs.CreateIssues.Assignees) - if len(nonCopilotAssignees) > 0 { - postSteps = buildCopilotParticipantSteps(CopilotParticipantConfig{ - Participants: nonCopilotAssignees, - ParticipantType: "assignee", - CustomToken: data.SafeOutputs.CreateIssues.GitHubToken, - SafeOutputsToken: safeOutputsToken, - ConditionStepID: "create_issue", - ConditionOutputKey: "issue_number", - }) - } - - // Add post-step for copilot assignment using agent token - if assignCopilot { - postSteps = append(postSteps, 
buildCopilotCodingAgentAssignmentStep(data.SafeOutputs.CreateIssues.GitHubToken, safeOutputsToken)...) - } - - // Create outputs for the job - outputs := map[string]string{ - "issue_number": "${{ steps.create_issue.outputs.issue_number }}", - "issue_url": "${{ steps.create_issue.outputs.issue_url }}", - "temporary_id_map": "${{ steps.create_issue.outputs.temporary_id_map }}", - } - - // Add issues_to_assign_copilot output if copilot assignment is requested - if assignCopilot { - outputs["issues_to_assign_copilot"] = "${{ steps.create_issue.outputs.issues_to_assign_copilot }}" - } - - // Use the shared builder function to create the job - return c.buildSafeOutputJob(data, SafeOutputJobConfig{ - JobName: "create_issue", - StepName: "Create Output Issue", - StepID: "create_issue", - MainJobName: mainJobName, - CustomEnvVars: customEnvVars, - Script: getCreateIssueScript(), - ScriptName: "create_issue", // For custom action mode - Permissions: NewPermissionsContentsReadIssuesWrite(), - Outputs: outputs, - PostSteps: postSteps, - Token: data.SafeOutputs.CreateIssues.GitHubToken, - TargetRepoSlug: data.SafeOutputs.CreateIssues.TargetRepoSlug, - }) -} diff --git a/pkg/workflow/create_pull_request.go b/pkg/workflow/create_pull_request.go index b14c6f24c3..c38d11f898 100644 --- a/pkg/workflow/create_pull_request.go +++ b/pkg/workflow/create_pull_request.go @@ -1,10 +1,6 @@ package workflow import ( - "errors" - "fmt" - - "github.com/github/gh-aw/pkg/constants" "github.com/github/gh-aw/pkg/logger" ) @@ -38,209 +34,6 @@ type CreatePullRequestsConfig struct { GithubTokenForExtraEmptyCommit string `yaml:"github-token-for-extra-empty-commit,omitempty"` // Token used to push an empty commit to trigger CI events. Use a PAT or "app" for GitHub App auth. 
} -// buildCreateOutputPullRequestJob creates the create_pull_request job -func (c *Compiler) buildCreateOutputPullRequestJob(data *WorkflowData, mainJobName string) (*Job, error) { - if data.SafeOutputs == nil || data.SafeOutputs.CreatePullRequests == nil { - return nil, errors.New("safe-outputs.create-pull-request configuration is required") - } - - if createPRLog.Enabled() { - draftValue := "true" // Default - if data.SafeOutputs.CreatePullRequests.Draft != nil { - draftValue = *data.SafeOutputs.CreatePullRequests.Draft - } - fallbackAsIssue := getFallbackAsIssue(data.SafeOutputs.CreatePullRequests) - createPRLog.Printf("Building create-pull-request job: workflow=%s, main_job=%s, draft=%v, reviewers=%d, fallback_as_issue=%v", - data.Name, mainJobName, draftValue, len(data.SafeOutputs.CreatePullRequests.Reviewers), fallbackAsIssue) - } - - // Build pre-steps for patch download, checkout, and git config - var preSteps []string - - // Step 1: Download patch artifact from unified agent-artifacts - preSteps = append(preSteps, " - name: Download patch artifact\n") - preSteps = append(preSteps, " continue-on-error: true\n") - preSteps = append(preSteps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/download-artifact"))) - preSteps = append(preSteps, " with:\n") - preSteps = append(preSteps, " name: agent-artifacts\n") - preSteps = append(preSteps, " path: /tmp/gh-aw/\n") - - // Step 2: Checkout repository - // Step 3: Configure Git credentials - // Pass the target repo to configure git remote correctly for cross-repo operations - // Use token precedence chain instead of hardcoded github.token - // Precedence: create-pull-request config token > safe-outputs token > GH_AW_GITHUB_TOKEN || GITHUB_TOKEN - var configToken string - if data.SafeOutputs.CreatePullRequests != nil { - configToken = data.SafeOutputs.CreatePullRequests.GitHubToken - } - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - // Choose 
the first non-empty custom token for precedence - effectiveCustomToken := configToken - if effectiveCustomToken == "" { - effectiveCustomToken = safeOutputsToken - } - // Get effective token (handles fallback to GH_AW_GITHUB_TOKEN || GITHUB_TOKEN) - gitToken := getEffectiveSafeOutputGitHubToken(effectiveCustomToken) - - // Use the resolved token for checkout - preSteps = buildCheckoutRepository(preSteps, c, data.SafeOutputs.CreatePullRequests.TargetRepoSlug, gitToken) - - preSteps = append(preSteps, c.generateGitConfigurationStepsWithToken(gitToken, data.SafeOutputs.CreatePullRequests.TargetRepoSlug)...) - - // Build custom environment variables specific to create-pull-request - var customEnvVars []string - // Pass the workflow ID for branch naming - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_ID: %q\n", mainJobName)) - // Pass custom base branch only if explicitly configured; JS will resolve dynamically otherwise - if data.SafeOutputs.CreatePullRequests.BaseBranch != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_CUSTOM_BASE_BRANCH: %q\n", data.SafeOutputs.CreatePullRequests.BaseBranch)) - } - customEnvVars = append(customEnvVars, buildTitlePrefixEnvVar("GH_AW_PR_TITLE_PREFIX", data.SafeOutputs.CreatePullRequests.TitlePrefix)...) - customEnvVars = append(customEnvVars, buildLabelsEnvVar("GH_AW_PR_LABELS", data.SafeOutputs.CreatePullRequests.Labels)...) - customEnvVars = append(customEnvVars, buildLabelsEnvVar("GH_AW_PR_ALLOWED_LABELS", data.SafeOutputs.CreatePullRequests.AllowedLabels)...) - // Pass draft setting - default to true for backwards compatibility - if data.SafeOutputs.CreatePullRequests.Draft != nil { - customEnvVars = append(customEnvVars, buildTemplatableBoolEnvVar("GH_AW_PR_DRAFT", data.SafeOutputs.CreatePullRequests.Draft)...) 
- } else { - customEnvVars = append(customEnvVars, " GH_AW_PR_DRAFT: \"true\"\n") - } - - // Pass the if-no-changes configuration - ifNoChanges := data.SafeOutputs.CreatePullRequests.IfNoChanges - if ifNoChanges == "" { - ifNoChanges = "warn" // Default value - } - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_PR_IF_NO_CHANGES: %q\n", ifNoChanges)) - - // Pass the allow-empty configuration - if data.SafeOutputs.CreatePullRequests.AllowEmpty != nil { - customEnvVars = append(customEnvVars, buildTemplatableBoolEnvVar("GH_AW_PR_ALLOW_EMPTY", data.SafeOutputs.CreatePullRequests.AllowEmpty)...) - } else { - customEnvVars = append(customEnvVars, " GH_AW_PR_ALLOW_EMPTY: \"false\"\n") - } - - // Pass the auto-merge configuration - if data.SafeOutputs.CreatePullRequests.AutoMerge != nil { - customEnvVars = append(customEnvVars, buildTemplatableBoolEnvVar("GH_AW_PR_AUTO_MERGE", data.SafeOutputs.CreatePullRequests.AutoMerge)...) - } else { - customEnvVars = append(customEnvVars, " GH_AW_PR_AUTO_MERGE: \"false\"\n") - } - - // Pass the fallback-as-issue configuration - default to true for backwards compatibility - if data.SafeOutputs.CreatePullRequests.FallbackAsIssue != nil { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_PR_FALLBACK_AS_ISSUE: \"%t\"\n", *data.SafeOutputs.CreatePullRequests.FallbackAsIssue)) - } else { - customEnvVars = append(customEnvVars, " GH_AW_PR_FALLBACK_AS_ISSUE: \"true\"\n") - } - - // Pass the maximum patch size configuration - maxPatchSize := 1024 // Default value - if data.SafeOutputs != nil && data.SafeOutputs.MaximumPatchSize > 0 { - maxPatchSize = data.SafeOutputs.MaximumPatchSize - } - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_MAX_PATCH_SIZE: %d\n", maxPatchSize)) - - // Pass activation comment information if available (for updating the comment with PR link) - // These outputs are only available when reaction is configured in the workflow - if data.AIReaction != "" && data.AIReaction != "none" { - 
customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_COMMENT_ID: ${{ needs.%s.outputs.comment_id }}\n", constants.ActivationJobName)) - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_COMMENT_REPO: ${{ needs.%s.outputs.comment_repo }}\n", constants.ActivationJobName)) - } - - // Add expires value if set (only for same-repo PRs - when target-repo is not set) - if data.SafeOutputs.CreatePullRequests.Expires > 0 && data.SafeOutputs.CreatePullRequests.TargetRepoSlug == "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_PR_EXPIRES: \"%d\"\n", data.SafeOutputs.CreatePullRequests.Expires)) - } - - // Add footer flag if explicitly set to false - if data.SafeOutputs.CreatePullRequests.Footer != nil && *data.SafeOutputs.CreatePullRequests.Footer == "false" { - customEnvVars = append(customEnvVars, " GH_AW_FOOTER: \"false\"\n") - createPRLog.Print("Footer disabled - XML markers will be included but visible footer content will be omitted") - } - - // Add extra empty commit token (for pushing an empty commit to trigger CI) - // Defaults to GH_AW_CI_TRIGGER_TOKEN when not explicitly configured - ciTriggerToken := data.SafeOutputs.CreatePullRequests.GithubTokenForExtraEmptyCommit - switch ciTriggerToken { - case "app": - customEnvVars = append(customEnvVars, " GH_AW_CI_TRIGGER_TOKEN: ${{ steps.safe-outputs-app-token.outputs.token || '' }}\n") - createPRLog.Print("Extra empty commit using GitHub App token") - case "default", "": - // Use the magic GH_AW_CI_TRIGGER_TOKEN secret (default behavior when not explicitly configured) - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_CI_TRIGGER_TOKEN: %s\n", getEffectiveCITriggerGitHubToken(""))) - createPRLog.Print("Extra empty commit using GH_AW_CI_TRIGGER_TOKEN") - default: - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_CI_TRIGGER_TOKEN: %s\n", ciTriggerToken)) - createPRLog.Printf("Extra empty commit using explicit token") - } - - // Add standard environment variables (metadata + 
staged/target repo) - customEnvVars = append(customEnvVars, c.buildStandardSafeOutputEnvVars(data, data.SafeOutputs.CreatePullRequests.TargetRepoSlug)...) - - // Build post-steps for reviewers if configured - var postSteps []string - if len(data.SafeOutputs.CreatePullRequests.Reviewers) > 0 { - // Get the effective GitHub token to use for gh CLI - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - - postSteps = buildCopilotParticipantSteps(CopilotParticipantConfig{ - Participants: data.SafeOutputs.CreatePullRequests.Reviewers, - ParticipantType: "reviewer", - CustomToken: data.SafeOutputs.CreatePullRequests.GitHubToken, - SafeOutputsToken: safeOutputsToken, - ConditionStepID: "create_pull_request", - ConditionOutputKey: "pull_request_url", - }) - } - - // Create outputs for the job - outputs := map[string]string{ - "pull_request_number": "${{ steps.create_pull_request.outputs.pull_request_number }}", - "pull_request_url": "${{ steps.create_pull_request.outputs.pull_request_url }}", - "issue_number": "${{ steps.create_pull_request.outputs.issue_number }}", - "issue_url": "${{ steps.create_pull_request.outputs.issue_url }}", - "branch_name": "${{ steps.create_pull_request.outputs.branch_name }}", - "fallback_used": "${{ steps.create_pull_request.outputs.fallback_used }}", - "error_message": "${{ steps.create_pull_request.outputs.error_message }}", - } - - // Choose permissions based on fallback-as-issue setting - fallbackAsIssue := getFallbackAsIssue(data.SafeOutputs.CreatePullRequests) - var permissions *Permissions - if fallbackAsIssue { - // Default: include issues: write for fallback behavior - permissions = NewPermissionsContentsWriteIssuesWritePRWrite() - createPRLog.Print("Using permissions with issues:write (fallback-as-issue enabled)") - } else { - // Fallback disabled: only need contents: write and pull-requests: write - permissions = NewPermissionsContentsWritePRWrite() - 
createPRLog.Print("Using permissions without issues:write (fallback-as-issue disabled)") - } - - // Use the shared builder function to create the job - return c.buildSafeOutputJob(data, SafeOutputJobConfig{ - JobName: "create_pull_request", - StepName: "Create Pull Request", - StepID: "create_pull_request", - MainJobName: mainJobName, - CustomEnvVars: customEnvVars, - Script: "", // Legacy - handler manager uses require() to load handler from /tmp/gh-aw/actions - Permissions: permissions, - Outputs: outputs, - PreSteps: preSteps, - PostSteps: postSteps, - Token: data.SafeOutputs.CreatePullRequests.GitHubToken, - TargetRepoSlug: data.SafeOutputs.CreatePullRequests.TargetRepoSlug, - }) -} - // parsePullRequestsConfig handles only create-pull-request (singular) configuration func (c *Compiler) parsePullRequestsConfig(outputMap map[string]any) *CreatePullRequestsConfig { // Check for singular form only diff --git a/pkg/workflow/custom_action_copilot_token_test.go b/pkg/workflow/custom_action_copilot_token_test.go deleted file mode 100644 index 91772a204b..0000000000 --- a/pkg/workflow/custom_action_copilot_token_test.go +++ /dev/null @@ -1,51 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// TestCustomActionCopilotTokenFallback tests that custom actions use the correct -// Copilot token fallback when no custom token is provided -func TestCustomActionCopilotTokenFallback(t *testing.T) { - compiler := NewCompiler() - - // Register a test custom action - testScript := `console.log('test');` - actionPath := "./actions/test-action" - err := DefaultScriptRegistry.RegisterWithAction("test_handler", testScript, RuntimeModeGitHubScript, actionPath) - require.NoError(t, err) - - workflowData := &WorkflowData{ - Name: "Test Workflow", - SafeOutputs: &SafeOutputsConfig{}, - } - - // Test with UseCopilotRequestsToken=true and no custom token - config := 
GitHubScriptStepConfig{ - StepName: "Test Custom Action", - StepID: "test", - CustomToken: "", // No custom token - UseCopilotRequestsToken: true, - } - - steps := compiler.buildCustomActionStep(workflowData, config, "test_handler") - stepsContent := strings.Join(steps, "") - - t.Logf("Generated steps:\n%s", stepsContent) - - // Should use COPILOT_GITHUB_TOKEN directly (no fallback chain) - // Note: COPILOT_GITHUB_TOKEN is the recommended token for Copilot operations - // and does NOT have a fallback to GITHUB_TOKEN because GITHUB_TOKEN lacks - // permissions for agent sessions and bot assignments - assert.Contains(t, stepsContent, "secrets.COPILOT_GITHUB_TOKEN", "Should use COPILOT_GITHUB_TOKEN") - assert.NotContains(t, stepsContent, "COPILOT_TOKEN ||", "Should not use deprecated COPILOT_TOKEN") - - // Verify no fallback chain (COPILOT_GITHUB_TOKEN is used directly) - assert.NotContains(t, stepsContent, "||", "Should not have fallback chain for Copilot token") -} diff --git a/pkg/workflow/data/action_pins.json b/pkg/workflow/data/action_pins.json index 80f6d33962..103020bbd7 100644 --- a/pkg/workflow/data/action_pins.json +++ b/pkg/workflow/data/action_pins.json @@ -1,34 +1,29 @@ { "entries": { - "actions/ai-inference@v2.0.6": { + "actions/ai-inference@v2.0.7": { "repo": "actions/ai-inference", - "version": "v2.0.6", - "sha": "a380166897b5408b8fb7dddd148142794cb5624a" + "version": "v2.0.7", + "sha": "e09e65981758de8b2fdab13c2bfb7c7d5493b0b6" }, - "actions/attest-build-provenance@v2.4.0": { + "actions/attest-build-provenance@v4.1.0": { "repo": "actions/attest-build-provenance", - "version": "v2.4.0", - "sha": "e8998f949152b193b063cb0ec769d69d929409be" + "version": "v4.1.0", + "sha": "a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32" }, - "actions/cache/restore@v4.3.0": { + "actions/cache/restore@v5.0.3": { "repo": "actions/cache/restore", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" + "version": "v5.0.3", + "sha": 
"cdf6c1fa76f9f475f3d7449005a359c84ca0f306" }, - "actions/cache/save@v4.3.0": { + "actions/cache/save@v5.0.3": { "repo": "actions/cache/save", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" + "version": "v5.0.3", + "sha": "cdf6c1fa76f9f475f3d7449005a359c84ca0f306" }, - "actions/cache@v4.3.0": { + "actions/cache@v5.0.3": { "repo": "actions/cache", - "version": "v4.3.0", - "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" - }, - "actions/checkout@v4.3.1": { - "repo": "actions/checkout", - "version": "v4.3.1", - "sha": "34e114876b0b11c390a56381ad16ebd13914f8d5" + "version": "v5.0.3", + "sha": "cdf6c1fa76f9f475f3d7449005a359c84ca0f306" }, "actions/checkout@v5.0.1": { "repo": "actions/checkout", @@ -40,20 +35,15 @@ "version": "v6.0.2", "sha": "de0fac2e4500dabe0009e67214ff5f5447ce83dd" }, - "actions/create-github-app-token@v2.2.1": { + "actions/create-github-app-token@v3.0.0-beta.2": { "repo": "actions/create-github-app-token", - "version": "v2.2.1", - "sha": "29824e69f54612133e76f7eaac726eef6c875baf" + "version": "v3.0.0-beta.2", + "sha": "bf559f85448f9380bcfa2899dbdc01eb5b37be3a" }, - "actions/download-artifact@v6": { + "actions/download-artifact@v8.0.0": { "repo": "actions/download-artifact", - "version": "v6", - "sha": "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53" - }, - "actions/github-script@v7.1.0": { - "repo": "actions/github-script", - "version": "v7.1.0", - "sha": "f28e40c7f34bde8b3046d885e986cb6290c5673b" + "version": "v8.0.0", + "sha": "70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3" }, "actions/github-script@v8": { "repo": "actions/github-script", @@ -65,70 +55,50 @@ "version": "v4.3.1", "sha": "67a3573c9a986a3f9c594539f4ab511d57bb3ce9" }, - "actions/setup-go@v5.6.0": { - "repo": "actions/setup-go", - "version": "v5.6.0", - "sha": "40f1582b2485089dde7abd97c1529aa768e1baff" + "actions/setup-dotnet@v5.1.0": { + "repo": "actions/setup-dotnet", + "version": "v5.1.0", + "sha": "baa11fbfe1d6520db94683bd5c7a3818018e4309" }, - 
"actions/setup-go@v6": { + "actions/setup-go@v6.3.0": { "repo": "actions/setup-go", - "version": "v6", + "version": "v6.3.0", "sha": "4b73464bb391d4059bd26b0524d20df3927bd417" }, - "actions/setup-go@v6.2.0": { - "repo": "actions/setup-go", - "version": "v6.2.0", - "sha": "7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5" - }, - "actions/setup-java@v4.8.0": { + "actions/setup-java@v5.2.0": { "repo": "actions/setup-java", - "version": "v4.8.0", - "sha": "c1e323688fd81a25caa38c78aa6df2d33d3e20d9" - }, - "actions/setup-node@v4.4.0": { - "repo": "actions/setup-node", - "version": "v4.4.0", - "sha": "49933ea5288caeca8642d1e84afbd3f7d6820020" + "version": "v5.2.0", + "sha": "be666c2fcd27ec809703dec50e508c2fdc7f6654" }, "actions/setup-node@v6.2.0": { "repo": "actions/setup-node", "version": "v6.2.0", "sha": "6044e13b5dc448c55e2357c09f80417699197238" }, - "actions/setup-python@v5.6.0": { + "actions/setup-python@v6.2.0": { "repo": "actions/setup-python", - "version": "v5.6.0", - "sha": "a26af69be951a213d495a4c3e4e4022e16d87065" - }, - "actions/upload-artifact@v4.6.2": { - "repo": "actions/upload-artifact", - "version": "v4.6.2", - "sha": "ea165f8d65b6e75b540449e92b4886f43607fa02" + "version": "v6.2.0", + "sha": "a309ff8b426b58ec0e2a45f0f869d46889d02405" }, - "actions/upload-artifact@v5": { + "actions/upload-artifact@v5.0.0": { "repo": "actions/upload-artifact", - "version": "v5", + "version": "v5.0.0", "sha": "330a01c490aca151604b8cf639adc76d48f6c5d4" }, - "actions/upload-artifact@v6": { + "actions/upload-artifact@v7.0.0": { "repo": "actions/upload-artifact", - "version": "v6", - "sha": "b7c566a772e6b6bfb58ed0dc250532a479d7789f" + "version": "v7.0.0", + "sha": "bbbca2ddaa5d8feaa63e36b76fdaad77386f024f" }, - "anchore/sbom-action@v0": { + "anchore/sbom-action@v0.23.0": { "repo": "anchore/sbom-action", - "version": "v0", + "version": "v0.23.0", "sha": "17ae1740179002c89186b61233e0f892c3118b11" }, - "anchore/sbom-action@v0.22.2": { - "repo": "anchore/sbom-action", - "version": 
"v0.22.2", - "sha": "28d71544de8eaf1b958d335707167c5f783590ad" - }, - "astral-sh/setup-uv@v5.4.2": { + "astral-sh/setup-uv@v7.3.0": { "repo": "astral-sh/setup-uv", - "version": "v5.4.2", - "sha": "d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86" + "version": "v7.3.0", + "sha": "eac588ad8def6316056a12d4907a9d4d84ff7a3b" }, "cli/gh-extension-precompile@v2.1.0": { "repo": "cli/gh-extension-precompile", @@ -165,15 +135,15 @@ "version": "v1.20.4", "sha": "dff508cca8ce57162e7aa6c4769a4f97c2fed638" }, - "github/codeql-action/upload-sarif@v3.32.4": { + "github/codeql-action/upload-sarif@v4.32.4": { "repo": "github/codeql-action/upload-sarif", - "version": "v3.32.4", - "sha": "85b88275909735f5bc23196090e03d2eb148b3de" + "version": "v4.32.4", + "sha": "e34fc2711fb7964ca6850c8a8382121f34745f3b" }, - "github/stale-repos@v3.0.2": { + "github/stale-repos@v8.0.4": { "repo": "github/stale-repos", - "version": "v3.0.2", - "sha": "a21e55567b83cf3c3f3f9085d3038dc6cee02598" + "version": "v8.0.4", + "sha": "6084a41431c4ce8842a7e879b1a15082b88742ae" }, "haskell-actions/setup@v2.10.3": { "repo": "haskell-actions/setup", diff --git a/pkg/workflow/dependency_tracker.go b/pkg/workflow/dependency_tracker.go deleted file mode 100644 index 0d517468d6..0000000000 --- a/pkg/workflow/dependency_tracker.go +++ /dev/null @@ -1,121 +0,0 @@ -package workflow - -import ( - "fmt" - "path/filepath" - "regexp" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var dependencyTrackerLog = logger.New("workflow:dependency_tracker") - -// FindJavaScriptDependencies analyzes a JavaScript file and recursively finds all its dependencies -// without actually bundling the code. Returns a map of file paths that are required. 
-// -// Parameters: -// - mainContent: The JavaScript content to analyze -// - sources: Map of file paths to their content -// - basePath: Base directory path for resolving relative imports (e.g., "js") -// -// Returns: -// - Map of file paths (relative to basePath) that are dependencies -// - Error if a required file is not found in sources -func FindJavaScriptDependencies(mainContent string, sources map[string]string, basePath string) (map[string]bool, error) { - dependencyTrackerLog.Printf("Finding JavaScript dependencies: source_count=%d, base_path=%s", len(sources), basePath) - - // Track discovered dependencies - dependencies := make(map[string]bool) - - // Track files we've already processed to avoid circular dependencies - processed := make(map[string]bool) - - // Recursively find dependencies starting from the main content - if err := findDependenciesRecursive(mainContent, basePath, sources, dependencies, processed); err != nil { - dependencyTrackerLog.Printf("Dependency tracking failed: %v", err) - return nil, err - } - - dependencyTrackerLog.Printf("Dependency tracking completed: found %d dependencies", len(dependencies)) - return dependencies, nil -} - -// findDependenciesRecursive processes content and recursively tracks its dependencies -func findDependenciesRecursive(content string, currentPath string, sources map[string]string, dependencies map[string]bool, processed map[string]bool) error { - // Regular expression to match require('./...') or require("./...") - // This matches both single-line and multi-line destructuring: - // const { x } = require("./file.cjs"); - // const { - // x, - // y - // } = require("./file.cjs"); - // Captures the require path where it starts with ./ or ../ - requireRegex := regexp.MustCompile(`(?s)(?:const|let|var)\s+(?:\{[^}]*\}|\w+)\s*=\s*require\(['"](\.\.?/[^'"]+)['"]\);?`) - - // Find all requires - matches := requireRegex.FindAllStringSubmatch(content, -1) - - if len(matches) == 0 { - // No requires found, nothing 
to track - return nil - } - - dependencyTrackerLog.Printf("Found %d require statements in current file", len(matches)) - - for _, match := range matches { - if len(match) < 2 { - continue - } - - // Extract the require path - requirePath := match[1] - - // Resolve the full path relative to current path - var fullPath string - if currentPath == "" { - fullPath = requirePath - } else { - fullPath = filepath.Join(currentPath, requirePath) - } - - // Ensure .cjs extension - if !strings.HasSuffix(fullPath, ".cjs") && !strings.HasSuffix(fullPath, ".js") { - fullPath += ".cjs" - } - - // Normalize the path (clean up ./ and ../) - fullPath = filepath.Clean(fullPath) - - // Convert Windows path separators to forward slashes for consistency - fullPath = filepath.ToSlash(fullPath) - - // Check if we've already processed this file - if processed[fullPath] { - dependencyTrackerLog.Printf("Skipping already processed dependency: %s", fullPath) - continue - } - - // Mark as processed - processed[fullPath] = true - - // Add to dependencies - dependencies[fullPath] = true - dependencyTrackerLog.Printf("Added dependency: %s", fullPath) - - // Look up the required file in sources - requiredContent, ok := sources[fullPath] - if !ok { - dependencyTrackerLog.Printf("Required file not found in sources: %s", fullPath) - return fmt.Errorf("required file not found in sources: %s", fullPath) - } - - // Recursively find dependencies of this file - requiredDir := filepath.Dir(fullPath) - if err := findDependenciesRecursive(requiredContent, requiredDir, sources, dependencies, processed); err != nil { - return err - } - } - - return nil -} diff --git a/pkg/workflow/dependency_tracker_test.go b/pkg/workflow/dependency_tracker_test.go deleted file mode 100644 index c13cc9bd2f..0000000000 --- a/pkg/workflow/dependency_tracker_test.go +++ /dev/null @@ -1,185 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestFindJavaScriptDependencies(t *testing.T) 
{ - tests := []struct { - name string - mainContent string - sources map[string]string - basePath string - wantDeps map[string]bool - wantErr bool - errorMessage string - }{ - { - name: "simple single dependency", - mainContent: `const { foo } = require("./helper.cjs"); -console.log(foo());`, - sources: map[string]string{ - "js/helper.cjs": `function foo() { return "bar"; } -module.exports = { foo };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/helper.cjs": true, - }, - wantErr: false, - }, - { - name: "chained dependencies", - mainContent: `const { a } = require("./module-a.cjs"); -console.log(a);`, - sources: map[string]string{ - "js/module-a.cjs": `const { b } = require("./module-b.cjs"); -module.exports = { a: b };`, - "js/module-b.cjs": `module.exports = { b: "value" };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/module-a.cjs": true, - "js/module-b.cjs": true, - }, - wantErr: false, - }, - { - name: "circular dependencies handled", - mainContent: `const { x } = require("./a.cjs");`, - sources: map[string]string{ - "js/a.cjs": `const { y } = require("./b.cjs"); -module.exports = { x: y };`, - "js/b.cjs": `const { x } = require("./a.cjs"); -module.exports = { y: "val" };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/a.cjs": true, - "js/b.cjs": true, - }, - wantErr: false, - }, - { - name: "no dependencies", - mainContent: `console.log("no requires here"); -const x = 42;`, - sources: map[string]string{}, - basePath: "js", - wantDeps: map[string]bool{}, - wantErr: false, - }, - { - name: "missing dependency error", - mainContent: `const { missing } = require("./not-found.cjs");`, - sources: map[string]string{}, - basePath: "js", - wantDeps: nil, - wantErr: true, - errorMessage: "required file not found in sources", - }, - { - name: "multiple dependencies", - mainContent: `const { a } = require("./a.cjs"); -const { b } = require("./b.cjs"); -const { c } = require("./c.cjs");`, - sources: map[string]string{ - "js/a.cjs": 
`module.exports = { a: 1 };`, - "js/b.cjs": `module.exports = { b: 2 };`, - "js/c.cjs": `module.exports = { c: 3 };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/a.cjs": true, - "js/b.cjs": true, - "js/c.cjs": true, - }, - wantErr: false, - }, - { - name: "multi-line destructuring", - mainContent: `const { - foo, - bar, - baz -} = require("./utils.cjs");`, - sources: map[string]string{ - "js/utils.cjs": `module.exports = { foo: 1, bar: 2, baz: 3 };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/utils.cjs": true, - }, - wantErr: false, - }, - { - name: "safe-outputs MCP server dependencies", - mainContent: `const { createServer, registerTool, normalizeTool, start } = require("./mcp_server_core.cjs"); -const { loadConfig } = require("./safe_outputs_config.cjs"); -const { createAppendFunction } = require("./safe_outputs_append.cjs"); -const { createHandlers } = require("./safe_outputs_handlers.cjs");`, - sources: map[string]string{ - "js/mcp_server_core.cjs": `const { readBuffer } = require("./read_buffer.cjs"); -module.exports = { createServer, registerTool, normalizeTool, start };`, - "js/read_buffer.cjs": `module.exports = { readBuffer };`, - "js/safe_outputs_config.cjs": `module.exports = { loadConfig };`, - "js/safe_outputs_append.cjs": `module.exports = { createAppendFunction };`, - "js/safe_outputs_handlers.cjs": `const { normalize } = require("./normalize_branch_name.cjs"); -module.exports = { createHandlers };`, - "js/normalize_branch_name.cjs": `module.exports = { normalize };`, - }, - basePath: "js", - wantDeps: map[string]bool{ - "js/mcp_server_core.cjs": true, - "js/read_buffer.cjs": true, - "js/safe_outputs_config.cjs": true, - "js/safe_outputs_append.cjs": true, - "js/safe_outputs_handlers.cjs": true, - "js/normalize_branch_name.cjs": true, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotDeps, err := FindJavaScriptDependencies(tt.mainContent, tt.sources, tt.basePath) - 
- if (err != nil) != tt.wantErr { - t.Errorf("FindJavaScriptDependencies() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if tt.wantErr { - if err == nil { - t.Errorf("FindJavaScriptDependencies() expected error containing %q but got no error", tt.errorMessage) - } else if tt.errorMessage != "" && !strings.Contains(err.Error(), tt.errorMessage) { - t.Errorf("FindJavaScriptDependencies() error = %q, expected to contain %q", err.Error(), tt.errorMessage) - } - return - } - - // Check that all wanted dependencies are present - for dep := range tt.wantDeps { - if !gotDeps[dep] { - t.Errorf("FindJavaScriptDependencies() missing expected dependency: %q", dep) - } - } - - // Check that no unexpected dependencies are present - for dep := range gotDeps { - if !tt.wantDeps[dep] { - t.Errorf("FindJavaScriptDependencies() unexpected dependency: %q", dep) - } - } - - // Check count - if len(gotDeps) != len(tt.wantDeps) { - t.Errorf("FindJavaScriptDependencies() got %d dependencies, want %d", len(gotDeps), len(tt.wantDeps)) - } - }) - } -} diff --git a/pkg/workflow/env_mirror.go b/pkg/workflow/env_mirror.go deleted file mode 100644 index ae31020082..0000000000 --- a/pkg/workflow/env_mirror.go +++ /dev/null @@ -1,137 +0,0 @@ -// This file provides environment variable mirroring for agent containers. -// -// This file contains logic for mirroring essential GitHub Actions runner environment -// variables into the agent container. The Ubuntu runner image provides many environment -// variables that workflows and actions depend on (e.g., JAVA_HOME, ANDROID_HOME, -// CHROMEWEBDRIVER, CONDA, etc.). This module ensures these are available inside -// the AWF (Agent Workflow Firewall) container. -// -// Environment variables are passed through using AWF's --env flag, which sets -// environment variables only if they exist on the host. This ensures graceful -// handling of missing variables. 
-// -// Reference: scratchpad/ubuntulatest.md section "Environment Variables" - -package workflow - -import ( - "sort" - - "github.com/github/gh-aw/pkg/logger" -) - -var envMirrorLog = logger.New("workflow:env_mirror") - -// MirroredEnvVars is the list of environment variables from the GitHub Actions -// Ubuntu runner that should be mirrored into the agent container. -// -// These are grouped by category: -// - Java JDK homes (for multiple Java versions) -// - Android SDK paths -// - Browser WebDriver paths -// - Package manager paths -// - Go workspace path -// -// Variables are only passed through if they exist on the host runner. -// Reference: scratchpad/ubuntulatest.md -var MirroredEnvVars = []string{ - // Java JDK homes (multiple versions available on Ubuntu runner) - "JAVA_HOME", - "JAVA_HOME_8_X64", - "JAVA_HOME_11_X64", - "JAVA_HOME_17_X64", - "JAVA_HOME_21_X64", - "JAVA_HOME_25_X64", - - // Android SDK paths - "ANDROID_HOME", - "ANDROID_SDK_ROOT", - "ANDROID_NDK", - "ANDROID_NDK_HOME", - "ANDROID_NDK_ROOT", - "ANDROID_NDK_LATEST_HOME", - - // Browser WebDriver paths (for Selenium/browser automation) - "CHROMEWEBDRIVER", - "EDGEWEBDRIVER", - "GECKOWEBDRIVER", - "SELENIUM_JAR_PATH", - - // Package manager paths - "CONDA", - "VCPKG_INSTALLATION_ROOT", - - // Go workspace path - "GOPATH", - - // .NET environment - "DOTNET_ROOT", - - // Python environment - "PIPX_HOME", - "PIPX_BIN_DIR", - - // Ruby environment - "GEM_HOME", - "GEM_PATH", - - // Rust environment - "CARGO_HOME", - "RUSTUP_HOME", - - // Homebrew (Linux) - "HOMEBREW_PREFIX", - "HOMEBREW_CELLAR", - "HOMEBREW_REPOSITORY", - - // Swift - "SWIFT_PATH", - - // Common tool homes - "GOROOT", - "NVM_DIR", - - // Azure environment - "AZURE_EXTENSION_DIR", -} - -// GetMirroredEnvArgs returns the AWF command-line arguments for mirroring -// environment variables from the runner into the agent container. -// -// AWF uses the --env flag to pass environment variables in KEY=VALUE format. 
-// The output uses shell variable expansion syntax (e.g., JAVA_HOME=${JAVA_HOME}) -// so that the actual value is resolved at runtime from the host environment. -// -// Example output: ["--env", "JAVA_HOME=${JAVA_HOME}", "--env", "ANDROID_HOME=${ANDROID_HOME}", ...] -// -// This function always returns the same list of environment variables to mirror. -// Variables that don't exist on the host will expand to empty strings at runtime. -func GetMirroredEnvArgs() []string { - envMirrorLog.Print("Generating mirrored environment variable arguments") - - // Sort for consistent output - sortedVars := make([]string, len(MirroredEnvVars)) - copy(sortedVars, MirroredEnvVars) - sort.Strings(sortedVars) - - var args []string - for _, envVar := range sortedVars { - // Use shell variable expansion syntax so the value is resolved at runtime - // Pre-wrap in double quotes so shellEscapeArg preserves them (allowing shell expansion) - args = append(args, "--env", "\""+envVar+"=${"+envVar+"}\"") - } - - envMirrorLog.Printf("Generated %d environment variable mirror arguments", len(sortedVars)) - return args -} - -// GetMirroredEnvVarsList returns the list of environment variables that -// are mirrored from the runner to the agent container. -// -// This is useful for documentation and debugging purposes. 
-func GetMirroredEnvVarsList() []string { - result := make([]string, len(MirroredEnvVars)) - copy(result, MirroredEnvVars) - sort.Strings(result) - return result -} diff --git a/pkg/workflow/env_mirror_test.go b/pkg/workflow/env_mirror_test.go deleted file mode 100644 index e3a7a57286..0000000000 --- a/pkg/workflow/env_mirror_test.go +++ /dev/null @@ -1,221 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestGetMirroredEnvArgs(t *testing.T) { - args := GetMirroredEnvArgs() - - // Should return pairs of --env and KEY=${KEY} format - require.NotEmpty(t, args, "Should return environment variable arguments") - require.Equal(t, 0, len(args)%2, "Arguments should come in pairs (--env, KEY=${KEY})") - - // Verify the structure of arguments - for i := 0; i < len(args); i += 2 { - assert.Equal(t, "--env", args[i], "Even indices should be --env flag") - assert.NotEmpty(t, args[i+1], "Odd indices should be environment variable assignments") - // Verify the "KEY=${KEY}" format with outer double quotes - assert.True(t, len(args[i+1]) >= 2 && args[i+1][0] == '"' && args[i+1][len(args[i+1])-1] == '"', - "Should be wrapped in double quotes for shell expansion, got: %s", args[i+1]) - assert.Contains(t, args[i+1], "=", "Should contain = for KEY=VALUE format") - assert.Contains(t, args[i+1], "=${", "Should contain =${ for shell expansion") - assert.Contains(t, args[i+1], "}", "Should contain } for shell expansion") - } -} - -func TestGetMirroredEnvArgs_ContainsExpectedVariables(t *testing.T) { - args := GetMirroredEnvArgs() - - // Convert to a set for easy lookup (extract variable name from "KEY=${KEY}" format) - varSet := make(map[string]bool) - for i := 1; i < len(args); i += 2 { - // Extract the variable name from "KEY=${KEY}" format - envAssignment := args[i] - // Skip the leading quote and get the part before the '=' - if len(envAssignment) > 1 && 
envAssignment[0] == '"' { - for j := 1; j < len(envAssignment); j++ { - if envAssignment[j] == '=' { - varSet[envAssignment[1:j]] = true - break - } - } - } - } - - // Test that critical environment variables are included - expectedVars := []string{ - "JAVA_HOME", - "JAVA_HOME_17_X64", - "ANDROID_HOME", - "CHROMEWEBDRIVER", - "GECKOWEBDRIVER", - "CONDA", - "VCPKG_INSTALLATION_ROOT", - "GOPATH", - } - - for _, expected := range expectedVars { - assert.True(t, varSet[expected], "Should include %s in mirrored environment variables", expected) - } -} - -func TestGetMirroredEnvArgs_IsSorted(t *testing.T) { - args := GetMirroredEnvArgs() - - // Extract just the variable names from "KEY=${KEY}" format (odd indices) - var varNames []string - for i := 1; i < len(args); i += 2 { - envAssignment := args[i] - // Skip the leading quote and get the part before the '=' - if len(envAssignment) > 1 && envAssignment[0] == '"' { - for j := 1; j < len(envAssignment); j++ { - if envAssignment[j] == '=' { - varNames = append(varNames, envAssignment[1:j]) - break - } - } - } - } - - // Verify they are sorted - for i := 1; i < len(varNames); i++ { - assert.LessOrEqual(t, varNames[i-1], varNames[i], - "Environment variables should be sorted, but %s comes after %s", - varNames[i-1], varNames[i]) - } -} - -func TestGetMirroredEnvVarsList(t *testing.T) { - vars := GetMirroredEnvVarsList() - - require.NotEmpty(t, vars, "Should return a list of environment variables") - - // Verify the list contains expected variables - varSet := make(map[string]bool) - for _, v := range vars { - varSet[v] = true - } - - assert.True(t, varSet["JAVA_HOME"], "Should include JAVA_HOME") - assert.True(t, varSet["ANDROID_HOME"], "Should include ANDROID_HOME") - assert.True(t, varSet["CHROMEWEBDRIVER"], "Should include CHROMEWEBDRIVER") -} - -func TestGetMirroredEnvVarsList_IsSorted(t *testing.T) { - vars := GetMirroredEnvVarsList() - - // Verify they are sorted - for i := 1; i < len(vars); i++ { - 
assert.LessOrEqual(t, vars[i-1], vars[i], - "Environment variables should be sorted, but %s comes after %s", - vars[i-1], vars[i]) - } -} - -func TestMirroredEnvVars_NoDuplicates(t *testing.T) { - vars := GetMirroredEnvVarsList() - - seen := make(map[string]bool) - for _, v := range vars { - assert.False(t, seen[v], "Duplicate environment variable found: %s", v) - seen[v] = true - } -} - -func TestMirroredEnvVars_IncludesJavaVersions(t *testing.T) { - vars := GetMirroredEnvVarsList() - - varSet := make(map[string]bool) - for _, v := range vars { - varSet[v] = true - } - - // Java versions commonly available on GitHub Actions runners - javaVersions := []string{ - "JAVA_HOME_8_X64", - "JAVA_HOME_11_X64", - "JAVA_HOME_17_X64", - "JAVA_HOME_21_X64", - } - - for _, javaVar := range javaVersions { - assert.True(t, varSet[javaVar], "Should include %s for Java version support", javaVar) - } -} - -func TestMirroredEnvVars_IncludesAndroidVars(t *testing.T) { - vars := GetMirroredEnvVarsList() - - varSet := make(map[string]bool) - for _, v := range vars { - varSet[v] = true - } - - // Android environment variables from the runner - androidVars := []string{ - "ANDROID_HOME", - "ANDROID_SDK_ROOT", - "ANDROID_NDK", - "ANDROID_NDK_HOME", - } - - for _, androidVar := range androidVars { - assert.True(t, varSet[androidVar], "Should include %s for Android development support", androidVar) - } -} - -func TestMirroredEnvVars_IncludesBrowserVars(t *testing.T) { - vars := GetMirroredEnvVarsList() - - varSet := make(map[string]bool) - for _, v := range vars { - varSet[v] = true - } - - // Browser/WebDriver environment variables from the runner - browserVars := []string{ - "CHROMEWEBDRIVER", - "EDGEWEBDRIVER", - "GECKOWEBDRIVER", - "SELENIUM_JAR_PATH", - } - - for _, browserVar := range browserVars { - assert.True(t, varSet[browserVar], "Should include %s for browser automation support", browserVar) - } -} - -func TestGetMirroredEnvArgs_CorrectFormat(t *testing.T) { - args := 
GetMirroredEnvArgs() - - // Find ANDROID_HOME in the args and verify its format - found := false - for i := 0; i < len(args); i += 2 { - if args[i] == "--env" && i+1 < len(args) { - // Check for the specific format: "KEY=${KEY}" with outer double quotes - if args[i+1] == "\"ANDROID_HOME=${ANDROID_HOME}\"" { - found = true - break - } - } - } - assert.True(t, found, "Should include \"ANDROID_HOME=${ANDROID_HOME}\" in correct format with outer double quotes") - - // Also verify JAVA_HOME format - foundJava := false - for i := 0; i < len(args); i += 2 { - if args[i] == "--env" && i+1 < len(args) { - if args[i+1] == "\"JAVA_HOME=${JAVA_HOME}\"" { - foundJava = true - break - } - } - } - assert.True(t, foundJava, "Should include \"JAVA_HOME=${JAVA_HOME}\" in correct format with outer double quotes") -} diff --git a/pkg/workflow/git_patch_test.go b/pkg/workflow/git_patch_test.go index c7e0cc4191..eb69232313 100644 --- a/pkg/workflow/git_patch_test.go +++ b/pkg/workflow/git_patch_test.go @@ -89,7 +89,7 @@ Please do the following tasks: } // Verify the upload step uses actions/upload-artifact - if !strings.Contains(lockContent, "uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f") { + if !strings.Contains(lockContent, "uses: actions/upload-artifact@") { // SHA varies t.Error("Expected upload-artifact action to be used for unified artifact upload step") } diff --git a/pkg/workflow/inline_imports_test.go b/pkg/workflow/inline_imports_test.go index 70e7ac7fd9..96364357ca 100644 --- a/pkg/workflow/inline_imports_test.go +++ b/pkg/workflow/inline_imports_test.go @@ -12,219 +12,6 @@ import ( "github.com/stretchr/testify/require" ) -// TestInlinedImports_FrontmatterField verifies that inlined-imports: true activates -// compile-time inlining of imports (without inputs) and the main workflow markdown. 
-func TestInlinedImports_FrontmatterField(t *testing.T) { - tmpDir := t.TempDir() - - // Create a shared import file with markdown content - sharedDir := filepath.Join(tmpDir, ".github", "workflows", "shared") - require.NoError(t, os.MkdirAll(sharedDir, 0o755)) - sharedFile := filepath.Join(sharedDir, "common.md") - sharedContent := `--- -tools: - bash: true ---- - -# Shared Instructions - -Always follow best practices. -` - require.NoError(t, os.WriteFile(sharedFile, []byte(sharedContent), 0o644)) - - // Create the main workflow file with inlined-imports: true - workflowDir := filepath.Join(tmpDir, ".github", "workflows") - workflowFile := filepath.Join(workflowDir, "test-workflow.md") - workflowContent := `--- -name: inlined-imports-test -on: - workflow_dispatch: -permissions: - contents: read -engine: copilot -inlined-imports: true -imports: - - shared/common.md ---- - -# Main Workflow - -This is the main workflow content. -` - require.NoError(t, os.WriteFile(workflowFile, []byte(workflowContent), 0o644)) - - compiler := NewCompiler( - WithNoEmit(true), - WithSkipValidation(true), - ) - - wd, err := compiler.ParseWorkflowFile(workflowFile) - require.NoError(t, err, "should parse workflow file") - require.NotNil(t, wd) - - // WorkflowData.InlinedImports should be true (parsed into the workspace data) - assert.True(t, wd.InlinedImports, "WorkflowData.InlinedImports should be true") - - // ParsedFrontmatter should also have InlinedImports = true - require.NotNil(t, wd.ParsedFrontmatter, "ParsedFrontmatter should not be nil") - assert.True(t, wd.ParsedFrontmatter.InlinedImports, "InlinedImports should be true") - - // Compile and get YAML - yamlContent, err := compiler.CompileToYAML(wd, workflowFile) - require.NoError(t, err, "should compile workflow") - require.NotEmpty(t, yamlContent, "YAML should not be empty") - - // With inlined-imports: true, the import should be inlined (no runtime-import macros) - assert.NotContains(t, yamlContent, "{{#runtime-import", 
"should not generate any runtime-import macros") - - // The shared content should be inlined in the prompt - assert.Contains(t, yamlContent, "Shared Instructions", "shared import content should be inlined") - assert.Contains(t, yamlContent, "Always follow best practices", "shared import content should be inlined") - - // The main workflow content should also be inlined (no runtime-import for main file) - assert.Contains(t, yamlContent, "Main Workflow", "main workflow content should be inlined") - assert.Contains(t, yamlContent, "This is the main workflow content", "main workflow content should be inlined") -} - -// TestInlinedImports_Disabled verifies that without inlined-imports, runtime-import macros are used. -func TestInlinedImports_Disabled(t *testing.T) { - tmpDir := t.TempDir() - - sharedDir := filepath.Join(tmpDir, ".github", "workflows", "shared") - require.NoError(t, os.MkdirAll(sharedDir, 0o755)) - sharedFile := filepath.Join(sharedDir, "common.md") - sharedContent := `--- -tools: - bash: true ---- - -# Shared Instructions - -Always follow best practices. -` - require.NoError(t, os.WriteFile(sharedFile, []byte(sharedContent), 0o644)) - - workflowDir := filepath.Join(tmpDir, ".github", "workflows") - workflowFile := filepath.Join(workflowDir, "test-workflow.md") - workflowContent := `--- -name: no-inlined-imports-test -on: - workflow_dispatch: -permissions: - contents: read -engine: copilot -imports: - - shared/common.md ---- - -# Main Workflow - -This is the main workflow content. 
-` - require.NoError(t, os.WriteFile(workflowFile, []byte(workflowContent), 0o644)) - - compiler := NewCompiler( - WithNoEmit(true), - WithSkipValidation(true), - ) - - wd, err := compiler.ParseWorkflowFile(workflowFile) - require.NoError(t, err, "should parse workflow file") - require.NotNil(t, wd) - - require.NotNil(t, wd.ParsedFrontmatter, "ParsedFrontmatter should be populated") - assert.False(t, wd.ParsedFrontmatter.InlinedImports, "InlinedImports should be false by default") - - yamlContent, err := compiler.CompileToYAML(wd, workflowFile) - require.NoError(t, err, "should compile workflow") - - // Without inlined-imports, the import should use runtime-import macro (with full path from workspace root) - assert.Contains(t, yamlContent, "{{#runtime-import .github/workflows/shared/common.md}}", "should generate runtime-import macro for import") - - // The main workflow markdown should also use a runtime-import macro - assert.Contains(t, yamlContent, "{{#runtime-import .github/workflows/test-workflow.md}}", "should generate runtime-import macro for main workflow") -} - -// TestInlinedImports_HashChangesWithBody verifies that the frontmatter hash includes -// the entire markdown body when inlined-imports: true. 
-func TestInlinedImports_HashChangesWithBody(t *testing.T) { - tmpDir := t.TempDir() - - content1 := `--- -name: test -on: - workflow_dispatch: -inlined-imports: true -engine: copilot ---- - -# Original body -` - content2 := `--- -name: test -on: - workflow_dispatch: -inlined-imports: true -engine: copilot ---- - -# Modified body - different -` - // Normal mode (no inlined-imports) - body changes should not affect hash - contentNormal1 := `--- -name: test -on: - workflow_dispatch: -engine: copilot ---- - -# Body variant A -` - contentNormal2 := `--- -name: test -on: - workflow_dispatch: -engine: copilot ---- - -# Body variant B - same hash expected -` - - file1 := filepath.Join(tmpDir, "test1.md") - file2 := filepath.Join(tmpDir, "test2.md") - fileN1 := filepath.Join(tmpDir, "normal1.md") - fileN2 := filepath.Join(tmpDir, "normal2.md") - require.NoError(t, os.WriteFile(file1, []byte(content1), 0o644)) - require.NoError(t, os.WriteFile(file2, []byte(content2), 0o644)) - require.NoError(t, os.WriteFile(fileN1, []byte(contentNormal1), 0o644)) - require.NoError(t, os.WriteFile(fileN2, []byte(contentNormal2), 0o644)) - - cache := parser.NewImportCache(tmpDir) - - hash1, err := parser.ComputeFrontmatterHashFromFile(file1, cache) - require.NoError(t, err) - hash2, err := parser.ComputeFrontmatterHashFromFile(file2, cache) - require.NoError(t, err) - hashN1, err := parser.ComputeFrontmatterHashFromFile(fileN1, cache) - require.NoError(t, err) - hashN2, err := parser.ComputeFrontmatterHashFromFile(fileN2, cache) - require.NoError(t, err) - - // With inlined-imports: true, different body content should produce different hashes - assert.NotEqual(t, hash1, hash2, - "with inlined-imports: true, different body content should produce different hashes") - - // Without inlined-imports, body-only changes produce the same hash - // (only env./vars. 
expressions from body are included) - assert.Equal(t, hashN1, hashN2, - "without inlined-imports, body-only changes should not affect hash") - - // inlined-imports mode should also produce a different hash than normal mode - // (frontmatter text differs, so hash differs regardless of body treatment) - assert.NotEqual(t, hash1, hashN1, - "inlined-imports and normal mode should produce different hashes (different frontmatter)") -} - // TestInlinedImports_FrontmatterHashInline_SameBodySameHash verifies determinism. func TestInlinedImports_FrontmatterHashInline_SameBodySameHash(t *testing.T) { tmpDir := t.TempDir() @@ -252,47 +39,6 @@ engine: copilot assert.Equal(t, hash1, hash2, "same content should produce the same hash") } -// TestInlinedImports_InlinePromptActivated verifies that inlined-imports also activates inline prompt mode. -func TestInlinedImports_InlinePromptActivated(t *testing.T) { - tmpDir := t.TempDir() - - workflowDir := filepath.Join(tmpDir, ".github", "workflows") - require.NoError(t, os.MkdirAll(workflowDir, 0o755)) - workflowFile := filepath.Join(workflowDir, "inline-test.md") - workflowContent := `--- -name: inline-test -on: - workflow_dispatch: -permissions: - contents: read -engine: copilot -inlined-imports: true ---- - -# My Workflow - -Do something useful. 
-` - require.NoError(t, os.WriteFile(workflowFile, []byte(workflowContent), 0o644)) - - compiler := NewCompiler( - WithNoEmit(true), - WithSkipValidation(true), - ) - - wd, err := compiler.ParseWorkflowFile(workflowFile) - require.NoError(t, err) - - yamlContent, err := compiler.CompileToYAML(wd, workflowFile) - require.NoError(t, err) - - // When inlined-imports is true, the main markdown body is also inlined (no runtime-import for main file) - assert.NotContains(t, yamlContent, "{{#runtime-import", "should not generate any runtime-import macros") - // Main workflow content should be inlined - assert.Contains(t, yamlContent, "My Workflow", "main workflow content should be inlined") - assert.Contains(t, yamlContent, "Do something useful", "main workflow body should be inlined") -} - // TestInlinedImports_AgentFileError verifies that when inlined-imports: true and a custom agent // file is imported, ParseWorkflowFile returns a compilation error. // Agent files require runtime access and will not be resolved without sources. 
diff --git a/pkg/workflow/js.go b/pkg/workflow/js.go index 5d14936e91..737f4c70d5 100644 --- a/pkg/workflow/js.go +++ b/pkg/workflow/js.go @@ -97,10 +97,6 @@ func GetLogParserBootstrap() string { return "" } -func GetSafeOutputsMCPServerScript() string { - return "" -} - func GetSafeOutputsToolsJSON() string { return safeOutputsToolsJSONContent } @@ -121,10 +117,6 @@ func GetMCPLoggerScript() string { return "" } -func GetSafeInputsMCPServerScript() string { - return "" -} - func GetSafeInputsMCPServerHTTPScript() string { return "" } @@ -133,14 +125,6 @@ func GetSafeInputsConfigLoaderScript() string { return "" } -func GetSafeInputsToolFactoryScript() string { - return "" -} - -func GetSafeInputsBootstrapScript() string { - return "" -} - func GetSafeInputsValidationScript() string { return "" } @@ -153,26 +137,6 @@ func GetMCPHandlerPythonScript() string { return "" } -func GetSafeOutputsConfigScript() string { - return "" -} - -func GetSafeOutputsAppendScript() string { - return "" -} - -func GetSafeOutputsHandlersScript() string { - return "" -} - -func GetSafeOutputsToolsLoaderScript() string { - return "" -} - -func GetSafeOutputsBootstrapScript() string { - return "" -} - // Helper functions for formatting JavaScript in YAML func removeJavaScriptComments(code string) string { @@ -543,64 +507,4 @@ func WriteJavaScriptToYAML(yaml *strings.Builder, script string) { } } -// WriteJavaScriptToYAMLPreservingComments writes a JavaScript script with proper indentation to a strings.Builder -// while preserving JSDoc and inline comments, but removing TypeScript-specific comments. -// Used for security-sensitive scripts like redact_secrets. 
-func WriteJavaScriptToYAMLPreservingComments(yaml *strings.Builder, script string) { - // Validate that script is not empty - this helps catch errors where getter functions - // return empty strings after embedded scripts were removed - if strings.TrimSpace(script) == "" { - jsLog.Print("WARNING: Attempted to write empty JavaScript script to YAML (preserving comments)") - return - } - - scriptLines := strings.Split(script, "\n") - previousLineWasEmpty := false - hasWrittenContent := false // Track if we've written any content yet - - for i, line := range scriptLines { - trimmed := strings.TrimSpace(line) - - // Skip TypeScript-specific comments - if strings.HasPrefix(trimmed, "// @ts-") || strings.HasPrefix(trimmed, "/// = 0x2194 && r <= 0x27BF) || // arrows, misc symbols, dingbats + (r >= 0xFE00 && r <= 0xFE0F) || // variation selectors + r >= 0x1F000 // supplementary multilingual plane (most modern emoji) +} + func scanUnicodeAbuse(content string) []SecurityFinding { var findings []SecurityFinding lines := strings.Split(content, "\n") @@ -222,6 +236,7 @@ func scanUnicodeAbuse(content string) []SecurityFinding { lineNo := lineNum + 1 // Check for zero-width and invisible characters + var prevRune rune for i := 0; i < len(line); { r, size := utf8.DecodeRuneInString(line[i:]) if r == utf8.RuneError && size <= 1 { @@ -230,6 +245,22 @@ func scanUnicodeAbuse(content string) []SecurityFinding { } if name, ok := dangerousUnicodeRunes[r]; ok { + // U+200D (ZWJ) is a standard component of emoji sequences such as + // 🧑‍🤝‍🧑 (people holding hands) or 👨‍👩‍👧 (family). Only flag it when + // it is NOT flanked by emoji-range codepoints on both sides. + // prevRune is 0 (null) at the start of each line, so a ZWJ at + // the beginning of a line is always flagged (isEmojiLike(0)==false). 
+ if r == '\u200D' { + var nextRune rune + if i+size < len(line) { + nextRune, _ = utf8.DecodeRuneInString(line[i+size:]) + } + if isEmojiLike(prevRune) && isEmojiLike(nextRune) { + prevRune = r + i += size + continue + } + } findings = append(findings, SecurityFinding{ Category: CategoryUnicodeAbuse, Description: "contains invisible character: " + name, @@ -260,6 +291,7 @@ func scanUnicodeAbuse(content string) []SecurityFinding { } } + prevRune = r i += size } } diff --git a/pkg/workflow/markdown_security_scanner_test.go b/pkg/workflow/markdown_security_scanner_test.go index a54b9b5ac0..69fdade36e 100644 --- a/pkg/workflow/markdown_security_scanner_test.go +++ b/pkg/workflow/markdown_security_scanner_test.go @@ -115,7 +115,70 @@ func TestScanMarkdownSecurity_UnicodeAbuse_AllowsNormalWhitespace(t *testing.T) assert.Empty(t, findings, "should not flag normal whitespace characters") } -// --- Hidden Content Tests --- +func TestScanMarkdownSecurity_UnicodeAbuse_AllowsEmojiZWJ(t *testing.T) { + // ZWJ (U+200D) is legitimate between emoji codepoints and must not be flagged. 
+ tests := []struct { + name string + content string + }{ + { + name: "people holding hands (issue report example)", + content: "Status: \U0001F9D1\u200D\U0001F91D\u200D\U0001F9D1 Team Triage", + }, + { + name: "family emoji", + content: "Group: \U0001F468\u200D\U0001F469\u200D\U0001F467", + }, + { + name: "woman technologist", + content: "Role: \U0001F469\u200D\U0001F4BB", + }, + { + name: "rainbow flag (variation selector before ZWJ)", + content: "Flag: \U0001F3F3\uFE0F\u200D\U0001F308", + }, + { + name: "couple with heart (symbol-range emoji before ZWJ)", + content: "Love: \U0001F468\u200D\u2764\uFE0F\u200D\U0001F469", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + findings := ScanMarkdownSecurity(tt.content) + assert.Empty(t, findings, "should not flag emoji ZWJ sequence in %s", tt.name) + }) + } +} + +func TestScanMarkdownSecurity_UnicodeAbuse_FlagsNonEmojiZWJ(t *testing.T) { + // ZWJ between non-emoji (ASCII) characters is still suspicious and must be flagged. 
+ tests := []struct { + name string + content string + }{ + { + name: "ZWJ between ASCII letters", + content: "Hello\u200Dworld", + }, + { + name: "ZWJ at start of text", + content: "\u200DHello", + }, + { + name: "ZWJ at end of line", + content: "Hello\u200D", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + findings := ScanMarkdownSecurity(tt.content) + require.NotEmpty(t, findings, "should flag ZWJ outside emoji sequence in %s", tt.name) + assert.Equal(t, CategoryUnicodeAbuse, findings[0].Category, "category should be unicode-abuse") + }) + } +} func TestScanMarkdownSecurity_HiddenContent_SuspiciousHTMLComments(t *testing.T) { tests := []struct { diff --git a/pkg/workflow/markdown_unfencing.go b/pkg/workflow/markdown_unfencing.go deleted file mode 100644 index 12e1c2746f..0000000000 --- a/pkg/workflow/markdown_unfencing.go +++ /dev/null @@ -1,141 +0,0 @@ -package workflow - -import ( - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var markdownUnfencingLog = logger.New("workflow:markdown_unfencing") - -// UnfenceMarkdown removes an outer code fence from markdown content if the entire -// content is wrapped in a markdown/md code fence. This handles cases where agents -// accidentally wrap the entire markdown body in a code fence. -// -// The function detects: -// - Content starting with ```markdown, ```md, ~~~markdown, or ~~~md (case insensitive) -// - Content ending with ``` or ~~~ -// - The closing fence must match the opening fence type (backticks or tildes) -// -// Returns the unfenced content if a wrapping fence is detected, otherwise returns -// the original content unchanged. 
-func UnfenceMarkdown(content string) string { - if content == "" { - return content - } - - markdownUnfencingLog.Printf("Checking content for outer markdown fence (%d bytes)", len(content)) - - // Trim leading/trailing whitespace for analysis - trimmed := strings.TrimSpace(content) - - // Check for opening fence: ```markdown, ```md, ~~~markdown, or ~~~md - // Must be at the start of the content (after trimming) - lines := strings.Split(trimmed, "\n") - if len(lines) < 2 { - // Need at least opening fence and closing fence - return content - } - - firstLine := strings.TrimSpace(lines[0]) - lastLine := strings.TrimSpace(lines[len(lines)-1]) - - // Check if first line is a markdown code fence - var fenceChar string - var fenceLength int - var isMarkdownFence bool - - // Check for backtick fences (3 or more backticks) - if strings.HasPrefix(firstLine, "```") { - fenceChar = "`" - // Count the number of consecutive backticks - fenceLength = 0 - for _, ch := range firstLine { - if ch == '`' { - fenceLength++ - } else { - break - } - } - remainder := strings.TrimSpace(firstLine[fenceLength:]) - // Check if it's markdown or md language tag or empty - if remainder == "" || strings.EqualFold(remainder, "markdown") || strings.EqualFold(remainder, "md") { - isMarkdownFence = true - } - } else if strings.HasPrefix(firstLine, "~~~") { - // Check for tilde fences (3 or more tildes) - fenceChar = "~" - // Count the number of consecutive tildes - fenceLength = 0 - for _, ch := range firstLine { - if ch == '~' { - fenceLength++ - } else { - break - } - } - remainder := strings.TrimSpace(firstLine[fenceLength:]) - // Check if it's markdown or md language tag or empty - if remainder == "" || strings.EqualFold(remainder, "markdown") || strings.EqualFold(remainder, "md") { - isMarkdownFence = true - } - } - - if !isMarkdownFence { - // Not a markdown fence, return original content - markdownUnfencingLog.Print("No outer markdown fence detected, returning content unchanged") - return 
content - } - - markdownUnfencingLog.Printf("Detected opening markdown fence: char=%q, length=%d", fenceChar, fenceLength) - - // Check if last line is a matching closing fence - // Must have at least as many fence characters as the opening fence - var isClosingFence bool - if fenceChar == "`" { - // Count backticks in last line - closingFenceLength := 0 - for _, ch := range lastLine { - if ch == '`' { - closingFenceLength++ - } else { - break - } - } - // Must have at least as many backticks as opening fence - if closingFenceLength >= fenceLength && strings.TrimSpace(lastLine[closingFenceLength:]) == "" { - isClosingFence = true - } - } else if fenceChar == "~" { - // Count tildes in last line - closingFenceLength := 0 - for _, ch := range lastLine { - if ch == '~' { - closingFenceLength++ - } else { - break - } - } - // Must have at least as many tildes as opening fence - if closingFenceLength >= fenceLength && strings.TrimSpace(lastLine[closingFenceLength:]) == "" { - isClosingFence = true - } - } - - if !isClosingFence { - // No matching closing fence, return original content - markdownUnfencingLog.Print("No matching closing fence found, returning content unchanged") - return content - } - - // Extract the content between the fences - // Remove first and last lines - innerLines := lines[1 : len(lines)-1] - innerContent := strings.Join(innerLines, "\n") - - markdownUnfencingLog.Printf("Unfenced markdown content: removed outer %s fence", fenceChar) - - // Return the inner content with original leading/trailing whitespace style preserved - // We preserve the trimming behavior that was applied - return strings.TrimSpace(innerContent) -} diff --git a/pkg/workflow/markdown_unfencing_test.go b/pkg/workflow/markdown_unfencing_test.go deleted file mode 100644 index cd2e182a64..0000000000 --- a/pkg/workflow/markdown_unfencing_test.go +++ /dev/null @@ -1,277 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" - - 
"github.com/stretchr/testify/assert" -) - -func TestUnfenceMarkdown(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "basic markdown fence with backticks", - input: "```markdown\nThis is the content\n```", - expected: "This is the content", - }, - { - name: "markdown fence with md language tag", - input: "```md\nThis is the content\n```", - expected: "This is the content", - }, - { - name: "markdown fence with tildes", - input: "~~~markdown\nThis is the content\n~~~", - expected: "This is the content", - }, - { - name: "markdown fence with md and tildes", - input: "~~~md\nThis is the content\n~~~", - expected: "This is the content", - }, - { - name: "markdown fence with no language tag", - input: "```\nThis is the content\n```", - expected: "This is the content", - }, - { - name: "markdown fence with multiline content", - input: "```markdown\nLine 1\nLine 2\nLine 3\n```", - expected: "Line 1\nLine 2\nLine 3", - }, - { - name: "markdown fence with nested code blocks", - input: "```markdown\nHere is some code:\n```javascript\nconsole.log(\"hello\");\n```\n```", - expected: "Here is some code:\n```javascript\nconsole.log(\"hello\");\n```", - }, - { - name: "markdown fence with leading and trailing whitespace", - input: " ```markdown\nContent here\n``` ", - expected: "Content here", - }, - { - name: "markdown fence case insensitive", - input: "```MARKDOWN\nContent\n```", - expected: "Content", - }, - { - name: "markdown fence with MD uppercase", - input: "```MD\nContent\n```", - expected: "Content", - }, - { - name: "not a markdown fence - different language", - input: "```javascript\nconsole.log(\"test\");\n```", - expected: "```javascript\nconsole.log(\"test\");\n```", - }, - { - name: "not fenced - no closing fence", - input: "```markdown\nThis has no closing fence", - expected: "```markdown\nThis has no closing fence", - }, - { - name: "not fenced - mismatched fence types", - input: "```markdown\nContent\n~~~", - 
expected: "```markdown\nContent\n~~~", - }, - { - name: "not fenced - content before opening fence", - input: "Some text before\n```markdown\nContent\n```", - expected: "Some text before\n```markdown\nContent\n```", - }, - { - name: "not fenced - content after closing fence", - input: "```markdown\nContent\n```\nSome text after", - expected: "```markdown\nContent\n```\nSome text after", - }, - { - name: "empty string", - input: "", - expected: "", - }, - { - name: "only whitespace", - input: " \n\t\t\t\n\t\t\t", - expected: " \n\t\t\t\n\t\t\t", - }, - { - name: "single line", - input: "```markdown", - expected: "```markdown", - }, - { - name: "markdown fence with empty content", - input: "```markdown\n```", - expected: "", - }, - { - name: "markdown fence with only whitespace content", - input: "```markdown\n \n```", - expected: "", - }, - { - name: "markdown fence with complex nested structures", - input: "```markdown\n# Heading\n\nSome text with **bold** and *italic*.\n\n```python\ndef hello():\n print(\"world\")\n```\n\nMore text here.\n```", - expected: "# Heading\n\nSome text with **bold** and *italic*.\n\n```python\ndef hello():\n print(\"world\")\n```\n\nMore text here.", - }, - { - name: "markdown fence with special characters", - input: "```markdown\nContent with ${{ github.actor }} and @mentions\n```", - expected: "Content with ${{ github.actor }} and @mentions", - }, - { - name: "longer backtick fence", - input: "````markdown\nContent\n````", - expected: "Content", - }, - { - name: "longer tilde fence", - input: "~~~~markdown\nContent\n~~~~", - expected: "Content", - }, - { - name: "markdown fence with extra spaces in language tag", - input: "``` markdown \nContent\n```", - expected: "Content", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := UnfenceMarkdown(tt.input) - assert.Equal(t, tt.expected, result, "Unfenced content should match expected") - }) - } -} - -func TestUnfenceMarkdownPreservesNonWrappedContent(t 
*testing.T) { - // Test that normal markdown content is not modified - tests := []struct { - name string - input string - }{ - { - name: "normal markdown with headers", - input: "# Title\n\nSome content here.\n\n## Subtitle\n\nMore content.", - }, - { - name: "markdown with multiple code blocks", - input: "Some text\n\n```javascript\ncode1();\n```\n\nMore text\n\n```python\ncode2()\n```", - }, - { - name: "markdown with inline code", - input: "Use `code` for inline code snippets.", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := UnfenceMarkdown(tt.input) - assert.Equal(t, tt.input, result, "Non-wrapped content should remain unchanged") - }) - } -} - -func TestUnfenceMarkdownFenceLengthMatching(t *testing.T) { - // Test that fence lengths must match (closing must be >= opening) - tests := []struct { - name string - input string - expected string - }{ - { - name: "4 backticks opening, 4 backticks closing", - input: "````markdown\nContent\n````", - expected: "Content", - }, - { - name: "4 backticks opening, 5 backticks closing", - input: "````markdown\nContent\n`````", - expected: "Content", - }, - { - name: "5 backticks opening, 5 backticks closing", - input: "`````markdown\nContent\n`````", - expected: "Content", - }, - { - name: "3 backticks opening, 4 backticks closing", - input: "```markdown\nContent\n````", - expected: "Content", - }, - { - name: "4 backticks opening, 3 backticks closing - should not unfence", - input: "````markdown\nContent\n```", - expected: "````markdown\nContent\n```", - }, - { - name: "10 backticks opening, 10 backticks closing", - input: "``````````markdown\nContent\n``````````", - expected: "Content", - }, - { - name: "4 tildes opening, 4 tildes closing", - input: "~~~~markdown\nContent\n~~~~", - expected: "Content", - }, - { - name: "5 tildes opening, 6 tildes closing", - input: "~~~~~markdown\nContent\n~~~~~~", - expected: "Content", - }, - { - name: "4 tildes opening, 3 tildes closing - should 
not unfence", - input: "~~~~markdown\nContent\n~~~", - expected: "~~~~markdown\nContent\n~~~", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := UnfenceMarkdown(tt.input) - assert.Equal(t, tt.expected, result, "Fence length matching should work correctly") - }) - } -} - -func TestUnfenceMarkdownRealWorldExamples(t *testing.T) { - // Test real-world examples that might come from agents - tests := []struct { - name string - input string - expected string - }{ - { - name: "agent response with issue update", - input: "```markdown\n# Issue Analysis\n\nI've reviewed the code and found the following:\n\n- Bug in line 42\n- Missing validation\n```", - expected: "# Issue Analysis\n\nI've reviewed the code and found the following:\n\n- Bug in line 42\n- Missing validation", - }, - { - name: "agent response with code examples", - input: "```markdown\nHere's the fix:\n\n```go\nfunc Fix() {\n // Fixed code\n}\n```\n\nThis should resolve the issue.\n```", - expected: "Here's the fix:\n\n```go\nfunc Fix() {\n // Fixed code\n}\n```\n\nThis should resolve the issue.", - }, - { - name: "agent response with multiple sections", - input: "```md\n## Summary\n\nCompleted the task.\n\n## Changes\n\n- Updated file A\n- Fixed bug in B\n\n## Testing\n\nAll tests pass.\n```", - expected: "## Summary\n\nCompleted the task.\n\n## Changes\n\n- Updated file A\n- Fixed bug in B\n\n## Testing\n\nAll tests pass.", - }, - { - name: "plain markdown without fence - no change", - input: "## Summary\n\nTask completed successfully.", - expected: "## Summary\n\nTask completed successfully.", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := UnfenceMarkdown(tt.input) - assert.Equal(t, tt.expected, result, "Real-world examples should unfence correctly") - }) - } -} diff --git a/pkg/workflow/mcp_gateway_config.go b/pkg/workflow/mcp_gateway_config.go index e1d29fb26f..e09b719c98 100644 --- a/pkg/workflow/mcp_gateway_config.go +++ 
b/pkg/workflow/mcp_gateway_config.go @@ -119,14 +119,22 @@ func buildMCPGatewayConfig(workflowData *WorkflowData) *MCPGatewayRuntimeConfig // Ensure default configuration is set ensureDefaultMCPGatewayConfig(workflowData) + // Get payload size threshold (use default if not configured) + payloadSizeThreshold := workflowData.SandboxConfig.MCP.PayloadSizeThreshold + if payloadSizeThreshold == 0 { + payloadSizeThreshold = constants.DefaultMCPGatewayPayloadSizeThreshold + } + // Return gateway config with required fields populated // Use ${...} syntax for environment variable references that will be resolved by the gateway at runtime // Per MCP Gateway Specification v1.0.0 section 4.2, variable expressions use "${VARIABLE_NAME}" syntax return &MCPGatewayRuntimeConfig{ - Port: int(DefaultMCPGatewayPort), // Will be formatted as "${MCP_GATEWAY_PORT}" in renderer - Domain: "${MCP_GATEWAY_DOMAIN}", // Gateway variable expression - APIKey: "${MCP_GATEWAY_API_KEY}", // Gateway variable expression - PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", // Gateway variable expression for payload directory + Port: int(DefaultMCPGatewayPort), // Will be formatted as "${MCP_GATEWAY_PORT}" in renderer + Domain: "${MCP_GATEWAY_DOMAIN}", // Gateway variable expression + APIKey: "${MCP_GATEWAY_API_KEY}", // Gateway variable expression + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", // Gateway variable expression for payload directory + PayloadPathPrefix: workflowData.SandboxConfig.MCP.PayloadPathPrefix, // Optional path prefix for agent containers + PayloadSizeThreshold: payloadSizeThreshold, // Size threshold in bytes } } diff --git a/pkg/workflow/mcp_gateway_config_test.go b/pkg/workflow/mcp_gateway_config_test.go index 3259c74c70..55dab161c2 100644 --- a/pkg/workflow/mcp_gateway_config_test.go +++ b/pkg/workflow/mcp_gateway_config_test.go @@ -165,6 +165,38 @@ func TestEnsureDefaultMCPGatewayConfig(t *testing.T) { assert.Equal(t, "/custom/payloads", wd.SandboxConfig.MCP.PayloadDir, "Custom 
payloadDir should be preserved") }, }, + { + name: "preserves payloadPathPrefix when specified", + workflowData: &WorkflowData{ + SandboxConfig: &SandboxConfig{ + MCP: &MCPGatewayRuntimeConfig{ + Container: "custom-container", + Version: "v1.0.0", + Port: 8080, + PayloadPathPrefix: "/workspace/payloads", + }, + }, + }, + validate: func(t *testing.T, wd *WorkflowData) { + assert.Equal(t, "/workspace/payloads", wd.SandboxConfig.MCP.PayloadPathPrefix, "PayloadPathPrefix should be preserved") + }, + }, + { + name: "preserves payloadSizeThreshold when specified", + workflowData: &WorkflowData{ + SandboxConfig: &SandboxConfig{ + MCP: &MCPGatewayRuntimeConfig{ + Container: "custom-container", + Version: "v1.0.0", + Port: 8080, + PayloadSizeThreshold: 1048576, // 1MB + }, + }, + }, + validate: func(t *testing.T, wd *WorkflowData) { + assert.Equal(t, 1048576, wd.SandboxConfig.MCP.PayloadSizeThreshold, "PayloadSizeThreshold should be preserved") + }, + }, } for _, tt := range tests { @@ -196,20 +228,22 @@ func TestBuildMCPGatewayConfig(t *testing.T) { }, }, expected: &MCPGatewayRuntimeConfig{ - Port: int(DefaultMCPGatewayPort), - Domain: "${MCP_GATEWAY_DOMAIN}", - APIKey: "${MCP_GATEWAY_API_KEY}", - PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadSizeThreshold: constants.DefaultMCPGatewayPayloadSizeThreshold, }, }, { name: "creates default gateway config", workflowData: &WorkflowData{}, expected: &MCPGatewayRuntimeConfig{ - Port: int(DefaultMCPGatewayPort), - Domain: "${MCP_GATEWAY_DOMAIN}", - APIKey: "${MCP_GATEWAY_API_KEY}", - PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadSizeThreshold: constants.DefaultMCPGatewayPayloadSizeThreshold, }, }, { @@ -222,10 +256,63 @@ func 
TestBuildMCPGatewayConfig(t *testing.T) { }, }, expected: &MCPGatewayRuntimeConfig{ - Port: int(DefaultMCPGatewayPort), - Domain: "${MCP_GATEWAY_DOMAIN}", - APIKey: "${MCP_GATEWAY_API_KEY}", - PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadSizeThreshold: constants.DefaultMCPGatewayPayloadSizeThreshold, + }, + }, + { + name: "with custom payloadPathPrefix", + workflowData: &WorkflowData{ + SandboxConfig: &SandboxConfig{ + MCP: &MCPGatewayRuntimeConfig{ + PayloadPathPrefix: "/workspace/payloads", + }, + }, + }, + expected: &MCPGatewayRuntimeConfig{ + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadPathPrefix: "/workspace/payloads", + PayloadSizeThreshold: constants.DefaultMCPGatewayPayloadSizeThreshold, + }, + }, + { + name: "with custom payloadSizeThreshold", + workflowData: &WorkflowData{ + SandboxConfig: &SandboxConfig{ + MCP: &MCPGatewayRuntimeConfig{ + PayloadSizeThreshold: 1048576, // 1MB + }, + }, + }, + expected: &MCPGatewayRuntimeConfig{ + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadSizeThreshold: 1048576, + }, + }, + { + name: "uses default payloadSizeThreshold when not specified", + workflowData: &WorkflowData{ + SandboxConfig: &SandboxConfig{ + MCP: &MCPGatewayRuntimeConfig{ + // PayloadSizeThreshold not specified + }, + }, + }, + expected: &MCPGatewayRuntimeConfig{ + Port: int(DefaultMCPGatewayPort), + Domain: "${MCP_GATEWAY_DOMAIN}", + APIKey: "${MCP_GATEWAY_API_KEY}", + PayloadDir: "${MCP_GATEWAY_PAYLOAD_DIR}", + PayloadSizeThreshold: constants.DefaultMCPGatewayPayloadSizeThreshold, }, }, } @@ -241,6 +328,8 @@ func TestBuildMCPGatewayConfig(t *testing.T) { assert.Equal(t, 
tt.expected.Domain, result.Domain, "Domain should match") assert.Equal(t, tt.expected.APIKey, result.APIKey, "APIKey should match") assert.Equal(t, tt.expected.PayloadDir, result.PayloadDir, "PayloadDir should match") + assert.Equal(t, tt.expected.PayloadPathPrefix, result.PayloadPathPrefix, "PayloadPathPrefix should match") + assert.Equal(t, tt.expected.PayloadSizeThreshold, result.PayloadSizeThreshold, "PayloadSizeThreshold should match") } }) } diff --git a/pkg/workflow/mcp_github_config.go b/pkg/workflow/mcp_github_config.go index 1e954ccaa4..9da6c1949d 100644 --- a/pkg/workflow/mcp_github_config.go +++ b/pkg/workflow/mcp_github_config.go @@ -241,6 +241,29 @@ func getGitHubAllowedTools(githubTool any) []string { return nil } +// getGitHubGuardPolicies extracts guard policies from GitHub tool configuration. +// It reads the flat repos/min-integrity fields and wraps them for MCP gateway rendering. +// Returns nil if no guard policies are configured. +func getGitHubGuardPolicies(githubTool any) map[string]any { + if toolConfig, ok := githubTool.(map[string]any); ok { + repos, hasRepos := toolConfig["repos"] + integrity, hasIntegrity := toolConfig["min-integrity"] + if hasRepos || hasIntegrity { + policy := map[string]any{} + if hasRepos { + policy["repos"] = repos + } + if hasIntegrity { + policy["min-integrity"] = integrity + } + return map[string]any{ + "allow-only": policy, + } + } + } + return nil +} + func getGitHubDockerImageVersion(githubTool any) string { githubDockerImageVersion := string(constants.DefaultGitHubMCPServerVersion) // Default Docker image version // Extract version setting from tool properties diff --git a/pkg/workflow/mcp_logs_upload_test.go b/pkg/workflow/mcp_logs_upload_test.go index 0c40048590..c8df7f9494 100644 --- a/pkg/workflow/mcp_logs_upload_test.go +++ b/pkg/workflow/mcp_logs_upload_test.go @@ -65,8 +65,8 @@ Please navigate to example.com and take a screenshot. 
t.Error("Expected 'Upload agent artifacts' step to be in generated workflow") } - // Verify the upload step uses actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f - if !strings.Contains(lockContentStr, "uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f") { + // Verify the upload step uses actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f + if !strings.Contains(lockContentStr, "uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f") { t.Error("Expected upload-artifact action to be used for artifact upload step") } @@ -167,8 +167,8 @@ This workflow does not use Playwright but should still have MCP logs upload. t.Error("Expected MCP logs path in unified artifact upload even when Playwright is not used") } - // Verify the upload step uses actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f - if !strings.Contains(lockContentStr, "uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f") { + // Verify the upload step uses actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f + if !strings.Contains(lockContentStr, "uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f") { t.Error("Expected upload-artifact action to be used for artifact upload step") } diff --git a/pkg/workflow/mcp_renderer.go b/pkg/workflow/mcp_renderer.go index 79395b84b5..c497939d83 100644 --- a/pkg/workflow/mcp_renderer.go +++ b/pkg/workflow/mcp_renderer.go @@ -76,6 +76,7 @@ package workflow import ( + "encoding/json" "fmt" "os" "sort" @@ -168,6 +169,7 @@ func (r *MCPConfigRendererUnified) RenderGitHubMCP(yaml *strings.Builder, github IncludeToolsField: r.options.IncludeCopilotFields, AllowedTools: getGitHubAllowedTools(githubTool), IncludeEnvSection: r.options.IncludeCopilotFields, + GuardPolicies: getGitHubGuardPolicies(githubTool), }) } else { // Local mode - use Docker-based GitHub MCP server (default) @@ -186,6 +188,7 @@ func (r *MCPConfigRendererUnified) RenderGitHubMCP(yaml 
*strings.Builder, github IncludeTypeField: r.options.IncludeCopilotFields, AllowedTools: getGitHubAllowedTools(githubTool), EffectiveToken: "", // Token passed via env + GuardPolicies: getGitHubGuardPolicies(githubTool), }) } @@ -676,6 +679,8 @@ type GitHubMCPDockerOptions struct { EffectiveToken string // Mounts specifies volume mounts for the GitHub MCP server container (format: "host:container:mode") Mounts []string + // GuardPolicies specifies access control policies for the MCP gateway (e.g., allow-only repos/integrity) + GuardPolicies map[string]any } // RenderGitHubMCPDockerConfig renders the GitHub MCP server configuration for Docker (local mode). @@ -771,7 +776,13 @@ func RenderGitHubMCPDockerConfig(yaml *strings.Builder, options GitHubMCPDockerO fmt.Fprintf(yaml, " \"%s\": \"%s\"%s\n", key, envVars[key], comma) } - yaml.WriteString(" }\n") + // Close env section, with trailing comma if guard-policies follows + if len(options.GuardPolicies) > 0 { + yaml.WriteString(" },\n") + renderGuardPoliciesJSON(yaml, options.GuardPolicies, " ") + } else { + yaml.WriteString(" }\n") + } } // GitHubMCPRemoteOptions defines configuration for GitHub MCP remote mode rendering @@ -794,6 +805,8 @@ type GitHubMCPRemoteOptions struct { AllowedTools []string // IncludeEnvSection indicates whether to include the env section (Copilot needs it, Claude doesn't) IncludeEnvSection bool + // GuardPolicies specifies access control policies for the MCP gateway (e.g., allow-only repos/integrity) + GuardPolicies map[string]any } // RenderGitHubMCPRemoteConfig renders the GitHub MCP server configuration for remote (hosted) mode. 
@@ -836,7 +849,7 @@ func RenderGitHubMCPRemoteConfig(yaml *strings.Builder, options GitHubMCPRemoteO writeHeadersToYAML(yaml, headers, " ") // Close headers section - if options.IncludeToolsField || options.IncludeEnvSection { + if options.IncludeToolsField || options.IncludeEnvSection || len(options.GuardPolicies) > 0 { yaml.WriteString(" },\n") } else { yaml.WriteString(" }\n") @@ -856,7 +869,7 @@ func RenderGitHubMCPRemoteConfig(yaml *strings.Builder, options GitHubMCPRemoteO } yaml.WriteString("\n") } - if options.IncludeEnvSection { + if options.IncludeEnvSection || len(options.GuardPolicies) > 0 { yaml.WriteString(" ],\n") } else { yaml.WriteString(" ]\n") @@ -867,10 +880,38 @@ func RenderGitHubMCPRemoteConfig(yaml *strings.Builder, options GitHubMCPRemoteO if options.IncludeEnvSection { yaml.WriteString(" \"env\": {\n") yaml.WriteString(" \"GITHUB_PERSONAL_ACCESS_TOKEN\": \"\\${GITHUB_MCP_SERVER_TOKEN}\"\n") - yaml.WriteString(" }\n") + // Close env section, with trailing comma if guard-policies follows + if len(options.GuardPolicies) > 0 { + yaml.WriteString(" },\n") + } else { + yaml.WriteString(" }\n") + } + } + + // Add guard-policies if configured + if len(options.GuardPolicies) > 0 { + renderGuardPoliciesJSON(yaml, options.GuardPolicies, " ") } } +// renderGuardPoliciesJSON renders a "guard-policies" JSON field at the given indent level. +// The policies map contains policy names (e.g., "allow-only") mapped to their configurations. +// Renders as the last field (no trailing comma) with the given base indent. 
+func renderGuardPoliciesJSON(yaml *strings.Builder, policies map[string]any, indent string) { + if len(policies) == 0 { + return + } + + // Marshal to JSON with indentation, then re-indent to match the current indent level + jsonBytes, err := json.MarshalIndent(policies, indent, " ") + if err != nil { + mcpRendererLog.Printf("Failed to marshal guard-policies: %v", err) + return + } + + fmt.Fprintf(yaml, "%s\"guard-policies\": %s\n", indent, string(jsonBytes)) +} + // RenderJSONMCPConfig renders MCP configuration in JSON format with the common mcpServers structure. // This shared function extracts the duplicate pattern from Claude, Copilot, and Custom engines. // @@ -952,12 +993,18 @@ func RenderJSONMCPConfig( fmt.Fprintf(&configBuilder, " \"port\": $MCP_GATEWAY_PORT,\n") fmt.Fprintf(&configBuilder, " \"domain\": \"%s\",\n", options.GatewayConfig.Domain) fmt.Fprintf(&configBuilder, " \"apiKey\": \"%s\"", options.GatewayConfig.APIKey) - // Add payloadDir if specified + + // Add optional fields if specified (apiKey always precedes them without a trailing comma) if options.GatewayConfig.PayloadDir != "" { - fmt.Fprintf(&configBuilder, ",\n \"payloadDir\": \"%s\"\n", options.GatewayConfig.PayloadDir) - } else { - configBuilder.WriteString("\n") + fmt.Fprintf(&configBuilder, ",\n \"payloadDir\": \"%s\"", options.GatewayConfig.PayloadDir) + } + if options.GatewayConfig.PayloadPathPrefix != "" { + fmt.Fprintf(&configBuilder, ",\n \"payloadPathPrefix\": \"%s\"", options.GatewayConfig.PayloadPathPrefix) + } + if options.GatewayConfig.PayloadSizeThreshold > 0 { + fmt.Fprintf(&configBuilder, ",\n \"payloadSizeThreshold\": %d", options.GatewayConfig.PayloadSizeThreshold) } + configBuilder.WriteString("\n") configBuilder.WriteString(" }\n") } else { configBuilder.WriteString(" }\n") diff --git a/pkg/workflow/mcp_setup_generator.go b/pkg/workflow/mcp_setup_generator.go index a1d122656e..5900afcef5 100644 --- a/pkg/workflow/mcp_setup_generator.go +++ 
b/pkg/workflow/mcp_setup_generator.go @@ -513,6 +513,19 @@ func (c *Compiler) generateMCPSetup(yaml *strings.Builder, tools map[string]any, yaml.WriteString(" export MCP_GATEWAY_PAYLOAD_DIR=\"" + payloadDir + "\"\n") yaml.WriteString(" mkdir -p \"${MCP_GATEWAY_PAYLOAD_DIR}\"\n") + // Export payload path prefix if configured + payloadPathPrefix := gatewayConfig.PayloadPathPrefix + if payloadPathPrefix != "" { + yaml.WriteString(" export MCP_GATEWAY_PAYLOAD_PATH_PREFIX=\"" + payloadPathPrefix + "\"\n") + } + + // Export payload size threshold (use default if not configured) + payloadSizeThreshold := gatewayConfig.PayloadSizeThreshold + if payloadSizeThreshold == 0 { + payloadSizeThreshold = constants.DefaultMCPGatewayPayloadSizeThreshold + } + yaml.WriteString(" export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD=\"" + strconv.Itoa(payloadSizeThreshold) + "\"\n") + yaml.WriteString(" export DEBUG=\"*\"\n") yaml.WriteString(" \n") @@ -555,6 +568,10 @@ func (c *Compiler) generateMCPSetup(yaml *strings.Builder, tools map[string]any, containerCmd.WriteString(" -e MCP_GATEWAY_DOMAIN") containerCmd.WriteString(" -e MCP_GATEWAY_API_KEY") containerCmd.WriteString(" -e MCP_GATEWAY_PAYLOAD_DIR") + if payloadPathPrefix != "" { + containerCmd.WriteString(" -e MCP_GATEWAY_PAYLOAD_PATH_PREFIX") + } + containerCmd.WriteString(" -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD") containerCmd.WriteString(" -e DEBUG") // Pass environment variables that MCP servers reference in their config // These are needed because awmg v0.0.12+ validates and resolves ${VAR} patterns at config load time diff --git a/pkg/workflow/metrics_test.go b/pkg/workflow/metrics_test.go index 8ad1a4ad57..72d384997a 100644 --- a/pkg/workflow/metrics_test.go +++ b/pkg/workflow/metrics_test.go @@ -5,8 +5,6 @@ package workflow import ( "encoding/json" "testing" - - "github.com/github/gh-aw/pkg/logger" ) func TestExtractFirstMatch(t *testing.T) { @@ -668,94 +666,6 @@ func TestPrettifyToolName(t *testing.T) { } } -func 
TestExtractErrorMessage(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "Simple error message", - input: "Failed to connect to server", - expected: "Failed to connect to server", - }, - { - name: "Error with timestamp prefix", - input: "2024-01-01 12:00:00 Connection timeout", - expected: "Connection timeout", - }, - { - name: "Error with timestamp and milliseconds", - input: "2024-01-01 12:00:00.123 Connection refused", - expected: "Connection refused", - }, - { - name: "Error with bracket timestamp", - input: "[12:00:00] Permission denied", - expected: "Permission denied", - }, - { - name: "Error with ERROR prefix", - input: "ERROR: File not found", - expected: "File not found", - }, - { - name: "Error with [ERROR] prefix", - input: "[ERROR] Invalid configuration", - expected: "Invalid configuration", - }, - { - name: "Warning with WARN prefix", - input: "WARN - Deprecated API usage", - expected: "Deprecated API usage", - }, - { - name: "Error with WARNING prefix", - input: "WARNING: Resource limit reached", - expected: "Resource limit reached", - }, - { - name: "Timestamp and log level combined", - input: "2024-01-01 12:00:00 ERROR: Failed to initialize", - expected: "Failed to initialize", - }, - { - name: "Very long message truncation", - input: "This is a very long error message that exceeds the maximum character limit and should be truncated to prevent overly verbose output in the audit report which could make it harder to read and understand the key issues", - expected: "This is a very long error message that exceeds the maximum character limit and should be truncated to prevent overly verbose output in the audit report which could make it harder to read and unders...", - }, - { - name: "Empty string", - input: "", - expected: "", - }, - { - name: "Only whitespace", - input: " \t ", - expected: "", - }, - { - name: "Case insensitive ERROR prefix", - input: "error: Connection failed", - expected: 
"Connection failed", - }, - { - name: "Mixed case WARNING prefix", - input: "Warning: Low memory", - expected: "Low memory", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := logger.ExtractErrorMessage(tt.input) - if result != tt.expected { - t.Errorf("logger.ExtractErrorMessage(%q) = %q, want %q", tt.input, result, tt.expected) - } - }) - } -} - func TestFinalizeToolMetrics(t *testing.T) { tests := []struct { name string diff --git a/pkg/workflow/multiline_test.go b/pkg/workflow/multiline_test.go index 2e11a40f61..6c4f621c5b 100644 --- a/pkg/workflow/multiline_test.go +++ b/pkg/workflow/multiline_test.go @@ -23,7 +23,7 @@ func TestMultilineStringHandling(t *testing.T) { name: "multiline script in with parameters", stepMap: map[string]any{ "name": "Test Script", - "uses": "actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd", + "uses": "actions/github-script@v7", "with": map[string]any{ "script": `const fs = require('fs'); const data = { @@ -36,7 +36,7 @@ console.log(data);`, }, shouldContain: []string{ "name: Test Script", - "uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd", + "uses: actions/github-script@v7", "with:", "script: |-", // goccy/go-yaml uses |- (literal strip scalar) " const fs = require('fs');", @@ -53,7 +53,7 @@ console.log(data);`, name: "simple single-line with parameters", stepMap: map[string]any{ "name": "Simple Test", - "uses": "actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f", + "uses": "actions/setup-node@v4", "with": map[string]any{ "node-version": "18", "cache": "npm", @@ -61,7 +61,7 @@ console.log(data);`, }, shouldContain: []string{ "name: Simple Test", - "uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f", + "uses: actions/setup-node@v4", "with:", "node-version: \"18\"", // goccy/go-yaml quotes numeric strings "cache: npm", @@ -129,7 +129,7 @@ func TestEngineStepSerialization(t *testing.T) { stepMap := map[string]any{ "name": "Test 
multiline in engine", - "uses": "actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd", + "uses": "actions/github-script@v7", "with": map[string]any{ "script": `const multiline = 'hello'; This is a multiline diff --git a/pkg/workflow/prompt_constants.go b/pkg/workflow/prompt_constants.go new file mode 100644 index 0000000000..2f9d9c39be --- /dev/null +++ b/pkg/workflow/prompt_constants.go @@ -0,0 +1,28 @@ +package workflow + +import _ "embed" + +// Prompt file paths at runtime (copied by setup action to /opt/gh-aw/prompts) +const ( + promptsDir = "/opt/gh-aw/prompts" + prContextPromptFile = "pr_context_prompt.md" + tempFolderPromptFile = "temp_folder_prompt.md" + playwrightPromptFile = "playwright_prompt.md" + markdownPromptFile = "markdown.md" + xpiaPromptFile = "xpia.md" + cacheMemoryPromptFile = "cache_memory_prompt.md" + cacheMemoryPromptMultiFile = "cache_memory_prompt_multi.md" + repoMemoryPromptFile = "repo_memory_prompt.md" + repoMemoryPromptMultiFile = "repo_memory_prompt_multi.md" + safeOutputsPromptFile = "safe_outputs_prompt.md" + safeOutputsCreatePRFile = "safe_outputs_create_pull_request.md" + safeOutputsPushToBranchFile = "safe_outputs_push_to_pr_branch.md" + safeOutputsAutoCreateIssueFile = "safe_outputs_auto_create_issue.md" +) + +// GitHub context prompt is kept embedded because it contains GitHub Actions expressions +// that need to be extracted at compile time. Moving this to a runtime file would require +// reading and parsing the file during compilation, which is more complex. 
+// +//go:embed prompts/github_context_prompt.md +var githubContextPromptText string diff --git a/pkg/workflow/prompt_step.go b/pkg/workflow/prompt_step.go deleted file mode 100644 index 1e2341b3e8..0000000000 --- a/pkg/workflow/prompt_step.go +++ /dev/null @@ -1,64 +0,0 @@ -package workflow - -import ( - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var promptStepLog = logger.New("workflow:prompt_step") - -// appendPromptStep generates a workflow step that appends content to the prompt file. -// It encapsulates the common YAML scaffolding for prompt-related steps, reducing duplication -// across multiple prompt generation helpers. -// -// Parameters: -// - yaml: The string builder to write the YAML to -// - stepName: The name of the workflow step (e.g., "Append XPIA security instructions to prompt") -// - renderer: A function that writes the actual prompt content to the YAML -// - condition: Optional condition string to add an 'if:' clause (empty string means no condition) -// - indent: The indentation to use for nested content (typically " ") -func appendPromptStep(yaml *strings.Builder, stepName string, renderer func(*strings.Builder, string), condition string, indent string) { - promptStepLog.Printf("Appending prompt step: name=%s, hasCondition=%v", stepName, condition != "") - - yaml.WriteString(" - name: " + stepName + "\n") - - // Add conditional if provided - if condition != "" { - promptStepLog.Printf("Adding condition: %s", condition) - yaml.WriteString(" if: " + condition + "\n") - } - - yaml.WriteString(" env:\n") - yaml.WriteString(" GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt\n") - yaml.WriteString(" run: |\n") - - // Call the renderer to write the actual content - renderer(yaml, indent) - promptStepLog.Print("Prompt step appended successfully") -} - -// appendPromptStepWithHeredoc generates a workflow step that appends content to the prompt file -// using a heredoc (cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" pattern). 
-// This is used by compiler functions that need to embed static structured content without variable substitution. -// -// Parameters: -// - yaml: The string builder to write the YAML to -// - stepName: The name of the workflow step -// - renderer: A function that writes the content between the heredoc markers -func appendPromptStepWithHeredoc(yaml *strings.Builder, stepName string, renderer func(*strings.Builder)) { - promptStepLog.Printf("Appending prompt step with heredoc: name=%s", stepName) - - delimiter := GenerateHeredocDelimiter("PROMPT") - yaml.WriteString(" - name: " + stepName + "\n") - yaml.WriteString(" env:\n") - yaml.WriteString(" GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt\n") - yaml.WriteString(" run: |\n") - yaml.WriteString(" cat << '" + delimiter + "' >> \"$GH_AW_PROMPT\"\n") - - // Call the renderer to write the content - renderer(yaml) - - yaml.WriteString(" " + delimiter + "\n") - promptStepLog.Print("Heredoc prompt step appended successfully") -} diff --git a/pkg/workflow/prompt_step_helper_test.go b/pkg/workflow/prompt_step_helper_test.go deleted file mode 100644 index e2f0dc6145..0000000000 --- a/pkg/workflow/prompt_step_helper_test.go +++ /dev/null @@ -1,138 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestGenerateStaticPromptStep(t *testing.T) { - tests := []struct { - name string - description string - promptText string - shouldInclude bool - wantOutput bool - wantInOutput []string - }{ - { - name: "generates step when shouldInclude is true", - description: "Append test instructions to prompt", - promptText: "Test prompt content\nLine 2", - shouldInclude: true, - wantOutput: true, - wantInOutput: []string{ - "- name: Append test instructions to prompt", - "GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt", - `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`, - "Test prompt content", - "Line 2", - "EOF", - }, - }, - { - name: "skips generation when shouldInclude is false", - 
description: "Append skipped instructions to prompt", - promptText: "This should not appear", - shouldInclude: false, - wantOutput: false, - wantInOutput: []string{}, - }, - { - name: "handles multiline prompt text correctly", - description: "Append multiline instructions to prompt", - promptText: "Line 1\nLine 2\nLine 3\nLine 4", - shouldInclude: true, - wantOutput: true, - wantInOutput: []string{ - "Line 1", - "Line 2", - "Line 3", - "Line 4", - }, - }, - { - name: "handles empty prompt text", - description: "Append empty instructions to prompt", - promptText: "", - shouldInclude: true, - wantOutput: true, - wantInOutput: []string{ - "- name: Append empty instructions to prompt", - `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`, - "EOF", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var yaml strings.Builder - - generateStaticPromptStep(&yaml, tt.description, tt.promptText, tt.shouldInclude) - output := yaml.String() - - if tt.wantOutput { - if output == "" { - t.Error("Expected output to be generated, but got empty string") - } - - // Check that all expected strings are present - for _, want := range tt.wantInOutput { - if !strings.Contains(output, want) { - t.Errorf("Expected output to contain %q, but it didn't.\nGot:\n%s", want, output) - } - } - } else { - if output != "" { - t.Errorf("Expected no output when shouldInclude is false, but got:\n%s", output) - } - } - }) - } -} - -func TestGenerateStaticPromptStepConsistencyWithOriginal(t *testing.T) { - // Test that the new helper produces the same output as the original implementation - // by comparing with a known-good expected structure from appendPromptStep - - tests := []struct { - name string - description string - promptText string - }{ - { - name: "temp folder style prompt", - description: "Append temporary folder instructions to prompt", - promptText: "Use /tmp/gh-aw/agent/ directory", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - 
// Generate using new helper - var helperYaml strings.Builder - generateStaticPromptStep(&helperYaml, tt.description, tt.promptText, true) - - // Generate using original pattern - var originalYaml strings.Builder - appendPromptStep(&originalYaml, - tt.description, - func(y *strings.Builder, indent string) { - WritePromptTextToYAML(y, tt.promptText, indent) - }, - "", // no condition - " ") - - helperOutput := helperYaml.String() - originalOutput := originalYaml.String() - - // Compare outputs - if helperOutput != originalOutput { - t.Errorf("Helper output does not match original.\nHelper:\n%s\nOriginal:\n%s", - helperOutput, originalOutput) - } - }) - } -} diff --git a/pkg/workflow/prompt_step_test.go b/pkg/workflow/prompt_step_test.go deleted file mode 100644 index 2dd62173a6..0000000000 --- a/pkg/workflow/prompt_step_test.go +++ /dev/null @@ -1,146 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestAppendPromptStep(t *testing.T) { - tests := []struct { - name string - stepName string - condition string - wantSteps []string - }{ - { - name: "basic step without condition", - stepName: "Append test instructions to prompt", - condition: "", - wantSteps: []string{ - "- name: Append test instructions to prompt", - "env:", - "GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt", - "run: |", - `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`, - "Test prompt content", - "GH_AW_PROMPT_EOF", - }, - }, - { - name: "step with condition", - stepName: "Append conditional instructions to prompt", - condition: "github.event.issue != null", - wantSteps: []string{ - "- name: Append conditional instructions to prompt", - "if: github.event.issue != null", - "env:", - "GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt", - "run: |", - `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`, - "Conditional prompt content", - "GH_AW_PROMPT_EOF", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var yaml strings.Builder 
- - // Call the helper with a simple renderer - var promptContent string - if tt.condition == "" { - promptContent = "Test prompt content" - } else { - promptContent = "Conditional prompt content" - } - - appendPromptStep(&yaml, tt.stepName, func(y *strings.Builder, indent string) { - WritePromptTextToYAML(y, promptContent, indent) - }, tt.condition, " ") - - result := yaml.String() - - // Check that all expected strings are present - for _, want := range tt.wantSteps { - if !strings.Contains(result, want) { - t.Errorf("Expected output to contain %q, but it didn't.\nGot:\n%s", want, result) - } - } - }) - } -} - -func TestAppendPromptStepWithHeredoc(t *testing.T) { - tests := []struct { - name string - stepName string - content string - wantSteps []string - }{ - { - name: "basic heredoc step", - stepName: "Append structured data to prompt", - content: "Structured content line 1\nStructured content line 2", - wantSteps: []string{ - "- name: Append structured data to prompt", - "env:", - "GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt", - "run: |", - `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`, - "Structured content line 1", - "Structured content line 2", - "GH_AW_PROMPT_EOF", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var yaml strings.Builder - - appendPromptStepWithHeredoc(&yaml, tt.stepName, func(y *strings.Builder) { - y.WriteString(tt.content) - }) - - result := yaml.String() - - // Check that all expected strings are present - for _, want := range tt.wantSteps { - if !strings.Contains(result, want) { - t.Errorf("Expected output to contain %q, but it didn't.\nGot:\n%s", want, result) - } - } - }) - } -} - -func TestPromptStepRefactoringConsistency(t *testing.T) { - // Test that the unified prompt step includes temp folder instructions - // (Previously tested individual prompt steps, now tests unified approach) - - t.Run("unified_prompt_step includes temp_folder", func(t *testing.T) { - var yaml strings.Builder - 
compiler := &Compiler{} - data := &WorkflowData{ - ParsedTools: NewTools(map[string]any{}), - } - compiler.generateUnifiedPromptStep(&yaml, data) - - result := yaml.String() - - // Verify key elements are present - if !strings.Contains(result, "Create prompt with built-in context") { - t.Error("Expected unified step name not found") - } - if !strings.Contains(result, "GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt") { - t.Error("Expected GH_AW_PROMPT env variable not found") - } - // Verify temp folder instructions are included - if !strings.Contains(result, `cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT"`) { - t.Error("Expected cat command for temp folder prompt file not found") - } - }) -} diff --git a/pkg/workflow/runtime_import_checkout_test.go b/pkg/workflow/runtime_import_checkout_test.go index b1d440561b..3de3a88dfb 100644 --- a/pkg/workflow/runtime_import_checkout_test.go +++ b/pkg/workflow/runtime_import_checkout_test.go @@ -277,3 +277,91 @@ features: // These are the default behaviors of actions/checkout when no parameters are specified // For runtime-imports, this is exactly what we want - minimal checkout with no credentials } + +// TestActivationJobCheckoutWithoutExplicitContentsRead verifies that the activation job +// still gets the checkout step for .github and .agents folders even when the workflow +// does not explicitly specify contents: read permission. This is critical for runtime-imports +// to work correctly, since the activation job always has contents: read added to it. 
+func TestActivationJobCheckoutWithoutExplicitContentsRead(t *testing.T) { + // This workflow only has issues: read permission, no explicit contents: read + // The activation job should still have the checkout step because it always gets + // contents: read added for GitHub API access and runtime imports + frontmatter := `--- +on: + workflow_dispatch: +permissions: + issues: read +engine: claude +strict: false +---` + markdown := "# Agent\n\nCreate an issue with title \"Test\" and body \"Hello World\"." + + tmpDir := testutil.TempDir(t, "activation-checkout-no-contents-test") + + // Create workflow file + workflowPath := filepath.Join(tmpDir, "test.md") + content := frontmatter + "\n\n" + markdown + if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil { + t.Fatalf("Failed to write workflow file: %v", err) + } + + // Compile the workflow + compiler := NewCompiler() + if err := compiler.CompileWorkflow(workflowPath); err != nil { + t.Fatalf("Failed to compile workflow: %v", err) + } + + // Calculate the lock file path + lockFile := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml" + + // Read the generated lock file + lockContent, err := os.ReadFile(lockFile) + if err != nil { + t.Fatalf("Failed to read lock file: %v", err) + } + + lockContentStr := string(lockContent) + + // Find the activation job section + activationJobStart := strings.Index(lockContentStr, "activation:") + if activationJobStart == -1 { + t.Fatal("Activation job not found in compiled workflow") + } + + // Find the end of the activation job (next job definition) + activationJobEnd := len(lockContentStr) + nextJobPattern := "\n " + searchStart := activationJobStart + len("activation:") + remaining := lockContentStr[searchStart:] + lines := strings.Split(remaining, "\n") + charCount := 0 + for i, line := range lines { + charCount += len(line) + 1 // +1 for newline + if i > 0 && len(line) > 2 && line[0:2] == " " && line[2] != ' ' && strings.Contains(line, ":") { + 
activationJobEnd = searchStart + charCount - len(line) - 1 + break + } + } + _ = nextJobPattern // silence unused warning + + activationJobSection := lockContentStr[activationJobStart:activationJobEnd] + + // Verify that the activation job contains the checkout step for .github and .agents folders + if !strings.Contains(activationJobSection, "Checkout .github and .agents folders") { + t.Error("Activation job should contain 'Checkout .github and .agents folders' step even without explicit contents: read permission") + t.Logf("Activation job section:\n%s", activationJobSection) + } + + // Verify the checkout has sparse-checkout configuration + if !strings.Contains(activationJobSection, "sparse-checkout:") { + t.Error("Checkout step should use sparse-checkout") + } + + // Verify both .github and .agents are in the sparse-checkout + if !strings.Contains(activationJobSection, ".github") { + t.Error("Sparse checkout should include .github folder") + } + if !strings.Contains(activationJobSection, ".agents") { + t.Error("Sparse checkout should include .agents folder") + } +} diff --git a/pkg/workflow/runtime_integration_test.go b/pkg/workflow/runtime_integration_test.go index 1ba9eb2144..46b1470f6d 100644 --- a/pkg/workflow/runtime_integration_test.go +++ b/pkg/workflow/runtime_integration_test.go @@ -239,7 +239,7 @@ Test workflow with runtime overrides applied to steps. lockStr := string(lockContent) // Verify that Node.js setup step is included with version 22 - if !strings.Contains(lockStr, "actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238") { + if !strings.Contains(lockStr, "uses: actions/setup-node@") { // SHA varies t.Error("Expected setup-node action in lock file") } if !strings.Contains(lockStr, "node-version: '22'") { @@ -355,7 +355,7 @@ Test workflow that uses Go without go.mod file. 
if !strings.Contains(lockStr, "Setup Go") { t.Error("Expected 'Setup Go' step in lock file") } - if !strings.Contains(lockStr, "actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5") { + if !strings.Contains(lockStr, "actions/setup-go@") { t.Error("Expected actions/setup-go action in lock file") } if !strings.Contains(lockStr, "go-version: '1.25'") { diff --git a/pkg/workflow/runtime_setup_integration_test.go b/pkg/workflow/runtime_setup_integration_test.go index c25f213615..914cc2a6fb 100644 --- a/pkg/workflow/runtime_setup_integration_test.go +++ b/pkg/workflow/runtime_setup_integration_test.go @@ -3,6 +3,7 @@ package workflow import ( + "fmt" "os" "strings" "testing" @@ -50,7 +51,7 @@ steps: # Test workflow`, expectSetup: []string{ "Setup Node.js", - "actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238", + "uses: actions/setup-node@", // SHA varies "node-version: '24'", }, }, @@ -67,7 +68,7 @@ steps: # Test workflow`, expectSetup: []string{ "Setup Python", - "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065", + "uses: actions/setup-python@", // SHA varies "python-version: '3.12'", }, }, @@ -84,7 +85,7 @@ steps: # Test workflow`, expectSetup: []string{ "Setup uv", - "astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86", + "uses: astral-sh/setup-uv@", // SHA varies }, }, { @@ -112,7 +113,7 @@ on: push engine: copilot steps: - name: Setup Node.js - uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f + uses: actions/setup-node@v4 # SHA will be pinned with: node-version: '20' - name: Install @@ -142,7 +143,7 @@ mcp-servers: # Test workflow`, expectSetup: []string{ "Setup Python", - "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065", + "uses: actions/setup-python@", // SHA varies }, }, { @@ -195,14 +196,29 @@ steps: // Check expected setup steps for _, expected := range tt.expectSetup { if !strings.Contains(lockContent, expected) { - t.Errorf("Expected to find '%s' in lock file but didn't.\nLock file 
content:\n%s", expected, lockContent) + // Show a snippet of the lock file for context (first 100 lines) + lines := strings.Split(lockContent, "\n") + snippet := strings.Join(lines[:min(100, len(lines))], "\n") + t.Errorf("Expected to find '%s' in lock file but didn't.\nFirst 100 lines:\n%s\n...(truncated)", expected, snippet) } } // Check that unwanted setup steps are not present for _, notExpected := range tt.notExpectSetup { if strings.Contains(lockContent, notExpected) { - t.Errorf("Did not expect to find '%s' in lock file but it was present.\nLock file content:\n%s", notExpected, lockContent) + // Find the line containing the unexpected string for context + lines := strings.Split(lockContent, "\n") + var contextLines []string + for i, line := range lines { + if strings.Contains(line, notExpected) { + start := max(0, i-3) + end := min(len(lines), i+4) + contextLines = append(contextLines, fmt.Sprintf("Lines %d-%d:", start+1, end)) + contextLines = append(contextLines, lines[start:end]...) 
+ break + } + } + t.Errorf("Did not expect to find '%s' in lock file but it was present.\nContext:\n%s", notExpected, strings.Join(contextLines, "\n")) } } }) @@ -260,7 +276,7 @@ on: push engine: copilot steps: - name: Setup Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 + uses: actions/setup-python@v5 # SHA will be pinned with: python-version: '3.9' - name: Run script diff --git a/pkg/workflow/runtime_setup_test.go b/pkg/workflow/runtime_setup_test.go index 38725a8485..e8da50075a 100644 --- a/pkg/workflow/runtime_setup_test.go +++ b/pkg/workflow/runtime_setup_test.go @@ -285,7 +285,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Bun", - "oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3", + "oven-sh/setup-bun@", "bun-version: '1.1'", }, }, @@ -297,7 +297,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Node.js", - "actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238", + "actions/setup-node@", "node-version: '20'", }, }, @@ -309,7 +309,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Python", - "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065", + "actions/setup-python@", "python-version: '3.11'", }, }, @@ -321,7 +321,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup uv", - "astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86", + "astral-sh/setup-uv@", }, }, { @@ -332,7 +332,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, // setup only - PATH inherited via AWF_HOST_PATH in chroot mode checkContent: []string{ "Setup .NET", - "actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9", + "actions/setup-dotnet@", "dotnet-version: '8.0'", }, }, @@ -344,7 +344,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, // setup only - PATH inherited 
via AWF_HOST_PATH in chroot mode checkContent: []string{ "Setup Java", - "actions/setup-java@c1e323688fd81a25caa38c78aa6df2d33d3e20d9", + "actions/setup-java@", "java-version: '21'", "distribution: temurin", }, @@ -357,7 +357,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Elixir", - "erlef/setup-beam@dff508cca8ce57162e7aa6c4769a4f97c2fed638", + "erlef/setup-beam@", "elixir-version: '1.17'", }, }, @@ -369,7 +369,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Haskell", - "haskell-actions/setup@9cd1b7bf3f36d5a3c3b17abc3545bfb5481912ea", + "haskell-actions/setup@", "ghc-version: '9.10'", }, }, @@ -403,7 +403,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 2, // setup + GOROOT capture for AWF chroot mode checkContent: []string{ "Setup Go", - "actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5", + "actions/setup-go@", "go-version: '1.22'", "Capture GOROOT for AWF chroot mode", }, @@ -416,7 +416,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 2, // setup + GOROOT capture for AWF chroot mode checkContent: []string{ "Setup Go", - "actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5", + "actions/setup-go@", "go-version: '1.25'", "Capture GOROOT for AWF chroot mode", }, @@ -429,7 +429,7 @@ func TestGenerateRuntimeSetupSteps(t *testing.T) { expectSteps: 2, // setup + GOROOT capture for AWF chroot mode checkContent: []string{ "Setup Go", - "actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5", + "actions/setup-go@", "go-version-file: custom/go.mod", "cache: true", "Capture GOROOT for AWF chroot mode", @@ -693,7 +693,7 @@ func TestDeduplicatePreservesUserPythonVersion(t *testing.T) { // and runs a python command, which auto-detects Python runtime customSteps := `steps: - name: Setup Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 + uses: 
actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 with: python-version: '3.9' - name: Run script @@ -744,7 +744,7 @@ func TestDeduplicatePreservesUserPythonVersion(t *testing.T) { } // Verify the user's step still has the SHA reference - if !strings.Contains(deduplicatedSteps, "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065") { + if !strings.Contains(deduplicatedSteps, "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405") { t.Error("Expected deduplicated steps to preserve user's SHA reference") } } @@ -824,7 +824,7 @@ func TestGenerateRuntimeSetupStepsWithIfCondition(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup uv", - "astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86", + "astral-sh/setup-uv@", "if: hashFiles('uv.lock') != ''", }, }, @@ -840,7 +840,7 @@ func TestGenerateRuntimeSetupStepsWithIfCondition(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Python", - "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065", + "actions/setup-python@", "python-version: '3.11'", "if: hashFiles('requirements.txt') != '' || hashFiles('pyproject.toml') != ''", }, @@ -857,7 +857,7 @@ func TestGenerateRuntimeSetupStepsWithIfCondition(t *testing.T) { expectSteps: 1, checkContent: []string{ "Setup Node.js", - "actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238", + "actions/setup-node@", "node-version: '20'", "if: hashFiles('package.json') != ''", }, diff --git a/pkg/workflow/safe_output_builder.go b/pkg/workflow/safe_output_builder.go deleted file mode 100644 index e03d459ddc..0000000000 --- a/pkg/workflow/safe_output_builder.go +++ /dev/null @@ -1,202 +0,0 @@ -package workflow - -import ( - "fmt" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputBuilderLog = logger.New("workflow:safe_output_builder") - -// ====================================== -// Generic Env Var Builders -// ====================================== - -// BuildTargetEnvVar builds a target 
environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_CLOSE_ISSUE_TARGET". -// Returns an empty slice if target is empty. -func BuildTargetEnvVar(envVarName string, target string) []string { - if target == "" { - return nil - } - return []string{fmt.Sprintf(" %s: %q\n", envVarName, target)} -} - -// BuildRequiredLabelsEnvVar builds a required-labels environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_CLOSE_ISSUE_REQUIRED_LABELS". -// Returns an empty slice if requiredLabels is empty. -func BuildRequiredLabelsEnvVar(envVarName string, requiredLabels []string) []string { - if len(requiredLabels) == 0 { - return nil - } - labelsStr := strings.Join(requiredLabels, ",") - return []string{fmt.Sprintf(" %s: %q\n", envVarName, labelsStr)} -} - -// BuildRequiredTitlePrefixEnvVar builds a required-title-prefix environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_CLOSE_ISSUE_REQUIRED_TITLE_PREFIX". -// Returns an empty slice if requiredTitlePrefix is empty. -func BuildRequiredTitlePrefixEnvVar(envVarName string, requiredTitlePrefix string) []string { - if requiredTitlePrefix == "" { - return nil - } - return []string{fmt.Sprintf(" %s: %q\n", envVarName, requiredTitlePrefix)} -} - -// BuildRequiredCategoryEnvVar builds a required-category environment variable line for discussion safe-output jobs. -// envVarName should be the full env var name like "GH_AW_CLOSE_DISCUSSION_REQUIRED_CATEGORY". -// Returns an empty slice if requiredCategory is empty. -func BuildRequiredCategoryEnvVar(envVarName string, requiredCategory string) []string { - if requiredCategory == "" { - return nil - } - return []string{fmt.Sprintf(" %s: %q\n", envVarName, requiredCategory)} -} - -// BuildMaxCountEnvVar builds a max count environment variable line for safe-output jobs. 
-// envVarName should be the full env var name like "GH_AW_CLOSE_ISSUE_MAX_COUNT". -func BuildMaxCountEnvVar(envVarName string, maxCount int) []string { - return []string{fmt.Sprintf(" %s: %d\n", envVarName, maxCount)} -} - -// overrideEnvVarLine replaces the first env var line in lines that starts with keyPrefix -// with newLine. If no match is found, newLine is appended. -func overrideEnvVarLine(lines []string, keyPrefix string, newLine string) []string { - for i, line := range lines { - trimmed := strings.TrimSpace(line) - if strings.HasPrefix(trimmed, keyPrefix) { - lines[i] = newLine - return lines - } - } - return append(lines, newLine) -} - -// BuildAllowedListEnvVar builds an allowed list environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_LABELS_ALLOWED". -// Always outputs the env var, even when empty (empty string means "allow all"). -func BuildAllowedListEnvVar(envVarName string, allowed []string) []string { - allowedStr := strings.Join(allowed, ",") - return []string{fmt.Sprintf(" %s: %q\n", envVarName, allowedStr)} -} - -// ====================================== -// Close Job Env Var Builders -// ====================================== - -// BuildCloseJobEnvVars builds common environment variables for close operations. -// prefix should be like "GH_AW_CLOSE_ISSUE" or "GH_AW_CLOSE_PR". -// Returns a slice of environment variable lines. -func BuildCloseJobEnvVars(prefix string, config CloseJobConfig) []string { - var envVars []string - - // Add target - envVars = append(envVars, BuildTargetEnvVar(prefix+"_TARGET", config.Target)...) - - // Add required labels - envVars = append(envVars, BuildRequiredLabelsEnvVar(prefix+"_REQUIRED_LABELS", config.RequiredLabels)...) - - // Add required title prefix - envVars = append(envVars, BuildRequiredTitlePrefixEnvVar(prefix+"_REQUIRED_TITLE_PREFIX", config.RequiredTitlePrefix)...) 
- - return envVars -} - -// ====================================== -// List Job Env Var Builders -// ====================================== - -// BuildListJobEnvVars builds common environment variables for list-based operations. -// prefix should be like "GH_AW_LABELS" or "GH_AW_REVIEWERS". -// Returns a slice of environment variable lines. -func BuildListJobEnvVars(prefix string, config ListJobConfig, maxCount int) []string { - var envVars []string - - // Add allowed list - envVars = append(envVars, BuildAllowedListEnvVar(prefix+"_ALLOWED", config.Allowed)...) - - // Add blocked list - envVars = append(envVars, BuildAllowedListEnvVar(prefix+"_BLOCKED", config.Blocked)...) - - // Add max count - envVars = append(envVars, BuildMaxCountEnvVar(prefix+"_MAX_COUNT", maxCount)...) - - // Add target - envVars = append(envVars, BuildTargetEnvVar(prefix+"_TARGET", config.Target)...) - - return envVars -} - -// ====================================== -// List Job Builder Helpers -// ====================================== - -// ListJobBuilderConfig contains parameters for building list-based safe-output jobs -type ListJobBuilderConfig struct { - JobName string // e.g., "add_labels", "assign_milestone" - StepName string // e.g., "Add Labels", "Assign Milestone" - StepID string // e.g., "add_labels", "assign_milestone" - EnvPrefix string // e.g., "GH_AW_LABELS", "GH_AW_MILESTONE" - OutputName string // e.g., "labels_added", "assigned_milestones" - Script string // JavaScript script for the operation - Permissions *Permissions // Job permissions - DefaultMax int // Default max count if not specified in config - ExtraCondition ConditionNode // Additional condition to append (optional) -} - -// BuildListSafeOutputJob builds a list-based safe-output job using shared logic. -// This consolidates the common builder pattern used by add-labels, assign-milestone, and assign-to-user. 
-func (c *Compiler) BuildListSafeOutputJob(data *WorkflowData, mainJobName string, listJobConfig ListJobConfig, baseSafeOutputConfig BaseSafeOutputConfig, builderConfig ListJobBuilderConfig) (*Job, error) { - safeOutputBuilderLog.Printf("Building list safe-output job: %s", builderConfig.JobName) - - // Handle max count with default – use literal integer if set, else fall back to DefaultMax - maxCount := builderConfig.DefaultMax - if n := templatableIntValue(baseSafeOutputConfig.Max); n > 0 { - maxCount = n - } - safeOutputBuilderLog.Printf("Max count set to: %d", maxCount) - - // Build custom environment variables using shared helpers - customEnvVars := BuildListJobEnvVars(builderConfig.EnvPrefix, listJobConfig, maxCount) - - // If max is a GitHub Actions expression, override with the expression value - if baseSafeOutputConfig.Max != nil && templatableIntValue(baseSafeOutputConfig.Max) == 0 { - exprLine := buildTemplatableIntEnvVar(builderConfig.EnvPrefix+"_MAX_COUNT", baseSafeOutputConfig.Max) - if len(exprLine) > 0 { - prefix := builderConfig.EnvPrefix + "_MAX_COUNT:" - customEnvVars = overrideEnvVarLine(customEnvVars, prefix, exprLine[0]) - } - } - - // Add standard environment variables (metadata + staged/target repo) - customEnvVars = append(customEnvVars, c.buildStandardSafeOutputEnvVars(data, listJobConfig.TargetRepoSlug)...) 
- - // Create outputs for the job - outputs := map[string]string{ - builderConfig.OutputName: fmt.Sprintf("${{ steps.%s.outputs.%s }}", builderConfig.StepID, builderConfig.OutputName), - } - - // Build base job condition - jobCondition := BuildSafeOutputType(builderConfig.JobName) - - // Add extra condition if provided - if builderConfig.ExtraCondition != nil { - jobCondition = BuildAnd(jobCondition, builderConfig.ExtraCondition) - } - - // Use the shared builder function to create the job - return c.buildSafeOutputJob(data, SafeOutputJobConfig{ - JobName: builderConfig.JobName, - StepName: builderConfig.StepName, - StepID: builderConfig.StepID, - MainJobName: mainJobName, - CustomEnvVars: customEnvVars, - Script: builderConfig.Script, - Permissions: builderConfig.Permissions, - Outputs: outputs, - Condition: jobCondition, - Token: baseSafeOutputConfig.GitHubToken, - TargetRepoSlug: listJobConfig.TargetRepoSlug, - }) -} diff --git a/pkg/workflow/safe_output_helpers_test.go b/pkg/workflow/safe_output_helpers_test.go index 647c3d2d04..c3dbd57f92 100644 --- a/pkg/workflow/safe_output_helpers_test.go +++ b/pkg/workflow/safe_output_helpers_test.go @@ -703,7 +703,7 @@ func TestBuildAgentOutputDownloadSteps(t *testing.T) { expectedComponents := []string{ "- name: Download agent output artifact", "continue-on-error: true", - "uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", + "uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3", "name: agent-output", "path: /tmp/gh-aw/safeoutputs/", "- name: Setup agent output environment variable", diff --git a/pkg/workflow/safe_outputs_app_import_test.go b/pkg/workflow/safe_outputs_app_import_test.go index fc535f33cb..ce97525831 100644 --- a/pkg/workflow/safe_outputs_app_import_test.go +++ b/pkg/workflow/safe_outputs_app_import_test.go @@ -5,7 +5,6 @@ package workflow import ( "os" "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" @@ -148,72 +147,3 @@ This workflow 
overrides the imported app configuration. assert.Equal(t, "${{ secrets.LOCAL_APP_SECRET }}", workflowData.SafeOutputs.App.PrivateKey) assert.Equal(t, []string{"repo2"}, workflowData.SafeOutputs.App.Repositories) } - -// TestSafeOutputsAppImportStepGeneration tests that imported app config generates correct steps -func TestSafeOutputsAppImportStepGeneration(t *testing.T) { - compiler := NewCompilerWithVersion("1.0.0") - - // Create a temporary directory for test files - tmpDir := t.TempDir() - workflowsDir := filepath.Join(tmpDir, ".github", "workflows") - err := os.MkdirAll(workflowsDir, 0755) - require.NoError(t, err, "Failed to create workflows directory") - - // Create a shared workflow with app configuration - sharedWorkflow := `--- -safe-outputs: - app: - app-id: ${{ vars.SHARED_APP_ID }} - private-key: ${{ secrets.SHARED_APP_SECRET }} ---- - -# Shared App Configuration -` - - sharedFile := filepath.Join(workflowsDir, "shared-app.md") - err = os.WriteFile(sharedFile, []byte(sharedWorkflow), 0644) - require.NoError(t, err, "Failed to write shared file") - - // Create main workflow that imports the app configuration - mainWorkflow := `--- -on: issues -permissions: - contents: read -imports: - - ./shared-app.md -safe-outputs: - create-issue: ---- - -# Main Workflow -` - - mainFile := filepath.Join(workflowsDir, "main.md") - err = os.WriteFile(mainFile, []byte(mainWorkflow), 0644) - require.NoError(t, err, "Failed to write main file") - - // Change to the workflows directory for relative path resolution - oldDir, err := os.Getwd() - require.NoError(t, err, "Failed to get current directory") - err = os.Chdir(workflowsDir) - require.NoError(t, err, "Failed to change directory") - defer os.Chdir(oldDir) - - // Parse the workflow - workflowData, err := compiler.ParseWorkflowFile("main.md") - require.NoError(t, err, "Failed to parse workflow") - - // Build the safe_outputs job - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main") - require.NoError(t, 
err, "Failed to build safe_outputs job") - require.NotNil(t, job, "Job should not be nil") - - // Convert steps to string - stepsStr := strings.Join(job.Steps, "") - - // Verify token minting and invalidation steps are present - assert.Contains(t, stepsStr, "Generate GitHub App token", "Token minting step should be present") - assert.Contains(t, stepsStr, "Invalidate GitHub App token", "Token invalidation step should be present") - assert.Contains(t, stepsStr, "${{ vars.SHARED_APP_ID }}", "Should use imported app ID") - assert.Contains(t, stepsStr, "${{ secrets.SHARED_APP_SECRET }}", "Should use imported secret") -} diff --git a/pkg/workflow/safe_outputs_app_test.go b/pkg/workflow/safe_outputs_app_test.go index ef79165c9d..698091997e 100644 --- a/pkg/workflow/safe_outputs_app_test.go +++ b/pkg/workflow/safe_outputs_app_test.go @@ -85,104 +85,6 @@ Test workflow with minimal app configuration. assert.Empty(t, workflowData.SafeOutputs.App.Repositories) } -// TestSafeOutputsAppTokenMintingStep tests that token minting step is generated -func TestSafeOutputsAppTokenMintingStep(t *testing.T) { - compiler := NewCompilerWithVersion("1.0.0") - - markdown := `--- -on: issues -permissions: - contents: read -safe-outputs: - create-issue: - app: - app-id: ${{ vars.APP_ID }} - private-key: ${{ secrets.APP_PRIVATE_KEY }} ---- - -# Test Workflow - -Test workflow with app token minting. 
-` - - // Create a temporary test file - tmpDir := t.TempDir() - testFile := filepath.Join(tmpDir, "test.md") - err := os.WriteFile(testFile, []byte(markdown), 0644) - require.NoError(t, err, "Failed to write test file") - - workflowData, err := compiler.ParseWorkflowFile(testFile) - require.NoError(t, err, "Failed to parse markdown content") - - // Build the safe_outputs job - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main") - require.NoError(t, err, "Failed to build safe_outputs job") - require.NotNil(t, job, "Job should not be nil") - - // Convert steps to string for easier assertion - stepsStr := strings.Join(job.Steps, "") - - // Verify token minting step is present - assert.Contains(t, stepsStr, "Generate GitHub App token", "Token minting step should be present") - assert.Contains(t, stepsStr, "actions/create-github-app-token", "Should use create-github-app-token action") - assert.Contains(t, stepsStr, "app-id: ${{ vars.APP_ID }}", "Should use configured app ID") - assert.Contains(t, stepsStr, "private-key: ${{ secrets.APP_PRIVATE_KEY }}", "Should use configured private key") - - // Verify token invalidation step is present - assert.Contains(t, stepsStr, "Invalidate GitHub App token", "Token invalidation step should be present") - assert.Contains(t, stepsStr, "if: always()", "Invalidation step should always run") - assert.Contains(t, stepsStr, "/installation/token", "Should call token invalidation endpoint") - - // Verify token is used in github-script step - assert.Contains(t, stepsStr, "${{ steps.safe-outputs-app-token.outputs.token }}", "Should use app token in github-script") -} - -// TestSafeOutputsAppTokenMintingStepWithRepositories tests token minting with repositories -func TestSafeOutputsAppTokenMintingStepWithRepositories(t *testing.T) { - compiler := NewCompilerWithVersion("1.0.0") - - markdown := `--- -on: issues -permissions: - contents: read -safe-outputs: - create-issue: - app: - app-id: ${{ vars.APP_ID }} - private-key: ${{ 
secrets.APP_PRIVATE_KEY }} - repositories: - - "repo1" - - "repo2" ---- - -# Test Workflow - -Test workflow with app token minting and repository restrictions. -` - - // Create a temporary test file - tmpDir := t.TempDir() - testFile := filepath.Join(tmpDir, "test.md") - err := os.WriteFile(testFile, []byte(markdown), 0644) - require.NoError(t, err, "Failed to write test file") - - workflowData, err := compiler.ParseWorkflowFile(testFile) - require.NoError(t, err, "Failed to parse markdown content") - - // Build the safe_outputs job - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main") - require.NoError(t, err, "Failed to build safe_outputs job") - require.NotNil(t, job, "Job should not be nil") - - // Convert steps to string for easier assertion - stepsStr := strings.Join(job.Steps, "") - - // Verify repositories are included in the minting step using block scalar format - assert.Contains(t, stepsStr, "repositories: |-", "Should use block scalar format for multiple repositories") - assert.Contains(t, stepsStr, "repo1", "Should include first repository") - assert.Contains(t, stepsStr, "repo2", "Should include second repository") -} - // TestSafeOutputsAppWithoutSafeOutputs tests that app without safe outputs doesn't break func TestSafeOutputsAppWithoutSafeOutputs(t *testing.T) { compiler := NewCompilerWithVersion("1.0.0") @@ -209,57 +111,6 @@ Test workflow without safe outputs. assert.Nil(t, workflowData.SafeOutputs, "SafeOutputs should be nil") } -// TestSafeOutputsAppTokenOrgWide tests org-wide GitHub App token with wildcard -func TestSafeOutputsAppTokenOrgWide(t *testing.T) { - compiler := NewCompilerWithVersion("1.0.0") - - markdown := `--- -on: issues -permissions: - contents: read -safe-outputs: - create-issue: - app: - app-id: ${{ vars.APP_ID }} - private-key: ${{ secrets.APP_PRIVATE_KEY }} - repositories: - - "*" ---- - -# Test Workflow - -Test workflow with org-wide app token. 
-` - - // Create a temporary test file - tmpDir := t.TempDir() - testFile := filepath.Join(tmpDir, "test.md") - err := os.WriteFile(testFile, []byte(markdown), 0644) - require.NoError(t, err, "Failed to write test file") - - workflowData, err := compiler.ParseWorkflowFile(testFile) - require.NoError(t, err, "Failed to parse markdown content") - - // Build the safe_outputs job - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main") - require.NoError(t, err, "Failed to build safe_outputs job") - require.NotNil(t, job, "Job should not be nil") - - // Convert steps to string for easier assertion - stepsStr := strings.Join(job.Steps, "") - - // Verify token minting step is present - assert.Contains(t, stepsStr, "Generate GitHub App token", "Token minting step should be present") - assert.Contains(t, stepsStr, "actions/create-github-app-token", "Should use create-github-app-token action") - - // Verify repositories field is NOT present (org-wide access) - assert.NotContains(t, stepsStr, "repositories:", "Should not include repositories field for org-wide access") - - // Verify other fields are still present - assert.Contains(t, stepsStr, "owner:", "Should include owner field") - assert.Contains(t, stepsStr, "app-id:", "Should include app-id field") -} - // TestSafeOutputsAppTokenDiscussionsPermission tests that discussions permission is included func TestSafeOutputsAppTokenDiscussionsPermission(t *testing.T) { compiler := NewCompilerWithVersion("1.0.0") diff --git a/pkg/workflow/safe_outputs_env_integration_test.go b/pkg/workflow/safe_outputs_env_integration_test.go deleted file mode 100644 index da3ee53864..0000000000 --- a/pkg/workflow/safe_outputs_env_integration_test.go +++ /dev/null @@ -1,296 +0,0 @@ -//go:build integration - -package workflow - -import ( - "strings" - "testing" - - "github.com/github/gh-aw/pkg/parser" -) - -// parseWorkflowFromContent is a helper function to parse workflow content for testing -func parseWorkflowFromContent(t 
*testing.T, content string, filename string) *WorkflowData { - t.Helper() - - result, err := parser.ExtractFrontmatterFromContent(content) - if err != nil { - t.Fatalf("Failed to extract frontmatter: %v", err) - } - - compiler := NewCompiler() - safeOutputs := compiler.extractSafeOutputsConfig(result.Frontmatter) - topTools := extractToolsFromFrontmatter(result.Frontmatter) - - workflowData := &WorkflowData{ - Name: filename, - FrontmatterName: extractStringFromMap(result.Frontmatter, "name", nil), - SafeOutputs: safeOutputs, - Tools: topTools, - } - - return workflowData -} - -func TestSafeOutputsEnvIntegration(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - expectedEnvVars []string - expectedSafeOutput string - }{ - { - name: "Create issue job with custom env vars", - frontmatter: map[string]any{ - "name": "Test Workflow", - "on": "push", - "safe-outputs": map[string]any{ - "create-issue": nil, - "env": map[string]any{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE": "true", - }, - }, - }, - expectedEnvVars: []string{ - "GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE: true", - }, - expectedSafeOutput: "create-issue", - }, - { - name: "Create pull request job with custom env vars", - frontmatter: map[string]any{ - "name": "Test Workflow", - "on": "push", - "safe-outputs": map[string]any{ - "create-pull-request": nil, - "env": map[string]any{ - "CUSTOM_API_KEY": "${{ secrets.CUSTOM_API_KEY }}", - "ENVIRONMENT": "production", - }, - }, - }, - expectedEnvVars: []string{ - "CUSTOM_API_KEY: ${{ secrets.CUSTOM_API_KEY }}", - "ENVIRONMENT: production", - }, - expectedSafeOutput: "create-pull-request", - }, - { - name: "Add issue comment job with custom env vars", - frontmatter: map[string]any{ - "name": "Test Workflow", - "on": "issues", - "safe-outputs": map[string]any{ - "add-comment": nil, - "env": map[string]any{ - "NOTIFICATION_URL": "${{ secrets.WEBHOOK_URL }}", - 
"COMMENT_TEMPLATE": "template-v2", - }, - }, - }, - expectedEnvVars: []string{ - "NOTIFICATION_URL: ${{ secrets.WEBHOOK_URL }}", - "COMMENT_TEMPLATE: template-v2", - }, - expectedSafeOutput: "add-comment", - }, - { - name: "Multiple safe outputs with shared env vars", - frontmatter: map[string]any{ - "name": "Test Workflow", - "on": "push", - "safe-outputs": map[string]any{ - "create-issue": nil, - "create-pull-request": nil, - "env": map[string]any{ - "SHARED_TOKEN": "${{ secrets.SHARED_TOKEN }}", - "WORKFLOW_ID": "multi-output-test", - }, - }, - }, - expectedEnvVars: []string{ - "SHARED_TOKEN: ${{ secrets.SHARED_TOKEN }}", - "WORKFLOW_ID: multi-output-test", - }, - expectedSafeOutput: "create-issue,create-pull-request", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - compiler := NewCompiler() - - // Extract the safe outputs configuration - config := compiler.extractSafeOutputsConfig(tt.frontmatter) - if config == nil { - t.Fatal("Expected SafeOutputsConfig to be parsed") - } - - // Verify env configuration is parsed correctly - if config.Env == nil { - t.Fatal("Expected Env to be parsed") - } - - // Build workflow data - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: config, - } - - // Test job generation for each safe output type - if strings.Contains(tt.expectedSafeOutput, "create-issue") { - job, err := compiler.buildCreateOutputIssueJob(data, "main_job") - if err != nil { - t.Errorf("Error building create issue job: %v", err) - } - - assertEnvVarsInSteps(t, job.Steps, tt.expectedEnvVars) - } - - if strings.Contains(tt.expectedSafeOutput, "create-pull-request") { - job, err := compiler.buildCreateOutputPullRequestJob(data, "main_job") - if err != nil { - t.Errorf("Error building create pull request job: %v", err) - } - - assertEnvVarsInSteps(t, job.Steps, tt.expectedEnvVars) - } - - if strings.Contains(tt.expectedSafeOutput, "add-comment") { - job, err := 
compiler.buildCreateOutputAddCommentJob(data, "main_job", "", "", "") - if err != nil { - t.Errorf("Error building add issue comment job: %v", err) - } - - assertEnvVarsInSteps(t, job.Steps, tt.expectedEnvVars) - } - }) - } -} - -func TestSafeOutputsEnvFullWorkflowCompilation(t *testing.T) { - workflowContent := `--- -name: Test Environment Variables -on: push -safe-outputs: - create-issue: - title-prefix: "[env-test] " - labels: ["automated", "env-test"] - env: - GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }} - DEBUG_MODE: "true" - CUSTOM_API_KEY: ${{ secrets.CUSTOM_API_KEY }} ---- - -# Environment Variables Test Workflow - -This workflow tests that custom environment variables are properly passed through -to safe output jobs. - -Create an issue with test results. -` - - workflowData := parseWorkflowFromContent(t, workflowContent, "test-env-workflow.md") - - // Verify the SafeOutputs configuration includes our environment variables - if workflowData.SafeOutputs == nil { - t.Fatal("Expected SafeOutputs to be parsed") - } - - if workflowData.SafeOutputs.Env == nil { - t.Fatal("Expected Env to be parsed") - } - - expectedEnvVars := map[string]string{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE": "true", - "CUSTOM_API_KEY": "${{ secrets.CUSTOM_API_KEY }}", - } - - for key, expectedValue := range expectedEnvVars { - if actualValue, exists := workflowData.SafeOutputs.Env[key]; !exists { - t.Errorf("Expected env key %s to exist", key) - } else if actualValue != expectedValue { - t.Errorf("Expected env[%s] to be %q, got %q", key, expectedValue, actualValue) - } - } - - // Build the create issue job and verify it includes our environment variables - compiler := NewCompiler() - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Failed to build create issue job: %v", err) - } - - jobYAML := strings.Join(job.Steps, "") - - expectedEnvLines := []string{ - "GITHUB_TOKEN: ${{ 
secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE: true", - "CUSTOM_API_KEY: ${{ secrets.CUSTOM_API_KEY }}", - } - - for _, expectedEnvLine := range expectedEnvLines { - if !strings.Contains(jobYAML, expectedEnvLine) { - t.Errorf("Expected environment variable %q not found in job YAML", expectedEnvLine) - } - } - - // Verify issue configuration is present - if !strings.Contains(jobYAML, "GH_AW_ISSUE_TITLE_PREFIX: \"[env-test] \"") { - t.Error("Expected issue title prefix not found in job YAML") - } - - if !strings.Contains(jobYAML, "GH_AW_ISSUE_LABELS: \"automated,env-test\"") { - t.Error("Expected issue labels not found in job YAML") - } -} - -func TestSafeOutputsEnvWithStagedMode(t *testing.T) { - workflowContent := `--- -name: Test Environment Variables with Staged Mode -on: push -safe-outputs: - create-issue: - env: - GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }} - DEBUG_MODE: "true" - staged: true ---- - -# Environment Variables with Staged Mode Test - -This workflow tests that custom environment variables work with staged mode. 
-` - - workflowData := parseWorkflowFromContent(t, workflowContent, "test-env-staged-workflow.md") - - // Verify staged mode is enabled - if !workflowData.SafeOutputs.Staged { - t.Error("Expected staged mode to be enabled") - } - - // Build the create issue job and verify it includes our environment variables and staged flag - compiler := NewCompiler() - job, err := compiler.buildCreateOutputIssueJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Failed to build create issue job: %v", err) - } - - jobYAML := strings.Join(job.Steps, "") - - expectedEnvVars := []string{ - "GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE: true", - } - - assertEnvVarsInSteps(t, job.Steps, expectedEnvVars) - - // Verify staged mode is enabled - if !strings.Contains(jobYAML, "GH_AW_SAFE_OUTPUTS_STAGED: \"true\"") { - t.Error("Expected staged mode flag not found in job YAML") - } -} diff --git a/pkg/workflow/safe_outputs_env_test.go b/pkg/workflow/safe_outputs_env_test.go deleted file mode 100644 index b16926aca3..0000000000 --- a/pkg/workflow/safe_outputs_env_test.go +++ /dev/null @@ -1,196 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" -) - -func TestSafeOutputsEnvConfiguration(t *testing.T) { - compiler := NewCompiler() - - t.Run("Should parse env configuration in safe-outputs", func(t *testing.T) { - frontmatter := map[string]any{ - "name": "Test Workflow", - "safe-outputs": map[string]any{ - "create-issue": nil, - "env": map[string]any{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "CUSTOM_API_KEY": "${{ secrets.CUSTOM_API_KEY }}", - "DEBUG_MODE": "true", - }, - }, - } - - config := compiler.extractSafeOutputsConfig(frontmatter) - if config == nil { - t.Fatal("Expected SafeOutputsConfig to be parsed") - } - - if config.Env == nil { - t.Fatal("Expected Env to be parsed") - } - - expected := map[string]string{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "CUSTOM_API_KEY": 
"${{ secrets.CUSTOM_API_KEY }}", - "DEBUG_MODE": "true", - } - - for key, expectedValue := range expected { - if actualValue, exists := config.Env[key]; !exists { - t.Errorf("Expected env key %s to exist", key) - } else if actualValue != expectedValue { - t.Errorf("Expected env[%s] to be %q, got %q", key, expectedValue, actualValue) - } - } - }) - - t.Run("Should include custom env vars in create-issue job", func(t *testing.T) { - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{BaseSafeOutputConfig: BaseSafeOutputConfig{Max: strPtr("1")}}, - Env: map[string]string{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE": "true", - }, - }, - } - - job, err := compiler.buildCreateOutputIssueJob(data, "main_job") - if err != nil { - t.Fatalf("Failed to build create issue job: %v", err) - } - - expectedEnvVars := []string{ - "GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "DEBUG_MODE: true", - } - assertEnvVarsInSteps(t, job.Steps, expectedEnvVars) - }) - - t.Run("Should include custom env vars in create-pull-request job", func(t *testing.T) { - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: &SafeOutputsConfig{ - CreatePullRequests: &CreatePullRequestsConfig{BaseSafeOutputConfig: BaseSafeOutputConfig{Max: strPtr("1")}}, - Env: map[string]string{ - "GITHUB_TOKEN": "${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "API_ENDPOINT": "https://api.example.com", - }, - }, - } - - job, err := compiler.buildCreateOutputPullRequestJob(data, "main_job") - if err != nil { - t.Fatalf("Failed to build create pull request job: %v", err) - } - - expectedEnvVars := []string{ - "GITHUB_TOKEN: ${{ secrets.SOME_PAT_FOR_AGENTIC_WORKFLOWS }}", - "API_ENDPOINT: https://api.example.com", - } - assertEnvVarsInSteps(t, job.Steps, expectedEnvVars) - }) - - t.Run("Should work without env configuration", func(t 
*testing.T) { - frontmatter := map[string]any{ - "name": "Test Workflow", - "safe-outputs": map[string]any{ - "create-issue": nil, - }, - } - - config := compiler.extractSafeOutputsConfig(frontmatter) - if config == nil { - t.Fatal("Expected SafeOutputsConfig to be parsed") - } - - // Env should be nil when not specified - if config.Env != nil { - t.Error("Expected Env to be nil when not configured") - } - - // Job creation should still work - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: config, - } - - _, err := compiler.buildCreateOutputIssueJob(data, "main_job") - if err != nil { - t.Errorf("Job creation should work without env configuration: %v", err) - } - }) - - t.Run("Should handle empty env configuration", func(t *testing.T) { - frontmatter := map[string]any{ - "name": "Test Workflow", - "safe-outputs": map[string]any{ - "create-issue": nil, - "env": map[string]any{}, - }, - } - - config := compiler.extractSafeOutputsConfig(frontmatter) - if config == nil { - t.Fatal("Expected SafeOutputsConfig to be parsed") - } - - if config.Env == nil { - t.Error("Expected Env to be empty map, not nil") - } - - if len(config.Env) != 0 { - t.Errorf("Expected Env to be empty, got %d entries", len(config.Env)) - } - }) - - t.Run("Should handle non-string env values gracefully", func(t *testing.T) { - frontmatter := map[string]any{ - "name": "Test Workflow", - "safe-outputs": map[string]any{ - "create-issue": nil, - "env": map[string]any{ - "STRING_VALUE": "valid", - "INT_VALUE": 123, // should be ignored - "BOOL_VALUE": true, // should be ignored - "NULL_VALUE": nil, // should be ignored - }, - }, - } - - config := compiler.extractSafeOutputsConfig(frontmatter) - if config == nil { - t.Fatal("Expected SafeOutputsConfig to be parsed") - } - - if config.Env == nil { - t.Fatal("Expected Env to be parsed") - } - - // Only string values should be included - if len(config.Env) != 1 { - t.Errorf("Expected only 1 env var (string values 
only), got %d", len(config.Env)) - } - - if config.Env["STRING_VALUE"] != "valid" { - t.Error("Expected STRING_VALUE to be preserved") - } - - // Non-string values should be ignored - if _, exists := config.Env["INT_VALUE"]; exists { - t.Error("Expected INT_VALUE to be ignored") - } - if _, exists := config.Env["BOOL_VALUE"]; exists { - t.Error("Expected BOOL_VALUE to be ignored") - } - if _, exists := config.Env["NULL_VALUE"]; exists { - t.Error("Expected NULL_VALUE to be ignored") - } - }) -} diff --git a/pkg/workflow/safe_outputs_integration_test.go b/pkg/workflow/safe_outputs_integration_test.go index e672d9b069..ac15d85231 100644 --- a/pkg/workflow/safe_outputs_integration_test.go +++ b/pkg/workflow/safe_outputs_integration_test.go @@ -7,252 +7,6 @@ import ( "testing" ) -// TestSafeOutputJobsIntegration tests that all safe output job types that have individual -// job builders can be built with proper environment configuration, including the critical -// GH_AW_WORKFLOW_ID variable. This prevents regressions where required environment variables -// are missing from compiled workflows. 
-func TestSafeOutputJobsIntegration(t *testing.T) { - tests := []struct { - name string - safeOutputType string - configBuilder func() *SafeOutputsConfig - requiredEnvVar string // The critical env var to check (usually GH_AW_WORKFLOW_ID) - jobBuilder func(*Compiler, *WorkflowData, string) (*Job, error) - }{ - { - name: "create_pull_request", - safeOutputType: "create-pull-request", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreatePullRequests: &CreatePullRequestsConfig{ - TitlePrefix: "[Test] ", - Labels: []string{"test"}, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputPullRequestJob(data, mainJobName) - }, - }, - { - name: "create_issue", - safeOutputType: "create-issue", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{ - TitlePrefix: "[Test] ", - Labels: []string{"test"}, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputIssueJob(data, mainJobName) - }, - }, - { - name: "create_discussion", - safeOutputType: "create-discussion", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreateDiscussions: &CreateDiscussionsConfig{ - TitlePrefix: "[Test] ", - Category: "general", - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputDiscussionJob(data, mainJobName, "") - }, - }, - { - name: "add_comment", - safeOutputType: "add-comment", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - AddComments: &AddCommentsConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("5"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data 
*WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputAddCommentJob(data, mainJobName, "", "", "") - }, - }, - { - name: "add_labels", - safeOutputType: "add-labels", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - AddLabels: &AddLabelsConfig{ - Allowed: []string{"test", "automated"}, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildAddLabelsJob(data, mainJobName) - }, - }, - { - name: "missing_tool", - safeOutputType: "missing-tool", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - MissingTool: &MissingToolConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("10"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_MISSING_TOOL_MAX", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputMissingToolJob(data, mainJobName) - }, - }, - { - name: "create_pr_review_comment", - safeOutputType: "create-pr-review-comment", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreatePullRequestReviewComments: &CreatePullRequestReviewCommentsConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("10"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputPullRequestReviewCommentJob(data, mainJobName) - }, - }, - { - name: "create_code_scanning_alert", - safeOutputType: "create-code-scanning-alert", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreateCodeScanningAlerts: &CreateCodeScanningAlertsConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("10"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return 
c.buildCreateOutputCodeScanningAlertJob(data, mainJobName, "test-workflow.md") - }, - }, - { - name: "create_agent_session", - safeOutputType: "create-agent-session", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreateAgentSessions: &CreateAgentSessionConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("5"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputAgentSessionJob(data, mainJobName) - }, - }, - { - name: "upload_assets", - safeOutputType: "upload-assets", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - UploadAssets: &UploadAssetsConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("10"), - }, - }, - } - }, - requiredEnvVar: "GH_AW_WORKFLOW_ID", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildUploadAssetsJob(data, mainJobName, false) - }, - }, - } - - // Known issue: Individual job builders are missing GH_AW_WORKFLOW_ID - // These job builders need to be fixed to include the environment variable - // Tracked in: https://github.com/github/gh-aw/issues/7023 - knownMissingEnvVar := map[string]bool{ - "create_issue": true, - "create_discussion": true, - "add_comment": true, - "add_labels": true, - "create_pr_review_comment": true, - "create_code_scanning_alert": true, - "create_agent_session": true, - "upload_assets": true, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Skip tests for job builders with known missing GH_AW_WORKFLOW_ID - if knownMissingEnvVar[tt.name] && tt.requiredEnvVar == "GH_AW_WORKFLOW_ID" { - t.Skip("Known issue: GH_AW_WORKFLOW_ID missing from this job builder. 
Remove this skip when fixed.") - } - - // Create compiler instance - c := NewCompiler() - - // Build workflow data with the specific safe output configuration - workflowData := &WorkflowData{ - Name: "test-workflow", - Source: "test-source", - SafeOutputs: tt.configBuilder(), - } - - // Build the job - job, err := tt.jobBuilder(c, workflowData, "main_job") - if err != nil { - t.Fatalf("Failed to build %s job: %v", tt.name, err) - } - - if job == nil { - t.Fatalf("Job should not be nil for %s", tt.name) - } - - // Verify the job has steps - if len(job.Steps) == 0 { - t.Fatalf("Job should have at least one step for %s", tt.name) - } - - // Convert steps to string for checking environment variables - stepsContent := strings.Join(job.Steps, "") - - // Verify the required environment variable is present - if !strings.Contains(stepsContent, tt.requiredEnvVar) { - t.Errorf("Required environment variable %s not found in %s job steps.\nJob steps:\n%s", - tt.requiredEnvVar, tt.name, stepsContent) - } - - // Log success for debugging - t.Logf("✓ %s job built successfully with required env var %s", tt.name, tt.requiredEnvVar) - }) - } -} - -// TestConsolidatedSafeOutputsJobIntegration tests the consolidated safe outputs job -// which combines multiple safe output operations into a single job with multiple steps. -// Many safe output types (noop, push_to_pull_request_branch, update_issue, update_pull_request, -// update_discussion, close_issue, close_pull_request, close_discussion, add_reviewer, assign_milestone, -// assign_to_agent, assign_to_user, hide_comment, update_release) are built as steps within -// the consolidated job rather than as individual jobs. 
func TestConsolidatedSafeOutputsJobIntegration(t *testing.T) { tests := []struct { name string @@ -593,183 +347,6 @@ func TestConsolidatedSafeOutputsJobIntegration(t *testing.T) { } } -// TestSafeOutputJobsWithCustomEnvVars tests that custom environment variables -// from safe-outputs.env are properly propagated to all safe output job types. -func TestSafeOutputJobsWithCustomEnvVars(t *testing.T) { - tests := []struct { - name string - safeOutputType string - configBuilder func() *SafeOutputsConfig - customEnvVars map[string]string - jobBuilder func(*Compiler, *WorkflowData, string) (*Job, error) - }{ - { - name: "create_issue_with_custom_env", - safeOutputType: "create-issue", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{ - TitlePrefix: "[Test] ", - }, - Env: map[string]string{ - "CUSTOM_VAR": "custom_value", - "GITHUB_TOKEN": "${{ secrets.CUSTOM_PAT }}", - }, - } - }, - customEnvVars: map[string]string{ - "CUSTOM_VAR": "CUSTOM_VAR: custom_value", - "GITHUB_TOKEN": "GITHUB_TOKEN: ${{ secrets.CUSTOM_PAT }}", - }, - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputIssueJob(data, mainJobName) - }, - }, - { - name: "create_pull_request_with_custom_env", - safeOutputType: "create-pull-request", - configBuilder: func() *SafeOutputsConfig { - return &SafeOutputsConfig{ - CreatePullRequests: &CreatePullRequestsConfig{ - TitlePrefix: "[Test] ", - }, - Env: map[string]string{ - "DEBUG_MODE": "true", - "API_KEY": "${{ secrets.API_KEY }}", - }, - } - }, - customEnvVars: map[string]string{ - "DEBUG_MODE": "DEBUG_MODE: true", - "API_KEY": "API_KEY: ${{ secrets.API_KEY }}", - }, - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputPullRequestJob(data, mainJobName) - }, - }, - { - name: "add_comment_with_custom_env", - safeOutputType: "add-comment", - configBuilder: func() 
*SafeOutputsConfig { - return &SafeOutputsConfig{ - AddComments: &AddCommentsConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{ - Max: strPtr("5"), - }, - }, - Env: map[string]string{ - "NOTIFICATION_URL": "${{ secrets.WEBHOOK_URL }}", - "ENVIRONMENT": "production", - }, - } - }, - customEnvVars: map[string]string{ - "NOTIFICATION_URL": "NOTIFICATION_URL: ${{ secrets.WEBHOOK_URL }}", - "ENVIRONMENT": "ENVIRONMENT: production", - }, - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - return c.buildCreateOutputAddCommentJob(data, mainJobName, "", "", "") - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create compiler instance - c := NewCompiler() - - // Build workflow data with custom env vars - workflowData := &WorkflowData{ - Name: "test-workflow", - Source: "test-source", - SafeOutputs: tt.configBuilder(), - } - - // Build the job - job, err := tt.jobBuilder(c, workflowData, "main_job") - if err != nil { - t.Fatalf("Failed to build %s job: %v", tt.name, err) - } - - // Convert steps to string for checking environment variables - stepsContent := strings.Join(job.Steps, "") - - // Verify all custom environment variables are present - for envVarName, expectedContent := range tt.customEnvVars { - if !strings.Contains(stepsContent, expectedContent) { - t.Errorf("Custom environment variable %s not found in %s job.\nExpected: %s\nJob steps:\n%s", - envVarName, tt.name, expectedContent, stepsContent) - } - } - - t.Logf("✓ %s job includes all custom environment variables", tt.name) - }) - } -} - -// TestSafeOutputJobsMissingConfig tests that jobs fail gracefully when required configuration is missing -func TestSafeOutputJobsMissingConfig(t *testing.T) { - tests := []struct { - name string - jobBuilder func(*Compiler, *WorkflowData, string) (*Job, error) - shouldFail bool - }{ - { - name: "missing_tool_without_config", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) 
(*Job, error) { - // Set SafeOutputs to nil to trigger validation error - data.SafeOutputs = nil - return c.buildCreateOutputMissingToolJob(data, mainJobName) - }, - shouldFail: true, - }, - { - name: "create_issue_without_config", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - // Set SafeOutputs to nil - data.SafeOutputs = nil - return c.buildCreateOutputIssueJob(data, mainJobName) - }, - shouldFail: true, - }, - { - name: "add_labels_without_config", - jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) { - // Set SafeOutputs to nil - data.SafeOutputs = nil - return c.buildAddLabelsJob(data, mainJobName) - }, - shouldFail: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewCompiler() - workflowData := &WorkflowData{ - Name: "test-workflow", - Source: "test-source", - } - - job, err := tt.jobBuilder(c, workflowData, "main_job") - - if tt.shouldFail { - if err == nil { - t.Errorf("Expected error for %s, but got none. Job: %v", tt.name, job) - } else { - t.Logf("✓ %s correctly failed with error: %v", tt.name, err) - } - } else { - if err != nil { - t.Errorf("Expected no error for %s, but got: %v", tt.name, err) - } - } - }) - } -} - -// TestConsolidatedSafeOutputsJobWithCustomEnv tests that custom environment variables -// are properly included in the consolidated safe outputs job. 
func TestConsolidatedSafeOutputsJobWithCustomEnv(t *testing.T) { c := NewCompiler() diff --git a/pkg/workflow/safe_outputs_messages_test.go b/pkg/workflow/safe_outputs_messages_test.go index 73a4e1472c..8f3754077f 100644 --- a/pkg/workflow/safe_outputs_messages_test.go +++ b/pkg/workflow/safe_outputs_messages_test.go @@ -193,55 +193,3 @@ func TestSerializeMessagesConfig(t *testing.T) { } }) } - -func TestMessagesEnvVarInSafeOutputJobs(t *testing.T) { - compiler := NewCompiler() - - t.Run("Should include GH_AW_SAFE_OUTPUT_MESSAGES env var when messages configured", func(t *testing.T) { - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{BaseSafeOutputConfig: BaseSafeOutputConfig{Max: strPtr("1")}}, - Messages: &SafeOutputMessagesConfig{ - Footer: "> Custom footer [{workflow_name}]({run_url})", - }, - }, - } - - job, err := compiler.buildCreateOutputIssueJob(data, "main_job") - if err != nil { - t.Fatalf("Failed to build create issue job: %v", err) - } - - stepsStr := strings.Join(job.Steps, "") - if !strings.Contains(stepsStr, "GH_AW_SAFE_OUTPUT_MESSAGES:") { - t.Error("Expected GH_AW_SAFE_OUTPUT_MESSAGES to be included in job steps") - } - - // Verify it contains the serialized footer - if !strings.Contains(stepsStr, "Custom footer") { - t.Error("Expected serialized messages to contain the custom footer text") - } - }) - - t.Run("Should not include GH_AW_SAFE_OUTPUT_MESSAGES when messages not configured", func(t *testing.T) { - data := &WorkflowData{ - Name: "Test", - FrontmatterName: "Test Workflow", - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{BaseSafeOutputConfig: BaseSafeOutputConfig{Max: strPtr("1")}}, - }, - } - - job, err := compiler.buildCreateOutputIssueJob(data, "main_job") - if err != nil { - t.Fatalf("Failed to build create issue job: %v", err) - } - - stepsStr := strings.Join(job.Steps, "") - if strings.Contains(stepsStr, 
"GH_AW_SAFE_OUTPUT_MESSAGES:") { - t.Error("Expected GH_AW_SAFE_OUTPUT_MESSAGES to NOT be included when messages not configured") - } - }) -} diff --git a/pkg/workflow/schemas/mcp-gateway-config.schema.json b/pkg/workflow/schemas/mcp-gateway-config.schema.json index c5bc9bbda8..5fc0eae654 100644 --- a/pkg/workflow/schemas/mcp-gateway-config.schema.json +++ b/pkg/workflow/schemas/mcp-gateway-config.schema.json @@ -93,6 +93,11 @@ "type": "string" }, "default": ["*"] + }, + "guard-policies": { + "type": "object", + "description": "Guard policies for access control at the MCP gateway level. The structure of guard policies is server-specific. For GitHub MCP server, see the GitHub guard policy schema. For other servers (Jira, WorkIQ), different policy schemas will apply.", + "additionalProperties": true } }, "required": ["container"], @@ -137,6 +142,11 @@ "type": "string" }, "default": {} + }, + "guard-policies": { + "type": "object", + "description": "Guard policies for access control at the MCP gateway level. The structure of guard policies is server-specific. For GitHub MCP server, see the GitHub guard policy schema. For other servers (Jira, WorkIQ), different policy schemas will apply.", + "additionalProperties": true } }, "required": ["type", "url"], diff --git a/pkg/workflow/script_registry.go b/pkg/workflow/script_registry.go index 602d9a813a..bb0c6cc026 100644 --- a/pkg/workflow/script_registry.go +++ b/pkg/workflow/script_registry.go @@ -1,60 +1,6 @@ -// This file provides a ScriptRegistry for managing JavaScript script bundling. -// -// # Script Registry Pattern -// -// The ScriptRegistry eliminates the repetitive sync.Once pattern found throughout -// the codebase for lazy script bundling. Instead of declaring separate variables -// and getter functions for each script, register scripts once and retrieve them -// by name with runtime mode verification. 
-// -// # Before (repetitive pattern): -// -// var ( -// createIssueScript string -// createIssueScriptOnce sync.Once -// ) -// -// func getCreateIssueScript() string { -// createIssueScriptOnce.Do(func() { -// sources := GetJavaScriptSources() -// bundled, err := BundleJavaScriptFromSources(createIssueScriptSource, sources, "") -// if err != nil { -// createIssueScript = createIssueScriptSource -// } else { -// createIssueScript = bundled -// } -// }) -// return createIssueScript -// } -// -// # After (using registry with runtime mode verification): -// -// // Registration at package init -// DefaultScriptRegistry.RegisterWithMode("create_issue", createIssueScriptSource, RuntimeModeGitHubScript) -// -// // Usage anywhere with mode verification -// script := DefaultScriptRegistry.GetWithMode("create_issue", RuntimeModeGitHubScript) -// -// # Benefits -// -// - Eliminates ~15 lines of boilerplate per script (variable pair + getter function) -// - Centralizes bundling logic -// - Consistent error handling -// - Thread-safe lazy initialization -// - Easy to add new scripts -// - Runtime mode verification prevents mismatches between registration and usage -// -// # Runtime Mode Verification -// -// The GetWithMode() method verifies that the requested runtime mode matches the mode -// the script was registered with. This catches configuration errors at compile time -// rather than at runtime. If there's a mismatch, a warning is logged but the script -// is still returned to avoid breaking workflows. - package workflow import ( - "fmt" "strings" "sync" @@ -63,26 +9,12 @@ import ( var registryLog = logger.New("workflow:script_registry") -// scriptEntry holds the source and bundled versions of a script +// scriptEntry holds metadata about a registered script. 
type scriptEntry struct { - source string - bundled string - mode RuntimeMode // Runtime mode for bundling - actionPath string // Optional path to custom action (e.g., "./actions/create-issue") - once sync.Once + actionPath string // Optional path to custom action (e.g., "./actions/create-issue") } -// ScriptRegistry manages lazy bundling of JavaScript scripts. -// It provides a centralized place to register source scripts and retrieve -// bundled versions on-demand with caching. -// -// Thread-safe: All operations use internal synchronization. -// -// Usage: -// -// registry := NewScriptRegistry() -// registry.Register("my_script", myScriptSource) -// bundled := registry.Get("my_script") +// ScriptRegistry manages script metadata and custom action paths. type ScriptRegistry struct { mu sync.RWMutex scripts map[string]*scriptEntry @@ -96,109 +28,6 @@ func NewScriptRegistry() *ScriptRegistry { } } -// Register adds a script source to the registry. -// The script will be bundled lazily on first access via Get(). -// Scripts registered this way default to RuntimeModeGitHubScript. -// -// Parameters: -// - name: Unique identifier for the script (e.g., "create_issue", "add_comment") -// - source: The raw JavaScript source code (typically from go:embed) -// -// If a script with the same name already exists, it will be overwritten. -// This is useful for testing but should be avoided in production. -// -// Returns an error if validation fails. -func (r *ScriptRegistry) Register(name string, source string) error { - return r.RegisterWithMode(name, source, RuntimeModeGitHubScript) -} - -// RegisterWithMode adds a script source to the registry with a specific runtime mode. -// The script will be bundled lazily on first access via Get(). -// Performs compile-time validation to ensure the script follows runtime mode conventions. 
-// -// Parameters: -// - name: Unique identifier for the script (e.g., "create_issue", "add_comment") -// - source: The raw JavaScript source code (typically from go:embed) -// - mode: Runtime mode for bundling (GitHub Script or Node.js) -// -// If a script with the same name already exists, it will be overwritten. -// This is useful for testing but should be avoided in production. -// -// Compile-time validations: -// - GitHub Script mode: validates no execSync usage (should use exec instead) -// - Node.js mode: validates no GitHub Actions globals (core.*, exec.*, github.*) -// -// Returns an error if validation fails, allowing the caller to handle gracefully -// instead of crashing the process. -func (r *ScriptRegistry) RegisterWithMode(name string, source string, mode RuntimeMode) error { - r.mu.Lock() - defer r.mu.Unlock() - - if registryLog.Enabled() { - registryLog.Printf("Registering script: %s (%d bytes, mode: %s)", name, len(source), mode) - } - - // Perform compile-time validation based on runtime mode - if err := validateNoExecSync(name, source, mode); err != nil { - return fmt.Errorf("script registration validation failed for %q: %w", name, err) - } - - if err := validateNoGitHubScriptGlobals(name, source, mode); err != nil { - return fmt.Errorf("script registration validation failed for %q: %w", name, err) - } - - r.scripts[name] = &scriptEntry{ - source: source, - mode: mode, - actionPath: "", // No custom action by default - } - - return nil -} - -// RegisterWithAction registers a script with both inline code and a custom action path. -// This allows the compiler to choose between inline mode (using actions/github-script) -// or custom action mode (using the provided action path). 
-// -// Parameters: -// - name: Unique identifier for the script (e.g., "create_issue") -// - source: The raw JavaScript source code (for inline mode) -// - mode: Runtime mode for bundling (GitHub Script or Node.js) -// - actionPath: Path to custom action (e.g., "./actions/create-issue" for development) -// -// The actionPath should be a relative path from the repository root for development mode. -// In the future, this can be extended to support versioned references like -// "github/gh-aw/.github/actions/create-issue@SHA" for release mode. -// -// Returns an error if validation fails, allowing the caller to handle gracefully -// instead of crashing the process. -func (r *ScriptRegistry) RegisterWithAction(name string, source string, mode RuntimeMode, actionPath string) error { - r.mu.Lock() - defer r.mu.Unlock() - - if registryLog.Enabled() { - registryLog.Printf("Registering script with action: %s (%d bytes, mode: %s, action: %s)", - name, len(source), mode, actionPath) - } - - // Perform compile-time validation based on runtime mode - if err := validateNoExecSync(name, source, mode); err != nil { - return fmt.Errorf("script registration validation failed for %q: %w", name, err) - } - - if err := validateNoGitHubScriptGlobals(name, source, mode); err != nil { - return fmt.Errorf("script registration validation failed for %q: %w", name, err) - } - - r.scripts[name] = &scriptEntry{ - source: source, - mode: mode, - actionPath: actionPath, - } - - return nil -} - // GetActionPath retrieves the custom action path for a script, if registered. // Returns an empty string if the script doesn't have a custom action path. func (r *ScriptRegistry) GetActionPath(name string) string { @@ -218,162 +47,18 @@ func (r *ScriptRegistry) GetActionPath(name string) string { return entry.actionPath } -// Get retrieves a bundled script by name. -// Bundling is performed lazily on first access and cached for subsequent calls. 
-// -// If bundling fails, the original source is returned as a fallback. -// If the script is not registered, an empty string is returned. -// -// Thread-safe: Multiple goroutines can call Get concurrently. -// -// DEPRECATED: Use GetWithMode instead to specify the expected runtime mode. -// This allows the compiler to verify the runtime mode matches the registered mode. -func (r *ScriptRegistry) Get(name string) string { - r.mu.RLock() - entry, exists := r.scripts[name] - r.mu.RUnlock() - - if !exists { - if registryLog.Enabled() { - registryLog.Printf("Script not found: %s", name) - } - return "" - } - - entry.once.Do(func() { - if registryLog.Enabled() { - registryLog.Printf("Bundling script: %s (mode: %s)", name, entry.mode) - } - - sources := GetJavaScriptSources() - bundled, err := BundleJavaScriptWithMode(entry.source, sources, "", entry.mode) - if err != nil { - registryLog.Printf("Bundling failed for %s, using source as-is: %v", name, err) - entry.bundled = entry.source - } else { - if registryLog.Enabled() { - registryLog.Printf("Successfully bundled %s: %d bytes", name, len(bundled)) - } - entry.bundled = bundled - } - }) - - return entry.bundled -} - -// GetWithMode retrieves a bundled script by name with runtime mode verification. -// Bundling is performed lazily on first access and cached for subsequent calls. -// -// The expectedMode parameter allows the compiler to verify that the registered runtime mode -// matches what the caller expects. If there's a mismatch, a warning is logged but the script -// is still returned to avoid breaking existing workflows. -// -// If bundling fails, the original source is returned as a fallback. -// If the script is not registered, an empty string is returned. -// -// Thread-safe: Multiple goroutines can call GetWithMode concurrently. 
-func (r *ScriptRegistry) GetWithMode(name string, expectedMode RuntimeMode) string { - r.mu.RLock() - entry, exists := r.scripts[name] - r.mu.RUnlock() - - if !exists { - if registryLog.Enabled() { - registryLog.Printf("Script not found: %s", name) - } - return "" - } - - // Verify the runtime mode matches what the caller expects - if entry.mode != expectedMode { - registryLog.Printf("WARNING: Runtime mode mismatch for script %s: registered as %s but requested as %s", - name, entry.mode, expectedMode) - } - - entry.once.Do(func() { - if registryLog.Enabled() { - registryLog.Printf("Bundling script: %s (mode: %s)", name, entry.mode) - } - - sources := GetJavaScriptSources() - bundled, err := BundleJavaScriptWithMode(entry.source, sources, "", entry.mode) - if err != nil { - registryLog.Printf("Bundling failed for %s, using source as-is: %v", name, err) - entry.bundled = entry.source - } else { - if registryLog.Enabled() { - registryLog.Printf("Successfully bundled %s: %d bytes", name, len(bundled)) - } - entry.bundled = bundled - } - }) - - return entry.bundled -} - -// GetSource retrieves the original (unbundled) source for a script. -// Useful for testing or when bundling is not needed. -func (r *ScriptRegistry) GetSource(name string) string { - r.mu.RLock() - defer r.mu.RUnlock() - - entry, exists := r.scripts[name] - if !exists { - return "" - } - return entry.source -} - -// Has checks if a script is registered in the registry. -func (r *ScriptRegistry) Has(name string) bool { - r.mu.RLock() - defer r.mu.RUnlock() - - _, exists := r.scripts[name] - return exists -} - -// Names returns a list of all registered script names. -// Useful for debugging and testing. -func (r *ScriptRegistry) Names() []string { - r.mu.RLock() - defer r.mu.RUnlock() - - names := make([]string, 0, len(r.scripts)) - for name := range r.scripts { - names = append(names, name) - } - return names -} - // DefaultScriptRegistry is the global script registry used by the workflow package. 
// Scripts are registered during package initialization via init() functions. var DefaultScriptRegistry = NewScriptRegistry() -// GetScript retrieves a bundled script from the default registry. -// This is a convenience function equivalent to DefaultScriptRegistry.Get(name). -// -// DEPRECATED: Use GetScriptWithMode to specify the expected runtime mode. -func GetScript(name string) string { - return DefaultScriptRegistry.Get(name) -} - -// GetScriptWithMode retrieves a bundled script from the default registry with mode verification. -// This is a convenience function equivalent to DefaultScriptRegistry.GetWithMode(name, mode). -func GetScriptWithMode(name string, mode RuntimeMode) string { - return DefaultScriptRegistry.GetWithMode(name, mode) -} - // GetAllScriptFilenames returns a sorted list of all .cjs filenames from the JavaScript sources. // This is used by the build system to discover which files need to be embedded in custom actions. -// The returned list includes all .cjs files found in pkg/workflow/js/, including dependencies. 
func GetAllScriptFilenames() []string { registryLog.Print("Getting all script filenames from JavaScript sources") sources := GetJavaScriptSources() filenames := make([]string, 0, len(sources)) for filename := range sources { - // Only include .cjs files (exclude .json and other files) if strings.HasSuffix(filename, ".cjs") { filenames = append(filenames, filename) } @@ -381,10 +66,8 @@ func GetAllScriptFilenames() []string { registryLog.Printf("Found %d .cjs files in JavaScript sources", len(filenames)) - // Sort for consistency sortedFilenames := make([]string, len(filenames)) copy(sortedFilenames, filenames) - // Using a simple sort to avoid importing sort package issues for i := range sortedFilenames { for j := i + 1; j < len(sortedFilenames); j++ { if sortedFilenames[i] > sortedFilenames[j] { diff --git a/pkg/workflow/script_registry_test.go b/pkg/workflow/script_registry_test.go deleted file mode 100644 index 3962d822b5..0000000000 --- a/pkg/workflow/script_registry_test.go +++ /dev/null @@ -1,298 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "sync" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestScriptRegistry_Register(t *testing.T) { - registry := NewScriptRegistry() - - err := registry.Register("test_script", "console.log('hello');") - require.NoError(t, err) - - assert.True(t, registry.Has("test_script"), "registry should have test_script after registration") - assert.False(t, registry.Has("nonexistent"), "registry should not have nonexistent script") -} - -func TestScriptRegistry_Get_NotFound(t *testing.T) { - registry := NewScriptRegistry() - - result := registry.Get("nonexistent") - - assert.Empty(t, result) -} - -func TestScriptRegistry_Get_BundlesOnce(t *testing.T) { - registry := NewScriptRegistry() - - // Register a simple script that doesn't require bundling - source := "console.log('hello');" - err := registry.Register("simple", source) - require.NoError(t, err) - - 
// Get should bundle and return result - result1 := registry.Get("simple") - result2 := registry.Get("simple") - - // Both calls should return the same result (cached) - assert.Equal(t, result1, result2) - assert.NotEmpty(t, result1) -} - -func TestScriptRegistry_GetSource(t *testing.T) { - registry := NewScriptRegistry() - - source := "const x = 1;" - err := registry.Register("test", source) - require.NoError(t, err) - - // GetSource should return original source - assert.Equal(t, source, registry.GetSource("test")) -} - -func TestScriptRegistry_GetSource_NotFound(t *testing.T) { - registry := NewScriptRegistry() - - result := registry.GetSource("nonexistent") - - assert.Empty(t, result) -} - -func TestScriptRegistry_Names(t *testing.T) { - registry := NewScriptRegistry() - - require.NoError(t, registry.Register("script_a", "a")) - require.NoError(t, registry.Register("script_b", "b")) - require.NoError(t, registry.Register("script_c", "c")) - - names := registry.Names() - - assert.Len(t, names, 3) - assert.Contains(t, names, "script_a") - assert.Contains(t, names, "script_b") - assert.Contains(t, names, "script_c") -} - -func TestScriptRegistry_ConcurrentAccess(t *testing.T) { - registry := NewScriptRegistry() - source := "console.log('concurrent test');" - err := registry.Register("concurrent", source) - require.NoError(t, err) - - // Test concurrent Get calls - var wg sync.WaitGroup - results := make([]string, 10) - - for i := range 10 { - wg.Add(1) - go func(idx int) { - defer wg.Done() - results[idx] = registry.Get("concurrent") - }(i) - } - - wg.Wait() - - // All results should be the same (due to Once semantics) - for i := 1; i < 10; i++ { - assert.Equal(t, results[0], results[i], "concurrent access should return consistent results") - } -} - -func TestScriptRegistry_Overwrite(t *testing.T) { - registry := NewScriptRegistry() - - err := registry.Register("test", "original") - require.NoError(t, err) - assert.Equal(t, "original", registry.GetSource("test")) 
- - err = registry.Register("test", "updated") - require.NoError(t, err) - assert.Equal(t, "updated", registry.GetSource("test")) -} - -func TestScriptRegistry_Overwrite_AfterGet(t *testing.T) { - registry := NewScriptRegistry() - - // Register initial script - err := registry.Register("test", "console.log('original');") - require.NoError(t, err) - - // Trigger bundling by calling Get() - firstResult := registry.Get("test") - assert.NotEmpty(t, firstResult) - assert.Contains(t, firstResult, "original") - - // Overwrite with new source - err = registry.Register("test", "console.log('updated');") - require.NoError(t, err) - - // Verify GetSource returns new source - assert.Equal(t, "console.log('updated');", registry.GetSource("test")) - - // Verify Get() returns bundled version of new source - secondResult := registry.Get("test") - assert.NotEmpty(t, secondResult) - assert.Contains(t, secondResult, "updated") - assert.NotContains(t, secondResult, "original") -} - -func TestDefaultScriptRegistry_GetScript(t *testing.T) { - // Create a fresh registry for this test to avoid interference - oldRegistry := DefaultScriptRegistry - DefaultScriptRegistry = NewScriptRegistry() - defer func() { DefaultScriptRegistry = oldRegistry }() - - // Register a test script - err := DefaultScriptRegistry.Register("test_global", "global test") - require.NoError(t, err) - - // GetScript should use DefaultScriptRegistry - result := GetScript("test_global") - require.NotEmpty(t, result) -} - -func TestScriptRegistry_Has(t *testing.T) { - registry := NewScriptRegistry() - - assert.False(t, registry.Has("missing"), "registry should not have missing script") - - err := registry.Register("present", "code") - require.NoError(t, err) - - assert.True(t, registry.Has("present"), "registry should have present script after registration") - assert.False(t, registry.Has("still_missing"), "registry should not have still_missing script") -} - -func TestScriptRegistry_RegisterWithMode(t *testing.T) { - // 
Create a custom registry for testing to avoid side effects - registry := NewScriptRegistry() - - // Test that bundling respects runtime mode - // In GitHub Script mode: module.exports should be removed - // In Node.js mode: module.exports should be preserved - - scriptWithExports := `function test() { - return 42; -} - -module.exports = { test }; -` - - // Register with GitHub Script mode (default) - err := registry.Register("github_mode", scriptWithExports) - require.NoError(t, err) - githubResult := registry.Get("github_mode") - - // Should not contain module.exports in GitHub Script mode - assert.NotContains(t, githubResult, "module.exports", - "GitHub Script mode should remove module.exports") - assert.Contains(t, githubResult, "function test()", - "Should still contain the function") - - // Register with Node.js mode - err = registry.RegisterWithMode("nodejs_mode", scriptWithExports, RuntimeModeNodeJS) - require.NoError(t, err) - nodejsResult := registry.Get("nodejs_mode") - - // Should contain module.exports in Node.js mode - assert.Contains(t, nodejsResult, "module.exports", - "Node.js mode should preserve module.exports") - assert.Contains(t, nodejsResult, "function test()", - "Should still contain the function") -} - -func TestScriptRegistry_RegisterWithMode_PreservesDifference(t *testing.T) { - registry := NewScriptRegistry() - - source := `function helper() { - return "value"; -} - -module.exports = { helper };` - - // Register same source with different modes - err := registry.RegisterWithMode("github_mode", source, RuntimeModeGitHubScript) - require.NoError(t, err) - err = registry.RegisterWithMode("nodejs_mode", source, RuntimeModeNodeJS) - require.NoError(t, err) - - githubResult := registry.Get("github_mode") - nodejsResult := registry.Get("nodejs_mode") - - // GitHub Script mode should remove module.exports - assert.NotContains(t, githubResult, "module.exports", - "GitHub Script mode should remove module.exports") - assert.Contains(t, githubResult, 
"function helper()", - "Should contain the function in GitHub mode") - - // Node.js mode should preserve module.exports - assert.Contains(t, nodejsResult, "module.exports", - "Node.js mode should preserve module.exports") - assert.Contains(t, nodejsResult, "function helper()", - "Should contain the function in Node.js mode") -} - -func TestScriptRegistry_GetWithMode(t *testing.T) { - registry := NewScriptRegistry() - - source := `function helper() { - return "value"; -} - -module.exports = { helper };` - - // Register with GitHub Script mode - err := registry.RegisterWithMode("test_script", source, RuntimeModeGitHubScript) - require.NoError(t, err) - - // Test GetWithMode with matching mode - should work without warning - result := registry.GetWithMode("test_script", RuntimeModeGitHubScript) - assert.NotEmpty(t, result, "Should return bundled script") - assert.NotContains(t, result, "module.exports", "GitHub Script mode should remove module.exports") - - // Test GetWithMode with mismatched mode - should log warning but still work - result2 := registry.GetWithMode("test_script", RuntimeModeNodeJS) - assert.NotEmpty(t, result2, "Should return bundled script even with mode mismatch") - // The script was bundled with GitHub Script mode, so module.exports should still be removed - assert.NotContains(t, result2, "module.exports", "Script was bundled with GitHub Script mode") -} - -func TestScriptRegistry_GetWithMode_ModeMismatch(t *testing.T) { - registry := NewScriptRegistry() - - source := `function test() { return 42; } -module.exports = { test };` - - // Register with Node.js mode - err := registry.RegisterWithMode("nodejs_script", source, RuntimeModeNodeJS) - require.NoError(t, err) - - // Request with GitHub Script mode - should log warning - result := registry.GetWithMode("nodejs_script", RuntimeModeGitHubScript) - - // Script was bundled with Node.js mode, so module.exports should be preserved - assert.Contains(t, result, "module.exports", "Node.js mode should 
preserve module.exports") -} - -func TestGetScriptWithMode(t *testing.T) { - // Create a fresh registry for this test - oldRegistry := DefaultScriptRegistry - DefaultScriptRegistry = NewScriptRegistry() - defer func() { DefaultScriptRegistry = oldRegistry }() - - // Register a test script - err := DefaultScriptRegistry.RegisterWithMode("test_helper", "function test() { return 1; }", RuntimeModeGitHubScript) - require.NoError(t, err) - - // Test GetScriptWithMode - result := GetScriptWithMode("test_helper", RuntimeModeGitHubScript) - require.NotEmpty(t, result) - assert.Contains(t, result, "function test()") -} diff --git a/pkg/workflow/setup_action_paths.go b/pkg/workflow/setup_action_paths.go new file mode 100644 index 0000000000..16736ac89d --- /dev/null +++ b/pkg/workflow/setup_action_paths.go @@ -0,0 +1,5 @@ +package workflow + +// SetupActionDestination is the path where the setup action copies script files +// on the agent runner (e.g. /opt/gh-aw/actions). +const SetupActionDestination = "/opt/gh-aw/actions" diff --git a/pkg/workflow/sh.go b/pkg/workflow/sh.go deleted file mode 100644 index b79dd0ce7b..0000000000 --- a/pkg/workflow/sh.go +++ /dev/null @@ -1,152 +0,0 @@ -package workflow - -import ( - _ "embed" - "fmt" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var shLog = logger.New("workflow:sh") - -// Prompt file paths at runtime (copied by setup action) -const ( - promptsDir = "/opt/gh-aw/prompts" - prContextPromptFile = "pr_context_prompt.md" - tempFolderPromptFile = "temp_folder_prompt.md" - playwrightPromptFile = "playwright_prompt.md" - markdownPromptFile = "markdown.md" - xpiaPromptFile = "xpia.md" - cacheMemoryPromptFile = "cache_memory_prompt.md" - cacheMemoryPromptMultiFile = "cache_memory_prompt_multi.md" - repoMemoryPromptFile = "repo_memory_prompt.md" - repoMemoryPromptMultiFile = "repo_memory_prompt_multi.md" - safeOutputsPromptFile = "safe_outputs_prompt.md" - safeOutputsCreatePRFile = "safe_outputs_create_pull_request.md" - 
safeOutputsPushToBranchFile = "safe_outputs_push_to_pr_branch.md" - safeOutputsAutoCreateIssueFile = "safe_outputs_auto_create_issue.md" -) - -// GitHub context prompt is kept embedded because it contains GitHub Actions expressions -// that need to be extracted at compile time. Moving this to a runtime file would require -// reading and parsing the file during compilation, which is more complex. -// -//go:embed prompts/github_context_prompt.md -var githubContextPromptText string - -// WritePromptFileToYAML writes a shell command to cat a prompt file from /opt/gh-aw/prompts/ -// This replaces the previous approach of embedding prompt text in the binary. -func WritePromptFileToYAML(yaml *strings.Builder, filename string, indent string) { - shLog.Printf("Writing prompt file reference to YAML: file=%s", filename) - promptPath := fmt.Sprintf("%s/%s", promptsDir, filename) - yaml.WriteString(indent + fmt.Sprintf("cat \"%s\" >> \"$GH_AW_PROMPT\"\n", promptPath)) -} - -// WriteShellScriptToYAML writes a shell script with proper indentation to a strings.Builder -func WriteShellScriptToYAML(yaml *strings.Builder, script string, indent string) { - scriptLines := strings.SplitSeq(script, "\n") - for line := range scriptLines { - // Skip empty lines at the beginning or end - if strings.TrimSpace(line) != "" { - fmt.Fprintf(yaml, "%s%s\n", indent, line) - } - } -} - -// WritePromptTextToYAML writes static prompt text to a YAML heredoc with proper indentation. -// Use this function for prompt text that contains NO variable placeholders or expressions. -// It chunks the text into groups of lines of less than MaxPromptChunkSize characters, with a maximum of MaxPromptChunks chunks. -// Each chunk is written as a separate heredoc to avoid GitHub Actions step size limits (21KB). -// -// For prompt text with variable placeholders that need substitution, use WritePromptTextToYAMLWithPlaceholders instead. 
-func WritePromptTextToYAML(yaml *strings.Builder, text string, indent string) { - shLog.Printf("Writing prompt text to YAML: text_size=%d bytes, chunks=%d", len(text), len(strings.Split(text, "\n"))) - textLines := strings.Split(text, "\n") - chunks := chunkLines(textLines, indent, MaxPromptChunkSize, MaxPromptChunks) - shLog.Printf("Created %d chunks for prompt text", len(chunks)) - - delimiter := GenerateHeredocDelimiter("PROMPT") - // Write each chunk as a separate heredoc - // For static prompt text without variables, use direct cat to file - for _, chunk := range chunks { - yaml.WriteString(indent + "cat << '" + delimiter + "' >> \"$GH_AW_PROMPT\"\n") - for _, line := range chunk { - fmt.Fprintf(yaml, "%s%s\n", indent, line) - } - yaml.WriteString(indent + delimiter + "\n") - } -} - -// WritePromptTextToYAMLWithPlaceholders writes prompt text with variable placeholders to a YAML heredoc with proper indentation. -// Use this function for prompt text containing __VAR__ placeholders that will be substituted with sed commands. -// The caller is responsible for adding the sed substitution commands after calling this function. -// It uses placeholder format (__VAR__) instead of shell variable expansion, to prevent template injection. -// -// For static prompt text without variables, use WritePromptTextToYAML instead. 
-func WritePromptTextToYAMLWithPlaceholders(yaml *strings.Builder, text string, indent string) { - textLines := strings.Split(text, "\n") - chunks := chunkLines(textLines, indent, MaxPromptChunkSize, MaxPromptChunks) - - delimiter := GenerateHeredocDelimiter("PROMPT") - // Write each chunk as a separate heredoc - // Use direct cat to file (append mode) - placeholders will be substituted with sed - for _, chunk := range chunks { - yaml.WriteString(indent + "cat << '" + delimiter + "' >> \"$GH_AW_PROMPT\"\n") - for _, line := range chunk { - fmt.Fprintf(yaml, "%s%s\n", indent, line) - } - yaml.WriteString(indent + delimiter + "\n") - } -} - -// chunkLines splits lines into chunks where each chunk's total size (including indent) is less than maxSize. -// Returns at most maxChunks chunks. If content exceeds the limit, it truncates at the last chunk. -func chunkLines(lines []string, indent string, maxSize int, maxChunks int) [][]string { - shLog.Printf("Chunking lines: total_lines=%d, max_size=%d, max_chunks=%d", len(lines), maxSize, maxChunks) - if len(lines) == 0 { - return [][]string{{}} - } - - var chunks [][]string - var currentChunk []string - currentSize := 0 - - for _, line := range lines { - // Calculate size including indent and newline - lineSize := len(indent) + len(line) + 1 - - // If adding this line would exceed the limit, start a new chunk - if currentSize+lineSize > maxSize && len(currentChunk) > 0 { - // Check if we've reached the maximum number of chunks - if len(chunks) >= maxChunks-1 { - // We're at the last allowed chunk, so add remaining lines to current chunk - currentChunk = append(currentChunk, line) - currentSize += lineSize - continue - } - - // Start a new chunk - shLog.Printf("Starting new chunk: previous_chunk_size=%d, chunks_so_far=%d", currentSize, len(chunks)) - chunks = append(chunks, currentChunk) - currentChunk = []string{line} - currentSize = lineSize - } else { - currentChunk = append(currentChunk, line) - currentSize += lineSize - 
} - } - - // Add the last chunk if there's content - if len(currentChunk) > 0 { - chunks = append(chunks, currentChunk) - } - - // If we still have no chunks, return an empty chunk - if len(chunks) == 0 { - return [][]string{{}} - } - - shLog.Printf("Chunking complete: created %d chunks", len(chunks)) - return chunks -} diff --git a/pkg/workflow/sh_integration_test.go b/pkg/workflow/sh_integration_test.go deleted file mode 100644 index b84bb42f11..0000000000 --- a/pkg/workflow/sh_integration_test.go +++ /dev/null @@ -1,371 +0,0 @@ -//go:build integration - -package workflow - -import ( - "strings" - "testing" -) - -// TestWritePromptTextToYAML_IntegrationWithCompiler verifies that WritePromptTextToYAML -// correctly handles large prompt text that would be used in actual workflow compilation. -// This test simulates what would happen if an embedded prompt file was very large. -func TestWritePromptTextToYAML_IntegrationWithCompiler(t *testing.T) { - // Create a realistic scenario: a very long help text or documentation - // that might be included as prompt instructions - section := strings.Repeat("This is an important instruction line that provides guidance to the AI agent on how to perform its task correctly. 
", 10) - - // Create 200 lines to ensure we exceed 20KB - lines := make([]string, 200) - for i := range lines { - lines[i] = section - } - largePromptText := strings.Join(lines, "\n") - - // Calculate total size - totalSize := len(largePromptText) - if totalSize < 20000 { - t.Fatalf("Test setup error: prompt text should be at least 20000 bytes, got %d", totalSize) - } - - var yaml strings.Builder - indent := " " // Standard indent used in workflow generation - - // Call the function as it would be called in real compilation - WritePromptTextToYAML(&yaml, largePromptText, indent) - - result := yaml.String() - - // Verify multiple heredoc blocks were created - heredocCount := strings.Count(result, `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`) - if heredocCount < 2 { - t.Errorf("Expected multiple heredoc blocks for large text (%d bytes), got %d", totalSize, heredocCount) - } - - // Verify we didn't exceed 5 chunks - if heredocCount > 5 { - t.Errorf("Expected at most 5 heredoc blocks (max limit), got %d", heredocCount) - } - - // Verify each heredoc is closed - eofCount := strings.Count(result, indent+"GH_AW_PROMPT_EOF") - if eofCount != heredocCount { - t.Errorf("Expected %d EOF markers to match %d heredoc blocks, got %d", heredocCount, heredocCount, eofCount) - } - - // Verify the content is preserved (check first and last sections) - firstSection := section[:100] - lastSection := section[len(section)-100:] - if !strings.Contains(result, firstSection) { - t.Error("Expected to find beginning of original text in output") - } - if !strings.Contains(result, lastSection) { - t.Error("Expected to find end of original text in output") - } - - // Verify the YAML structure is valid (basic check) - if !strings.Contains(result, `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`) { - t.Error("Expected proper heredoc syntax in output") - } - - t.Logf("Successfully chunked %d bytes into %d heredoc blocks", totalSize, heredocCount) - - // Verify no lines are lost - extract content 
from heredoc blocks and compare - extractedLines := extractLinesFromYAML(result, indent) - originalLines := strings.Split(largePromptText, "\n") - - if len(extractedLines) != len(originalLines) { - t.Errorf("Line count mismatch: expected %d lines, got %d lines", len(originalLines), len(extractedLines)) - } - - // Verify content integrity by checking line-by-line - mismatchCount := 0 - for i := 0; i < len(originalLines) && i < len(extractedLines); i++ { - if originalLines[i] != extractedLines[i] { - mismatchCount++ - if mismatchCount <= 3 { // Only report first 3 mismatches - t.Errorf("Line %d mismatch:\nExpected: %q\nGot: %q", i+1, originalLines[i], extractedLines[i]) - } - } - } - - if mismatchCount > 0 { - t.Errorf("Total line mismatches: %d", mismatchCount) - } -} - -// TestWritePromptTextToYAML_RealWorldSizeSimulation simulates various real-world scenarios -// to ensure chunking works correctly across different text sizes. -func TestWritePromptTextToYAML_RealWorldSizeSimulation(t *testing.T) { - tests := []struct { - name string - textSize int // approximate size in bytes - linesCount int // number of lines - expectedChunks int // expected number of chunks - maxChunks int // should not exceed this - }{ - { - name: "small prompt (< 1KB)", - textSize: 500, - linesCount: 10, - expectedChunks: 1, - maxChunks: 1, - }, - { - name: "medium prompt (~10KB)", - textSize: 10000, - linesCount: 100, - expectedChunks: 1, - maxChunks: 1, - }, - { - name: "large prompt (~25KB)", - textSize: 25000, - linesCount: 250, - expectedChunks: 2, - maxChunks: 2, - }, - { - name: "very large prompt (~50KB)", - textSize: 50000, - linesCount: 500, - expectedChunks: 3, - maxChunks: 3, - }, - { - name: "extremely large prompt (~120KB)", - textSize: 120000, - linesCount: 1200, - expectedChunks: 5, - maxChunks: 5, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create text of approximately the desired size - // Account for newlines: total size = linesCount * 
(lineSize + 1) - 1 (no trailing newline) - lineSize := (tt.textSize + 1) / tt.linesCount // Adjust for newlines - if lineSize < 1 { - lineSize = 1 - } - line := strings.Repeat("x", lineSize) - lines := make([]string, tt.linesCount) - for i := range lines { - lines[i] = line - } - text := strings.Join(lines, "\n") - - var yaml strings.Builder - indent := " " - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - heredocCount := strings.Count(result, `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`) - - if heredocCount < tt.expectedChunks { - t.Errorf("Expected at least %d chunks for %s, got %d", tt.expectedChunks, tt.name, heredocCount) - } - - if heredocCount > tt.maxChunks { - t.Errorf("Expected at most %d chunks for %s, got %d", tt.maxChunks, tt.name, heredocCount) - } - - eofCount := strings.Count(result, indent+"GH_AW_PROMPT_EOF") - if eofCount != heredocCount { - t.Errorf("EOF count (%d) doesn't match heredoc count (%d) for %s", eofCount, heredocCount, tt.name) - } - - t.Logf("%s: %d bytes chunked into %d blocks", tt.name, len(text), heredocCount) - - // Verify no lines are lost - extractedLines := extractLinesFromYAML(result, indent) - originalLines := strings.Split(text, "\n") - - if len(extractedLines) != len(originalLines) { - t.Errorf("%s: Line count mismatch - expected %d lines, got %d lines", tt.name, len(originalLines), len(extractedLines)) - } - }) - } -} - -// extractLinesFromYAML extracts the actual content lines from a YAML heredoc output -// by parsing the heredoc blocks and removing the indent -func extractLinesFromYAML(yamlOutput string, indent string) []string { - var lines []string - inHeredoc := false - - for _, line := range strings.Split(yamlOutput, "\n") { - // Check if we're starting a heredoc block - if strings.Contains(line, `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`) { - inHeredoc = true - continue - } - - // Check if we're ending a heredoc block - if strings.TrimSpace(line) == "GH_AW_PROMPT_EOF" { - inHeredoc = 
false - continue - } - - // If we're in a heredoc block, extract the content line - if inHeredoc { - // Remove the indent from the line - if strings.HasPrefix(line, indent) { - contentLine := strings.TrimPrefix(line, indent) - lines = append(lines, contentLine) - } - } - } - - return lines -} - -// TestWritePromptTextToYAML_NoDataLoss verifies that no lines or chunks are lost -// during the chunking process, even with edge cases. -func TestWritePromptTextToYAML_NoDataLoss(t *testing.T) { - tests := []struct { - name string - lines []string - expectLoss bool - }{ - { - name: "single line", - lines: []string{"Single line of text"}, - expectLoss: false, - }, - { - name: "multiple short lines", - lines: []string{"Line 1", "Line 2", "Line 3", "Line 4", "Line 5"}, - expectLoss: false, - }, - { - name: "empty lines", - lines: []string{"Line 1", "", "Line 3", "", "Line 5"}, - expectLoss: false, - }, - { - name: "very long single line", - lines: []string{strings.Repeat("x", 25000)}, - expectLoss: false, - }, - { - name: "exactly at chunk boundary", - lines: func() []string { - // Create lines that total exactly 20000 bytes with indent - line := strings.Repeat("x", 100) - lines := make([]string, 180) - for i := range lines { - lines[i] = line - } - return lines - }(), - expectLoss: false, - }, - { - name: "large number of lines requiring max chunks", - lines: func() []string { - line := strings.Repeat("y", 1000) - lines := make([]string, 600) - for i := range lines { - lines[i] = line - } - return lines - }(), - expectLoss: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - text := strings.Join(tt.lines, "\n") - var yaml strings.Builder - indent := " " - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Extract lines from the YAML output - extractedLines := extractLinesFromYAML(result, indent) - - // Verify line count - if len(extractedLines) != len(tt.lines) { - t.Errorf("Line count mismatch: expected %d 
lines, got %d lines", len(tt.lines), len(extractedLines)) - t.Logf("Original lines: %d", len(tt.lines)) - t.Logf("Extracted lines: %d", len(extractedLines)) - } - - // Verify content integrity - mismatchCount := 0 - for i := 0; i < len(tt.lines) && i < len(extractedLines); i++ { - if tt.lines[i] != extractedLines[i] { - mismatchCount++ - if mismatchCount <= 3 { - t.Errorf("Line %d mismatch:\nExpected: %q\nGot: %q", i+1, tt.lines[i], extractedLines[i]) - } - } - } - - if mismatchCount > 0 { - t.Errorf("Total line mismatches: %d", mismatchCount) - } - }) - } -} - -// TestWritePromptTextToYAML_ChunkIntegrity verifies that chunks are properly formed -// and that the chunking process maintains data integrity. -func TestWritePromptTextToYAML_ChunkIntegrity(t *testing.T) { - // Create a large text that will require multiple chunks - line := strings.Repeat("Test line with some content. ", 50) - lines := make([]string, 300) - for i := range lines { - lines[i] = line - } - text := strings.Join(lines, "\n") - - var yaml strings.Builder - indent := " " - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Count heredoc blocks - heredocCount := strings.Count(result, `cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"`) - - t.Logf("Created %d heredoc blocks for %d lines (%d bytes)", heredocCount, len(lines), len(text)) - - // Verify we have multiple chunks but not exceeding max - if heredocCount < 2 { - t.Errorf("Expected multiple chunks for large text, got %d", heredocCount) - } - - if heredocCount > MaxPromptChunks { - t.Errorf("Expected at most %d chunks, got %d", MaxPromptChunks, heredocCount) - } - - // Verify all heredocs are properly closed - eofCount := strings.Count(result, indent+"GH_AW_PROMPT_EOF") - if eofCount != heredocCount { - t.Errorf("Heredoc closure mismatch: %d opens, %d closes", heredocCount, eofCount) - } - - // Verify no data loss - extractedLines := extractLinesFromYAML(result, indent) - if len(extractedLines) != len(lines) { - 
t.Errorf("Line count mismatch: expected %d, got %d", len(lines), len(extractedLines)) - } - - // Verify content integrity by checking a few random samples - sampleIndices := []int{0, len(lines) / 4, len(lines) / 2, len(lines) * 3 / 4, len(lines) - 1} - for _, idx := range sampleIndices { - if idx < len(lines) && idx < len(extractedLines) { - if lines[idx] != extractedLines[idx] { - t.Errorf("Content mismatch at line %d:\nExpected: %q\nGot: %q", idx+1, lines[idx], extractedLines[idx]) - } - } - } -} diff --git a/pkg/workflow/sh_test.go b/pkg/workflow/sh_test.go deleted file mode 100644 index f527d02916..0000000000 --- a/pkg/workflow/sh_test.go +++ /dev/null @@ -1,309 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestWritePromptTextToYAML_SmallText(t *testing.T) { - var yaml strings.Builder - text := "This is a small text\nWith a few lines\nThat doesn't need chunking" - indent := " " - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Get the expected delimiter - delimiter := GenerateHeredocDelimiter("PROMPT") - expectedHeredoc := `cat << '` + delimiter + `' >> "$GH_AW_PROMPT"` - - // Should have exactly one heredoc block - if strings.Count(result, expectedHeredoc) != 1 { - t.Errorf("Expected 1 heredoc block for small text, got %d", strings.Count(result, expectedHeredoc)) - } - - // Should contain all original lines - if !strings.Contains(result, "This is a small text") { - t.Error("Expected to find original text in output") - } - if !strings.Contains(result, "With a few lines") { - t.Error("Expected to find original text in output") - } - if !strings.Contains(result, "That doesn't need chunking") { - t.Error("Expected to find original text in output") - } - - // Should have proper EOF markers - if strings.Count(result, indent+delimiter) != 1 { - t.Errorf("Expected 1 EOF marker, got %d", strings.Count(result, indent+delimiter)) - } -} - -func TestWritePromptTextToYAML_LargeText(t 
*testing.T) { - var yaml strings.Builder - // Create text that exceeds 20000 characters - longLine := strings.Repeat("This is a very long line of content that will be repeated many times to exceed the character limit. ", 10) - lines := make([]string, 50) - for i := range lines { - lines[i] = longLine - } - text := strings.Join(lines, "\n") - indent := " " - - // Calculate expected size - totalSize := 0 - for _, line := range lines { - totalSize += len(indent) + len(line) + 1 - } - - // This should create multiple chunks since each line is ~1000 chars and we have 50 lines - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Get the expected delimiter - delimiter := GenerateHeredocDelimiter("PROMPT") - expectedHeredoc := `cat << '` + delimiter + `' >> "$GH_AW_PROMPT"` - - // Should have multiple heredoc blocks - heredocCount := strings.Count(result, expectedHeredoc) - if heredocCount < 2 { - t.Errorf("Expected at least 2 heredoc blocks for large text (total size ~%d bytes), got %d", totalSize, heredocCount) - } - - // Should not exceed 5 chunks (max limit) - if heredocCount > 5 { - t.Errorf("Expected at most 5 heredoc blocks, got %d", heredocCount) - } - - // Should have matching EOF markers - eofCount := strings.Count(result, indent+delimiter) - if eofCount != heredocCount { - t.Errorf("Expected %d EOF markers to match %d heredoc blocks, got %d", heredocCount, heredocCount, eofCount) - } - - // Should contain original content (or at least the beginning if truncated) - firstLine := strings.Split(text, "\n")[0] - if !strings.Contains(result, firstLine[:50]) { - t.Error("Expected to find beginning of original text in output") - } -} - -func TestWritePromptTextToYAML_ExactChunkBoundary(t *testing.T) { - var yaml strings.Builder - indent := " " - - // Create text that's exactly at the 20000 character boundary - // Each line: indent (10) + line (100) + newline (1) = 111 bytes - // 180 lines = 19,980 bytes (just under 20000) - line := 
strings.Repeat("x", 100) - lines := make([]string, 180) - for i := range lines { - lines[i] = line - } - text := strings.Join(lines, "\n") - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Get the expected delimiter - delimiter := GenerateHeredocDelimiter("PROMPT") - expectedHeredoc := `cat << '` + delimiter + `' >> "$GH_AW_PROMPT"` - - // Should have exactly 1 heredoc block since we're just under the limit - heredocCount := strings.Count(result, expectedHeredoc) - if heredocCount != 1 { - t.Errorf("Expected 1 heredoc block for text just under limit, got %d", heredocCount) - } -} - -func TestWritePromptTextToYAML_MaxChunksLimit(t *testing.T) { - var yaml strings.Builder - indent := " " - - // Create text that would need more than 5 chunks (if we allowed it) - // Each line: indent (10) + line (1000) + newline (1) = 1011 bytes - // 600 lines = ~606,600 bytes - // At 20000 bytes per chunk, this would need ~31 chunks, but we limit to 5 - line := strings.Repeat("y", 1000) - lines := make([]string, 600) - for i := range lines { - lines[i] = line - } - text := strings.Join(lines, "\n") - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Get the expected delimiter - delimiter := GenerateHeredocDelimiter("PROMPT") - expectedHeredoc := `cat << '` + delimiter + `' >> "$GH_AW_PROMPT"` - - // Should have exactly 5 heredoc blocks (the maximum) - heredocCount := strings.Count(result, expectedHeredoc) - if heredocCount != 5 { - t.Errorf("Expected exactly 5 heredoc blocks (max limit), got %d", heredocCount) - } - - // Should have matching EOF markers - eofCount := strings.Count(result, indent+delimiter) - if eofCount != 5 { - t.Errorf("Expected 5 EOF markers, got %d", eofCount) - } -} - -func TestWritePromptTextToYAML_EmptyText(t *testing.T) { - var yaml strings.Builder - text := "" - indent := " " - - WritePromptTextToYAML(&yaml, text, indent) - - result := yaml.String() - - // Get the expected delimiter - delimiter := 
GenerateHeredocDelimiter("PROMPT") - expectedHeredoc := `cat << '` + delimiter + `' >> "$GH_AW_PROMPT"` - - // Should have at least one heredoc block (even for empty text) - if strings.Count(result, expectedHeredoc) < 1 { - t.Error("Expected at least 1 heredoc block even for empty text") - } - - // Should have matching EOF markers - if strings.Count(result, indent+delimiter) < 1 { - t.Error("Expected at least 1 EOF marker") - } -} - -func TestChunkLines_SmallInput(t *testing.T) { - lines := []string{"line1", "line2", "line3"} - indent := " " - maxSize := 20000 - maxChunks := 5 - - chunks := chunkLines(lines, indent, maxSize, maxChunks) - - if len(chunks) != 1 { - t.Errorf("Expected 1 chunk for small input, got %d", len(chunks)) - } - - if len(chunks[0]) != 3 { - t.Errorf("Expected chunk to contain 3 lines, got %d", len(chunks[0])) - } -} - -func TestChunkLines_ExceedsSize(t *testing.T) { - // Create lines that will exceed maxSize - line := strings.Repeat("x", 1000) - lines := make([]string, 50) - for i := range lines { - lines[i] = line - } - - indent := " " - maxSize := 20000 - maxChunks := 5 - - chunks := chunkLines(lines, indent, maxSize, maxChunks) - - // Should have multiple chunks - if len(chunks) < 2 { - t.Errorf("Expected at least 2 chunks, got %d", len(chunks)) - } - - // Verify each chunk (except possibly the last) stays within size limit - for i, chunk := range chunks { - size := 0 - for _, line := range chunk { - size += len(indent) + len(line) + 1 - } - - // Last chunk might exceed if we hit maxChunks limit - if i < len(chunks)-1 && size > maxSize { - t.Errorf("Chunk %d exceeds size limit: %d > %d", i, size, maxSize) - } - } - - // Verify total lines are preserved - totalLines := 0 - for _, chunk := range chunks { - totalLines += len(chunk) - } - if totalLines != len(lines) { - t.Errorf("Expected %d total lines, got %d", len(lines), totalLines) - } -} - -func TestChunkLines_MaxChunksEnforced(t *testing.T) { - // Create many lines that would need more 
than maxChunks - line := strings.Repeat("x", 1000) - lines := make([]string, 600) - for i := range lines { - lines[i] = line - } - - indent := " " - maxSize := 20000 - maxChunks := 5 - - chunks := chunkLines(lines, indent, maxSize, maxChunks) - - // Should have exactly maxChunks - if len(chunks) != maxChunks { - t.Errorf("Expected exactly %d chunks (max limit), got %d", maxChunks, len(chunks)) - } - - // Verify all lines are included (even if last chunk is large) - totalLines := 0 - for _, chunk := range chunks { - totalLines += len(chunk) - } - if totalLines != len(lines) { - t.Errorf("Expected %d total lines, got %d", len(lines), totalLines) - } -} - -func TestChunkLines_EmptyInput(t *testing.T) { - lines := []string{} - indent := " " - maxSize := 20000 - maxChunks := 5 - - chunks := chunkLines(lines, indent, maxSize, maxChunks) - - // Should return at least one empty chunk - if len(chunks) != 1 { - t.Errorf("Expected 1 chunk for empty input, got %d", len(chunks)) - } - - if len(chunks[0]) != 0 { - t.Errorf("Expected empty chunk, got %d lines", len(chunks[0])) - } -} - -func TestChunkLines_SingleLineExceedsLimit(t *testing.T) { - // Single line that exceeds maxSize - line := strings.Repeat("x", 25000) - lines := []string{line} - - indent := " " - maxSize := 20000 - maxChunks := 5 - - chunks := chunkLines(lines, indent, maxSize, maxChunks) - - // Should still have one chunk with that single line - if len(chunks) != 1 { - t.Errorf("Expected 1 chunk, got %d", len(chunks)) - } - - if len(chunks[0]) != 1 { - t.Errorf("Expected 1 line in chunk, got %d", len(chunks[0])) - } -} diff --git a/pkg/workflow/staged_add_issue_labels_test.go b/pkg/workflow/staged_add_issue_labels_test.go deleted file mode 100644 index c20aa5bd76..0000000000 --- a/pkg/workflow/staged_add_issue_labels_test.go +++ /dev/null @@ -1,73 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestAddLabelsJobWithStagedFlag(t *testing.T) { - // Create a 
compiler instance - c := NewCompiler() - - // Test with staged: true - workflowData := &WorkflowData{ - Name: "test-workflow", - SafeOutputs: &SafeOutputsConfig{ - AddLabels: &AddLabelsConfig{}, - Staged: true, - }, - } - - job, err := c.buildAddLabelsJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building add labels job: %v", err) - } - - // Convert steps to a single string for testing - stepsContent := strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is included in the env section - if !strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") { - t.Error("Expected GH_AW_SAFE_OUTPUTS_STAGED environment variable to be set to true in add-labels job") - } - - // Test with staged: false - workflowData.SafeOutputs.Staged = false - - job, err = c.buildAddLabelsJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building add labels job: %v", err) - } - - stepsContent = strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is not included in the env section when false - // We need to be specific to avoid matching the JavaScript code that references the variable - if strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED:") { - t.Error("Expected GH_AW_SAFE_OUTPUTS_STAGED environment variable not to be set when staged is false") - } - -} - -func TestAddLabelsJobWithNilSafeOutputs(t *testing.T) { - // Create a compiler instance - c := NewCompiler() - - // Test with no SafeOutputs config - this should fail - workflowData := &WorkflowData{ - Name: "test-workflow", - SafeOutputs: nil, - } - - _, err := c.buildAddLabelsJob(workflowData, "main_job") - if err == nil { - t.Error("Expected error when SafeOutputs is nil") - } - - expectedError := "safe-outputs configuration is required" - if !strings.Contains(err.Error(), expectedError) { - t.Errorf("Expected error message to contain '%s', got: %v", expectedError, err) - } -} diff --git 
a/pkg/workflow/staged_create_issue_test.go b/pkg/workflow/staged_create_issue_test.go deleted file mode 100644 index a332d87785..0000000000 --- a/pkg/workflow/staged_create_issue_test.go +++ /dev/null @@ -1,88 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestCreateIssueJobWithStagedFlag(t *testing.T) { - // Create a compiler instance - c := NewCompiler() - - // Test with staged: true - workflowData := &WorkflowData{ - Name: "test-workflow", - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{}, - Staged: true, // pointer to true - }, - } - - job, err := c.buildCreateOutputIssueJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building create issue job: %v", err) - } - - // Convert steps to a single string for testing - stepsContent := strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is included in the env section - if !strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") { - t.Error("Expected GH_AW_SAFE_OUTPUTS_STAGED environment variable to be set to true in create-issue job") - } - - // Test with staged: false - workflowData.SafeOutputs.Staged = false // pointer to false - - job, err = c.buildCreateOutputIssueJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building create issue job: %v", err) - } - - stepsContent = strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is not included in the env section when false - // We need to be specific to avoid matching the JavaScript code that references the variable - if strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED:") { - t.Error("Expected GH_AW_SAFE_OUTPUTS_STAGED environment variable not to be set when staged is false") - } - -} - -func TestCreateIssueJobWithoutSafeOutputs(t *testing.T) { - // Create a compiler instance - c := NewCompiler() - - // Test with no SafeOutputs config - this should fail - workflowData := 
&WorkflowData{ - Name: "test-workflow", - SafeOutputs: nil, - } - - _, err := c.buildCreateOutputIssueJob(workflowData, "main_job") - if err == nil { - t.Error("Expected error when SafeOutputs is nil") - } - - expectedError := "safe-outputs.create-issue configuration is required" - if !strings.Contains(err.Error(), expectedError) { - t.Errorf("Expected error message to contain '%s', got: %v", expectedError, err) - } - - // Test with SafeOutputs but no CreateIssues config - this should also fail - workflowData.SafeOutputs = &SafeOutputsConfig{ - CreatePullRequests: &CreatePullRequestsConfig{}, - Staged: true, - } - - _, err = c.buildCreateOutputIssueJob(workflowData, "main_job") - if err == nil { - t.Error("Expected error when CreateIssues is nil") - } - - if !strings.Contains(err.Error(), expectedError) { - t.Errorf("Expected error message to contain '%s', got: %v", expectedError, err) - } -} diff --git a/pkg/workflow/staged_pull_request_test.go b/pkg/workflow/staged_pull_request_test.go deleted file mode 100644 index 6ca6880cd3..0000000000 --- a/pkg/workflow/staged_pull_request_test.go +++ /dev/null @@ -1,88 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "strings" - "testing" -) - -func TestCreatePullRequestJobWithStagedFlag(t *testing.T) { - // Create a compiler instance - c := NewCompiler() - - // Test with staged: true - workflowData := &WorkflowData{ - Name: "test-workflow", - SafeOutputs: &SafeOutputsConfig{ - CreatePullRequests: &CreatePullRequestsConfig{}, - Staged: true, - }, - } - - job, err := c.buildCreateOutputPullRequestJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building create pull request job: %v", err) - } - - // Convert steps to a single string for testing - stepsContent := strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is included in the env section - if !strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") { - t.Error("Expected 
GH_AW_SAFE_OUTPUTS_STAGED environment variable to be set to true in create-pull-request job") - } - - // Test with staged: false - workflowData.SafeOutputs.Staged = false // pointer to false - - job, err = c.buildCreateOutputPullRequestJob(workflowData, "main_job") - if err != nil { - t.Fatalf("Unexpected error building create pull request job: %v", err) - } - - stepsContent = strings.Join(job.Steps, "") - - // Check that GH_AW_SAFE_OUTPUTS_STAGED is not included in the env section when false - // We need to be specific to avoid matching the JavaScript code that references the variable - if strings.Contains(stepsContent, " GH_AW_SAFE_OUTPUTS_STAGED:") { - t.Error("Expected GH_AW_SAFE_OUTPUTS_STAGED environment variable not to be set when staged is false") - } - -} - -func TestCreatePullRequestJobWithoutSafeOutputs(t *testing.T) { - // Create a compiler instance - c := NewCompiler() - - // Test with no SafeOutputs config - this should fail - workflowData := &WorkflowData{ - Name: "test-workflow", - SafeOutputs: nil, - } - - _, err := c.buildCreateOutputPullRequestJob(workflowData, "main_job") - if err == nil { - t.Error("Expected error when SafeOutputs is nil") - } - - expectedError := "safe-outputs.create-pull-request configuration is required" - if !strings.Contains(err.Error(), expectedError) { - t.Errorf("Expected error message to contain '%s', got: %v", expectedError, err) - } - - // Test with SafeOutputs but no CreatePullRequests config - this should also fail - workflowData.SafeOutputs = &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{}, - Staged: true, - } - - _, err = c.buildCreateOutputPullRequestJob(workflowData, "main_job") - if err == nil { - t.Error("Expected error when CreatePullRequests is nil") - } - - if !strings.Contains(err.Error(), expectedError) { - t.Errorf("Expected error message to contain '%s', got: %v", expectedError, err) - } -} diff --git a/pkg/workflow/template_rendering_test.go b/pkg/workflow/template_rendering_test.go index 
35595ecf4e..d3d003a6d6 100644 --- a/pkg/workflow/template_rendering_test.go +++ b/pkg/workflow/template_rendering_test.go @@ -77,7 +77,7 @@ Normal content here. t.Error("Compiled workflow should contain interpolation and template rendering step") } - if !strings.Contains(compiledStr, "uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd") { + if !strings.Contains(compiledStr, "uses: actions/github-script@") { // SHA varies t.Error("Interpolation and template rendering step should use github-script action") } diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden index 10a8fa4caf..fe734c3d3f 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden @@ -177,7 +177,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -312,10 +312,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e 
GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -336,7 +337,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -347,7 +349,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -430,7 +432,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -475,7 +477,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden index 1ab0a9b48e..24b1b412ca 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden @@ -261,7 +261,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: 
/tmp/gh-aw/aw-prompts/prompt.txt @@ -297,7 +297,7 @@ jobs: with: persist-credentials: false - name: Setup Go for CLI build - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version-file: go.mod cache: true @@ -327,7 +327,7 @@ jobs: build-args: | BINARY=dist/gh-aw-linux-amd64 - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 with: go-version: '1.25' - name: Capture GOROOT for AWF chroot mode @@ -461,10 +461,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export 
MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -512,7 +513,8 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -523,7 +525,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -606,7 +608,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ 
secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -651,7 +653,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden index 72c4ef3cde..dd3bf5e9e6 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden @@ -180,7 +180,7 @@ jobs: run: bash /opt/gh-aw/actions/print_prompt_summary.sh - name: Upload prompt artifact if: success() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts/prompt.txt @@ -315,10 +315,11 @@ jobs: export MCP_GATEWAY_API_KEY export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" export DEBUG="*" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e 
GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.6' mkdir -p /home/runner/.copilot cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -339,7 +340,8 @@ jobs: 
"port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", + "payloadSizeThreshold": 524288 } } GH_AW_MCP_CONFIG_EOF @@ -350,7 +352,7 @@ jobs: const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); await generateWorkflowOverview(core); - name: Download prompt artifact - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0 with: name: prompt path: /tmp/gh-aw/aw-prompts @@ -433,7 +435,7 @@ jobs: SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload engine output files - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent_outputs path: | @@ -478,7 +480,7 @@ jobs: - name: Upload agent artifacts if: always() continue-on-error: true - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: agent-artifacts path: | diff --git a/pkg/workflow/threat_detection_test.go b/pkg/workflow/threat_detection_test.go index 6145ddc39f..6b8ca75dbc 100644 --- a/pkg/workflow/threat_detection_test.go +++ b/pkg/workflow/threat_detection_test.go @@ -632,7 +632,7 @@ func TestBuildUploadDetectionLogStep(t *testing.T) { expectedComponents := []string{ "name: Upload threat detection log", "if: always()", - "uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f", + "uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f", "name: threat-detection.log", "path: /tmp/gh-aw/threat-detection/detection.log", "if-no-files-found: ignore", diff --git a/pkg/workflow/tools_parser.go 
b/pkg/workflow/tools_parser.go index 8998a39044..810b458e96 100644 --- a/pkg/workflow/tools_parser.go +++ b/pkg/workflow/tools_parser.go @@ -234,6 +234,14 @@ func parseGitHubTool(val any) *GitHubToolConfig { config.App = parseAppConfig(app) } + // Parse guard policy fields (flat syntax: repos and min-integrity directly under github:) + if repos, ok := configMap["repos"]; ok { + config.Repos = repos // Store as-is, validation will happen later + } + if integrity, ok := configMap["min-integrity"].(string); ok { + config.MinIntegrity = GitHubIntegrityLevel(integrity) + } + return config } diff --git a/pkg/workflow/tools_types.go b/pkg/workflow/tools_types.go index 12c30a6bfd..32df7e391f 100644 --- a/pkg/workflow/tools_types.go +++ b/pkg/workflow/tools_types.go @@ -152,6 +152,11 @@ func mcpServerConfigToMap(config MCPServerConfig) map[string]any { result["mounts"] = config.Mounts } + // Add guard policies if set + if len(config.GuardPolicies) > 0 { + result["guard-policies"] = config.GuardPolicies + } + // Add custom fields (these override standard fields if there are conflicts) maps.Copy(result, config.CustomFields) @@ -257,6 +262,24 @@ func (g GitHubToolsets) ToStringSlice() []string { return result } +// GitHubIntegrityLevel represents the minimum integrity level required for repository access +type GitHubIntegrityLevel string + +const ( + // GitHubIntegrityNone allows access with no integrity requirements + GitHubIntegrityNone GitHubIntegrityLevel = "none" + // GitHubIntegrityReader requires read-level integrity + GitHubIntegrityReader GitHubIntegrityLevel = "reader" + // GitHubIntegrityWriter requires write-level integrity + GitHubIntegrityWriter GitHubIntegrityLevel = "writer" + // GitHubIntegrityMerged requires merged-level integrity + GitHubIntegrityMerged GitHubIntegrityLevel = "merged" +) + +// GitHubReposScope represents the repository scope for guard policy enforcement +// Can be one of: "all", "public", or an array of repository patterns +type 
GitHubReposScope any // string or []any (YAML-parsed arrays are []any) + // GitHubToolConfig represents the configuration for the GitHub tool // Can be nil (enabled with defaults), string, or an object with specific settings type GitHubToolConfig struct { @@ -269,6 +292,13 @@ type GitHubToolConfig struct { Toolset GitHubToolsets `yaml:"toolsets,omitempty"` Lockdown bool `yaml:"lockdown,omitempty"` App *GitHubAppConfig `yaml:"app,omitempty"` // GitHub App configuration for token minting + + // Guard policy fields (flat syntax under github:) + // Repos defines the access scope for policy enforcement. + // Supports: "all", "public", or an array of patterns ["owner/repo", "owner/*"] (lowercase) + Repos GitHubReposScope `yaml:"repos,omitempty"` + // MinIntegrity defines the minimum integrity level required: "none", "reader", "writer", "merged" + MinIntegrity GitHubIntegrityLevel `yaml:"min-integrity,omitempty"` } // PlaywrightToolConfig represents the configuration for the Playwright tool @@ -339,6 +369,12 @@ type MCPServerConfig struct { Mode string `yaml:"mode,omitempty"` // MCP server mode (stdio, http, remote, local) Toolsets []string `yaml:"toolsets,omitempty"` // Toolsets to enable + // Guard policies for access control at the MCP gateway level + // This is a general field that can hold server-specific policy configurations + // For GitHub: policies are represented via GitHubAllowOnlyPolicy on GitHubToolConfig + // For Jira/WorkIQ: define similar server-specific policy types + GuardPolicies map[string]any `yaml:"guard-policies,omitempty"` + // For truly dynamic configuration (server-specific fields not covered above) CustomFields map[string]any `yaml:",inline"` } @@ -348,17 +384,19 @@ type MCPServerConfig struct { // Per MCP Gateway Specification v1.0.0: All stdio-based MCP servers MUST be containerized. // Direct command execution is not supported. 
type MCPGatewayRuntimeConfig struct { - Container string `yaml:"container,omitempty"` // Container image for the gateway (required) - Version string `yaml:"version,omitempty"` // Optional version/tag for the container - Entrypoint string `yaml:"entrypoint,omitempty"` // Optional entrypoint override for the container - Args []string `yaml:"args,omitempty"` // Arguments for docker run - EntrypointArgs []string `yaml:"entrypointArgs,omitempty"` // Arguments passed to container entrypoint - Env map[string]string `yaml:"env,omitempty"` // Environment variables for the gateway - Port int `yaml:"port,omitempty"` // Port for the gateway HTTP server (default: 8080) - APIKey string `yaml:"api-key,omitempty"` // API key for gateway authentication - Domain string `yaml:"domain,omitempty"` // Domain for gateway URL (localhost or host.docker.internal) - Mounts []string `yaml:"mounts,omitempty"` // Volume mounts for the gateway container (format: "source:dest:mode") - PayloadDir string `yaml:"payload-dir,omitempty"` // Directory path for storing large payload JSON files (must be absolute path) + Container string `yaml:"container,omitempty"` // Container image for the gateway (required) + Version string `yaml:"version,omitempty"` // Optional version/tag for the container + Entrypoint string `yaml:"entrypoint,omitempty"` // Optional entrypoint override for the container + Args []string `yaml:"args,omitempty"` // Arguments for docker run + EntrypointArgs []string `yaml:"entrypointArgs,omitempty"` // Arguments passed to container entrypoint + Env map[string]string `yaml:"env,omitempty"` // Environment variables for the gateway + Port int `yaml:"port,omitempty"` // Port for the gateway HTTP server (default: 8080) + APIKey string `yaml:"api-key,omitempty"` // API key for gateway authentication + Domain string `yaml:"domain,omitempty"` // Domain for gateway URL (localhost or host.docker.internal) + Mounts []string `yaml:"mounts,omitempty"` // Volume mounts for the gateway container 
(format: "source:dest:mode") + PayloadDir string `yaml:"payload-dir,omitempty"` // Directory path for storing large payload JSON files (must be absolute path) + PayloadPathPrefix string `yaml:"payload-path-prefix,omitempty"` // Path prefix to remap payload paths for agent containers (e.g., /workspace/payloads) + PayloadSizeThreshold int `yaml:"payload-size-threshold,omitempty"` // Size threshold in bytes for storing payloads to disk (default: 524288 = 512KB) } // HasTool checks if a tool is present in the configuration diff --git a/pkg/workflow/tools_validation.go b/pkg/workflow/tools_validation.go index 17a06f4efe..49ea7ad3d2 100644 --- a/pkg/workflow/tools_validation.go +++ b/pkg/workflow/tools_validation.go @@ -87,6 +87,171 @@ func validateGitHubToolConfig(tools *Tools, workflowName string) error { return nil } +// validateGitHubGuardPolicy validates the GitHub guard policy configuration. +// Guard policy fields (repos, min-integrity) are specified flat under github:. +// Both fields must be present if either is specified. +func validateGitHubGuardPolicy(tools *Tools, workflowName string) error { + if tools == nil || tools.GitHub == nil { + return nil + } + + github := tools.GitHub + hasRepos := github.Repos != nil + hasMinIntegrity := github.MinIntegrity != "" + + // No guard policy fields present - nothing to validate + if !hasRepos && !hasMinIntegrity { + return nil + } + + // Validate repos field (required when min-integrity is set) + if !hasRepos { + toolsValidationLog.Printf("Missing repos in guard policy for workflow: %s", workflowName) + return errors.New("invalid guard policy: 'github.repos' is required. 
Use 'all', 'public', or an array of repository patterns (e.g., ['owner/repo', 'owner/*'])") + } + + // Validate repos format + if err := validateReposScope(github.Repos, workflowName); err != nil { + return err + } + + // Validate min-integrity field (required when repos is set) + if !hasMinIntegrity { + toolsValidationLog.Printf("Missing min-integrity in guard policy for workflow: %s", workflowName) + return errors.New("invalid guard policy: 'github.min-integrity' is required. Valid values: 'none', 'reader', 'writer', 'merged'") + } + + // Validate min-integrity value + validIntegrityLevels := map[GitHubIntegrityLevel]bool{ + GitHubIntegrityNone: true, + GitHubIntegrityReader: true, + GitHubIntegrityWriter: true, + GitHubIntegrityMerged: true, + } + + if !validIntegrityLevels[github.MinIntegrity] { + toolsValidationLog.Printf("Invalid min-integrity level '%s' in workflow: %s", github.MinIntegrity, workflowName) + return errors.New("invalid guard policy: 'github.min-integrity' must be one of: 'none', 'reader', 'writer', 'merged'. Got: '" + string(github.MinIntegrity) + "'") + } + + return nil +} + +// validateReposScope validates the repos field in the guard policy +func validateReposScope(repos any, workflowName string) error { + // Case 1: String value ("all" or "public") + if reposStr, ok := repos.(string); ok { + if reposStr != "all" && reposStr != "public" { + toolsValidationLog.Printf("Invalid repos string '%s' in workflow: %s", reposStr, workflowName) + return errors.New("invalid guard policy: 'github.repos' string must be 'all' or 'public'. Got: '" + reposStr + "'") + } + return nil + } + + // Case 2a: Array of patterns from YAML parsing ([]any) + if reposArray, ok := repos.([]any); ok { + if len(reposArray) == 0 { + toolsValidationLog.Printf("Empty repos array in workflow: %s", workflowName) + return errors.New("invalid guard policy: 'github.repos' array cannot be empty. 
Provide at least one repository pattern") + } + + for i, item := range reposArray { + pattern, ok := item.(string) + if !ok { + toolsValidationLog.Printf("Non-string item in repos array at index %d in workflow: %s", i, workflowName) + return errors.New("invalid guard policy: 'github.repos' array must contain only strings") + } + + if err := validateRepoPattern(pattern, workflowName); err != nil { + return err + } + } + + return nil + } + + // Case 2b: Array of patterns from programmatic construction ([]string) + if reposArray, ok := repos.([]string); ok { + if len(reposArray) == 0 { + toolsValidationLog.Printf("Empty repos array in workflow: %s", workflowName) + return errors.New("invalid guard policy: 'github.repos' array cannot be empty. Provide at least one repository pattern") + } + + for _, pattern := range reposArray { + if err := validateRepoPattern(pattern, workflowName); err != nil { + return err + } + } + + return nil + } + + // Invalid type + toolsValidationLog.Printf("Invalid repos type in workflow: %s", workflowName) + return errors.New("invalid guard policy: 'github.repos' must be 'all', 'public', or an array of repository patterns") +} + +// validateRepoPattern validates a single repository pattern +func validateRepoPattern(pattern string, workflowName string) error { + // Pattern must be lowercase + if strings.ToLower(pattern) != pattern { + toolsValidationLog.Printf("Repository pattern '%s' is not lowercase in workflow: %s", pattern, workflowName) + return errors.New("invalid guard policy: repository pattern '" + pattern + "' must be lowercase") + } + + // Check for valid pattern formats: + // 1. owner/repo (exact match) + // 2. owner/* (owner wildcard) + // 3. 
owner/re* (repository prefix wildcard) + parts := strings.Split(pattern, "/") + if len(parts) != 2 { + toolsValidationLog.Printf("Invalid repository pattern '%s' in workflow: %s", pattern, workflowName) + return errors.New("invalid guard policy: repository pattern '" + pattern + "' must be in format 'owner/repo', 'owner/*', or 'owner/prefix*'") + } + + owner := parts[0] + repo := parts[1] + + // Validate owner part (must be non-empty and contain only valid characters) + if owner == "" { + return errors.New("invalid guard policy: repository pattern '" + pattern + "' has empty owner") + } + + if !isValidOwnerOrRepo(owner) { + return errors.New("invalid guard policy: repository pattern '" + pattern + "' has invalid owner. Must contain only lowercase letters, numbers, hyphens, and underscores") + } + + // Validate repo part + if repo == "" { + return errors.New("invalid guard policy: repository pattern '" + pattern + "' has empty repository name") + } + + // Allow wildcard '*' or prefix with trailing '*' + if repo != "*" && !isValidOwnerOrRepo(strings.TrimSuffix(repo, "*")) { + return errors.New("invalid guard policy: repository pattern '" + pattern + "' has invalid repository name. Must contain only lowercase letters, numbers, hyphens, underscores, or be '*' or 'prefix*'") + } + + // Validate that wildcard is only at the end (not in the middle) + if strings.Contains(strings.TrimSuffix(repo, "*"), "*") { + return errors.New("invalid guard policy: repository pattern '" + pattern + "' has wildcard in the middle. 
Wildcards only allowed at the end (e.g., 'prefix*')") + } + + return nil +} + +// isValidOwnerOrRepo checks if a string contains only valid GitHub owner/repo characters +func isValidOwnerOrRepo(s string) bool { + if s == "" { + return false + } + for _, ch := range s { + if (ch < 'a' || ch > 'z') && (ch < '0' || ch > '9') && ch != '-' && ch != '_' { + return false + } + } + return true +} + // Note: validateGitToolForSafeOutputs was removed because git commands are automatically // injected by the compiler when safe-outputs needs them (see compiler_safe_outputs.go). // The validation was misleading - it would fail even though the compiler would add the diff --git a/pkg/workflow/tools_validation_test.go b/pkg/workflow/tools_validation_test.go index 14097da97d..ee34344ed1 100644 --- a/pkg/workflow/tools_validation_test.go +++ b/pkg/workflow/tools_validation_test.go @@ -355,3 +355,206 @@ func TestValidateGitHubToolConfig(t *testing.T) { }) } } + +func TestValidateGitHubGuardPolicy(t *testing.T) { + tests := []struct { + name string + toolsMap map[string]any + shouldError bool + errorMsg string + }{ + { + name: "nil tools is valid", + toolsMap: nil, + shouldError: false, + }, + { + name: "no github tool is valid", + toolsMap: map[string]any{"bash": true}, + shouldError: false, + }, + { + name: "github tool without guard policy fields is valid", + toolsMap: map[string]any{"github": map[string]any{"mode": "remote"}}, + shouldError: false, + }, + { + name: "valid guard policy with repos=all", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "all", + "min-integrity": "reader", + }, + }, + shouldError: false, + }, + { + name: "valid guard policy with repos=public", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "public", + "min-integrity": "writer", + }, + }, + shouldError: false, + }, + { + name: "valid guard policy with repos array ([]any)", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": []any{"owner/repo", 
"owner/*"}, + "min-integrity": "merged", + }, + }, + shouldError: false, + }, + { + name: "valid guard policy with min-integrity=none", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "all", + "min-integrity": "none", + }, + }, + shouldError: false, + }, + { + name: "missing repos field", + toolsMap: map[string]any{ + "github": map[string]any{ + "min-integrity": "reader", + }, + }, + shouldError: true, + errorMsg: "'github.repos' is required", + }, + { + name: "missing min-integrity field", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "all", + }, + }, + shouldError: true, + errorMsg: "'github.min-integrity' is required", + }, + { + name: "invalid min-integrity value", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "all", + "min-integrity": "superuser", + }, + }, + shouldError: true, + errorMsg: "'github.min-integrity' must be one of", + }, + { + name: "invalid repos string value", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": "private", + "min-integrity": "reader", + }, + }, + shouldError: true, + errorMsg: "'github.repos' string must be 'all' or 'public'", + }, + { + name: "empty repos array", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": []any{}, + "min-integrity": "reader", + }, + }, + shouldError: true, + errorMsg: "'github.repos' array cannot be empty", + }, + { + name: "repos array with uppercase pattern", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": []any{"Owner/repo"}, + "min-integrity": "reader", + }, + }, + shouldError: true, + errorMsg: "must be lowercase", + }, + { + name: "repos array with invalid pattern format", + toolsMap: map[string]any{ + "github": map[string]any{ + "repos": []any{"just-a-name"}, + "min-integrity": "reader", + }, + }, + shouldError: true, + errorMsg: "must be in format", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tools := NewTools(tt.toolsMap) + err := 
validateGitHubGuardPolicy(tools, "test-workflow") + + if tt.shouldError { + require.Error(t, err, "Expected error for %s", tt.name) + if tt.errorMsg != "" { + assert.Contains(t, err.Error(), tt.errorMsg, "Error message should contain expected text") + } + } else { + assert.NoError(t, err, "Expected no error for %s", tt.name) + } + }) + } +} + +func TestValidateReposScopeWithStringSlice(t *testing.T) { + tests := []struct { + name string + repos any + shouldError bool + errorMsg string + }{ + { + name: "valid []string repos array", + repos: []string{"owner/repo", "owner/*"}, + shouldError: false, + }, + { + name: "valid []any repos array", + repos: []any{"owner/repo", "owner/*"}, + shouldError: false, + }, + { + name: "empty []string repos array", + repos: []string{}, + shouldError: true, + errorMsg: "array cannot be empty", + }, + { + name: "[]string with invalid pattern", + repos: []string{"Owner/Repo"}, + shouldError: true, + errorMsg: "must be lowercase", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateReposScope(tt.repos, "test-workflow") + + if tt.shouldError { + require.Error(t, err, "Expected error for %s", tt.name) + if tt.errorMsg != "" { + assert.Contains(t, err.Error(), tt.errorMsg, "Error message should contain expected text") + } + } else { + assert.NoError(t, err, "Expected no error for %s", tt.name) + } + }) + } +} diff --git a/pkg/workflow/unified_prompt_step.go b/pkg/workflow/unified_prompt_step.go index 69b5fa275c..3a2b3ae816 100644 --- a/pkg/workflow/unified_prompt_step.go +++ b/pkg/workflow/unified_prompt_step.go @@ -764,42 +764,3 @@ func buildSafeOutputsSections(safeOutputs *SafeOutputsConfig) []PromptSection { return sections } - -var promptStepHelperLog = logger.New("workflow:prompt_step_helper") - -// generateStaticPromptStep is a helper function that generates a workflow step -// for appending static prompt text to the prompt file. 
It encapsulates the common -// pattern used across multiple prompt generators (XPIA, temp folder, playwright, edit tool, etc.) -// to reduce code duplication and ensure consistency. -// -// Parameters: -// - yaml: The string builder to write the YAML to -// - description: The name of the workflow step (e.g., "Append XPIA security instructions to prompt") -// - promptText: The static text content to append to the prompt (used for backward compatibility) -// - shouldInclude: Whether to generate the step (false means skip generation entirely) -// -// Example usage: -// -// generateStaticPromptStep(yaml, -// "Append XPIA security instructions to prompt", -// xpiaPromptText, -// data.SafetyPrompt) -// -// Deprecated: This function is kept for backward compatibility with inline prompts. -// Use generateStaticPromptStepFromFile for new code. -func generateStaticPromptStep(yaml *strings.Builder, description string, promptText string, shouldInclude bool) { - promptStepHelperLog.Printf("Generating static prompt step: description=%s, shouldInclude=%t", description, shouldInclude) - // Skip generation if guard condition is false - if !shouldInclude { - return - } - - // Use the existing appendPromptStep helper with a renderer that writes the prompt text - appendPromptStep(yaml, - description, - func(y *strings.Builder, indent string) { - WritePromptTextToYAML(y, promptText, indent) - }, - "", // no condition - " ") -} diff --git a/scratchpad/guard-policies-specification.md b/scratchpad/guard-policies-specification.md new file mode 100644 index 0000000000..f44e18e341 --- /dev/null +++ b/scratchpad/guard-policies-specification.md @@ -0,0 +1,307 @@ +# Guard Policies Integration Proposal + +## Executive Summary + +This document proposes an extensible guard policies framework for the MCP Gateway, starting with GitHub-specific policies. 
Guard policies enable fine-grained access control at the MCP gateway level, restricting which repositories and operations AI agents can access through MCP servers. + +## Problem Statement + +The user requested support for guard policies in the MCP gateway configuration, with the following requirements: + +1. Support GitHub-specific guard policies with flat frontmatter syntax: + - `repos` (scope): Repository access patterns + - `min-integrity` (minintegrity): Minimum integrity level required + +2. Design an extensible system that can support future MCP servers (Jira, WorkIQ) with different policy schemas + +3. Expose these parameters through workflow frontmatter in an intuitive way + +## Proposed Solution + +### 1. Type Hierarchy + +``` +GitHubToolConfig (GitHub-specific) + ├── Repos: GitHubReposScope (string or []any) + └── MinIntegrity: GitHubIntegrityLevel (enum) + +MCPServerConfig (general) + └── GuardPolicies: map[string]any (extensible for all servers) +``` + +### 2. GitHub Guard Policy Schema + +Based on the provided JSON schema, the implementation supports: + +**Repos Scope:** +- `"all"` - All repositories accessible by the token +- `"public"` - Public repositories only +- Array of patterns: + - `"owner/repo"` - Exact repository match + - `"owner/*"` - All repositories under owner + - `"owner/prefix*"` - Repositories with name prefix under owner + +**Integrity Levels:** +- `"none"` - No integrity requirements +- `"reader"` - Read-level integrity +- `"writer"` - Write-level integrity +- `"merged"` - Merged-level integrity + +### 3.
Frontmatter Syntax + +**Minimal Example:** +```yaml +tools: + github: + mode: remote + toolsets: [default] + repos: "all" + min-integrity: reader +``` + +**With Repository Patterns:** +```yaml +tools: + github: + mode: remote + toolsets: [default] + repos: + - "myorg/*" + - "partner/shared-repo" + - "docs/api-*" + min-integrity: writer +``` + +**Public Repositories Only:** +```yaml +tools: + github: + repos: "public" + min-integrity: none +``` + +### 4. MCP Gateway Configuration Flow + +1. **Frontmatter Parsing** (`tools_parser.go`): + - Extracts `repos` and `min-integrity` directly from GitHub tool config + - Stores them as fields on `GitHubToolConfig` + - Validates structure and types + +2. **Validation** (`tools_validation.go`): + - Validates repos format (all/public or valid patterns) + - Validates min-integrity level (none/reader/writer/merged) + - Validates repository pattern syntax (lowercase, valid characters, wildcard placement) + - Called during workflow compilation + +3. **Compilation**: + - Guard policy fields (repos, min-integrity) included in compiled GitHub tool configuration + - Passed through to MCP Gateway configuration + +4. **Runtime (MCP Gateway)**: + - Gateway receives guard policies in server configuration + - Enforces policies on all tool invocations + - Blocks unauthorized repository access + +### 5. Extensibility for Future Servers + +The design supports future MCP servers (Jira, WorkIQ) through: + +1. **Server-Specific Policy Fields:** + ```go + type JiraToolConfig struct { + // ... other fields ... + // Guard policy fields (flat syntax under jira:) + Projects []string `yaml:"projects,omitempty"` + IssueTypes []string `yaml:"issue-types,omitempty"` + } + ``` + +2. **General MCPServerConfig Field:** + ```go + type MCPServerConfig struct { + // ... + GuardPolicies map[string]any `yaml:"guard-policies,omitempty"` + } + ``` + +3. 
**Frontmatter Configuration:** + ```yaml + tools: + jira: + mode: remote + projects: ["PROJ-*", "SHARED"] + issue-types: ["Bug", "Story"] + ``` + +## Implementation Details + +### Files Modified + +1. **pkg/workflow/tools_types.go** + - Added `GitHubIntegrityLevel` enum type + - Added `GitHubReposScope` type alias + - Extended `GitHubToolConfig` with flat `Repos` and `MinIntegrity` fields + - Extended `MCPServerConfig` with `GuardPolicies` field + +2. **pkg/workflow/schemas/mcp-gateway-config.schema.json** + - Added `guard-policies` field to `stdioServerConfig` + - Added `guard-policies` field to `httpServerConfig` + - Set `additionalProperties: true` for server-specific schemas + +3. **pkg/workflow/tools_parser.go** + - Extended `parseGitHubTool()` to extract `repos` and `min-integrity` directly + +4. **pkg/workflow/tools_validation.go** + - Updated `validateGitHubGuardPolicy()` function (validates flat fields) + - Added `validateReposScope()` function + - Added `validateRepoPattern()` function + - Added `isValidOwnerOrRepo()` helper function + +5. **pkg/workflow/compiler_orchestrator_workflow.go** + - Added call to `validateGitHubGuardPolicy()` + +6. **pkg/workflow/compiler_string_api.go** + - Added call to `validateGitHubGuardPolicy()` + +### Validation Rules + +**Repository Patterns:** +- Must be lowercase +- Format: `owner/repo`, `owner/*`, or `owner/prefix*` +- Owner and repo parts must contain only: lowercase letters, numbers, hyphens, underscores +- Wildcards only allowed at end of repo name +- Empty arrays not allowed + +**Integrity Levels:** +- Must be one of: `none`, `reader`, `writer`, `merged` +- Case-sensitive + +**Required Fields:** +- Both `repos` and `min-integrity` are required when either is specified under `github:` + +## Error Messages + +The implementation provides clear, actionable error messages: + +``` +invalid guard policy: 'github.repos' is required. 
+Use 'all', 'public', or an array of repository patterns (e.g., ['owner/repo', 'owner/*']) + +invalid guard policy: repository pattern 'Owner/Repo' must be lowercase + +invalid guard policy: repository pattern 'owner/re*po' has wildcard in the middle. +Wildcards only allowed at the end (e.g., 'prefix*') + +invalid guard policy: 'github.min-integrity' must be one of: 'none', 'reader', 'writer', 'merged'. +Got: 'admin' +``` + +## Usage Examples + +### Example 1: Restrict to Organization + +```yaml +tools: + github: + mode: remote + toolsets: [default] + repos: + - "myorg/*" + min-integrity: reader +``` + +### Example 2: Multiple Organizations + +```yaml +tools: + github: + mode: remote + toolsets: [default] + repos: + - "frontend-org/*" + - "backend-org/*" + - "shared/infrastructure" + min-integrity: writer +``` + +### Example 3: Public Repositories Only + +```yaml +tools: + github: + mode: remote + toolsets: [repos, issues] + repos: "public" + min-integrity: none +``` + +### Example 4: Prefix Matching + +```yaml +tools: + github: + mode: remote + toolsets: [default] + repos: + - "myorg/api-*" # Matches api-gateway, api-service, etc. + - "myorg/web-*" # Matches web-frontend, web-backend, etc. + min-integrity: writer +``` + +## Testing Strategy + +1. **Unit Tests** (Complete): + - `TestValidateGitHubGuardPolicy`: 14 cases covering valid/invalid repos values, invalid min-integrity, missing fields + - `TestValidateReposScopeWithStringSlice`: 4 cases covering `[]string` and `[]any` input types + - Tests live in `pkg/workflow/tools_validation_test.go` + +2. **Integration Tests** (Pending): + - Test end-to-end workflow compilation with guard policies + - Test that guard policies appear in compiled workflow YAML + - Test that guard policies are passed to MCP gateway configuration + +## Next Steps + +1. **Write Comprehensive Tests**: + - Unit tests for parsing functions + - Unit tests for validation functions + - Integration tests for end-to-end workflow compilation + +2. 
**Update Documentation**: + - Add guard policies section to MCP gateway documentation + - Add examples to GitHub MCP server documentation + - Update frontmatter configuration reference + +3. **Runtime Implementation** (Separate from this PR): + - MCP Gateway enforcement of guard policies + - Repository pattern matching logic + - Integrity level verification + - Access control logging + +## Benefits + +1. **Security**: Restrict AI agent access to specific repositories +2. **Compliance**: Enforce minimum integrity requirements +3. **Flexibility**: Support diverse repository patterns and wildcards +4. **Extensibility**: Easy to add policies for Jira, WorkIQ, etc. +5. **Clarity**: Clear error messages and validation +6. **Documentation**: Self-documenting through type system + +## Open Questions + +1. Should we support negative patterns (e.g., exclude certain repos)? +2. Should we support combining multiple policies (AND/OR logic)? +3. How should conflicts between lockdown and guard policies be resolved? +4. Should we add a "dry-run" mode to test policies before enforcement? + +## Conclusion + +This implementation provides a solid foundation for guard policies in the MCP gateway. The design is: + +- **Type-safe**: Strongly-typed structs with validation +- **Extensible**: Easy to add new servers and policy types +- **User-friendly**: Intuitive frontmatter syntax +- **Well-validated**: Comprehensive validation with clear error messages +- **Forward-compatible**: Supports future enhancements + +The implementation follows established patterns in the codebase and integrates seamlessly with the existing compilation and validation infrastructure.
From 07fd42b35a012c4e0db784e1490b27b6b87ae44d Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 13:37:15 +0000 Subject: [PATCH 06/15] Show full GITHUB_WORKSPACE path and cwd annotation in checkout prompt Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .github/workflows/smoke-codex.lock.yml | 2 +- pkg/workflow/checkout_manager.go | 29 ++++++++++++++++++-------- pkg/workflow/checkout_manager_test.go | 16 ++++++++------ 3 files changed, 31 insertions(+), 16 deletions(-) diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 6ddd935de2..d70fe5c252 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -189,7 +189,7 @@ jobs: - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ {{/if}} - **checkouts**: The following repositories have been checked out and are available in the workspace: - - `.` → `__GH_AW_GITHUB_REPOSITORY__` + - `$GITHUB_WORKSPACE` → `__GH_AW_GITHUB_REPOSITORY__` (cwd) GH_AW_PROMPT_EOF diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index fd1dd9a158..a7fb87ac22 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -563,10 +563,12 @@ func getCurrentCheckoutRepository(checkouts []*CheckoutConfig) string { // checkouts for inclusion in the GitHub context prompt. // Returns an empty string when no checkouts are configured. // -// The generated content may include "${{ github.repository }}" for any checkout that does -// not have an explicit repository configured (defaulting to the triggering repository). -// Callers must ensure these expressions are processed by an ExpressionExtractor so the -// placeholder substitution step can resolve them at runtime. +// Each checkout is shown with its full absolute path relative to $GITHUB_WORKSPACE. 
+// The root checkout (path == "") is annotated as "(cwd)" since that is the working +// directory of the agent process. The generated content may include +// "${{ github.repository }}" for any checkout that does not have an explicit repository +// configured; callers must ensure these expressions are processed by an ExpressionExtractor +// so the placeholder substitution step can resolve them at runtime. func buildCheckoutsPromptContent(checkouts []*CheckoutConfig) string { if len(checkouts) == 0 { return "" @@ -580,10 +582,16 @@ func buildCheckoutsPromptContent(checkouts []*CheckoutConfig) string { continue } - // Determine human-readable path label - path := cfg.Path - if path == "" { - path = "." + // Build the full absolute path using $GITHUB_WORKSPACE as root. + // Normalize the path: strip "./" prefix; bare "." and "" both mean root. + relPath := strings.TrimPrefix(cfg.Path, "./") + if relPath == "." { + relPath = "" + } + isRoot := relPath == "" + absPath := "$GITHUB_WORKSPACE" + if !isRoot { + absPath += "/" + relPath } // Determine repo: use configured value or fall back to the triggering repository expression @@ -592,7 +600,10 @@ func buildCheckoutsPromptContent(checkouts []*CheckoutConfig) string { repo = "${{ github.repository }}" } - line := fmt.Sprintf(" - `%s` → `%s`", path, repo) + line := fmt.Sprintf(" - `%s` → `%s`", absPath, repo) + if isRoot { + line += " (cwd)" + } if cfg.Current { line += " (**current** - this is the repository you are working on; use this as the target for all GitHub operations unless otherwise specified)" } diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index f5c520e5c6..90857407de 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -460,11 +460,12 @@ func TestBuildCheckoutsPromptContent(t *testing.T) { assert.Empty(t, buildCheckoutsPromptContent([]*CheckoutConfig{}), "empty slice should return empty string") }) - t.Run("default 
checkout with no repo uses github.repository expression", func(t *testing.T) { + t.Run("default checkout with no repo uses github.repository expression and cwd", func(t *testing.T) { content := buildCheckoutsPromptContent([]*CheckoutConfig{ {}, }) - assert.Contains(t, content, "`.`", "should show '.' path for empty path") + assert.Contains(t, content, "$GITHUB_WORKSPACE", "should show full workspace path for root checkout") + assert.Contains(t, content, "(cwd)", "root checkout should be marked as cwd") assert.Contains(t, content, "${{ github.repository }}", "should reference github.repository expression for default checkout") }) @@ -474,13 +475,14 @@ func TestBuildCheckoutsPromptContent(t *testing.T) { assert.Equal(t, emptyContent, dotContent, "empty path and '.' should produce identical output") }) - t.Run("checkout with explicit repo shows repo", func(t *testing.T) { + t.Run("checkout with explicit repo shows full path", func(t *testing.T) { content := buildCheckoutsPromptContent([]*CheckoutConfig{ {Repository: "owner/target", Path: "./target"}, }) - assert.Contains(t, content, "./target", "should show the configured path") + assert.Contains(t, content, "$GITHUB_WORKSPACE/target", "should show full workspace path") assert.Contains(t, content, "owner/target", "should show the configured repo") assert.NotContains(t, content, "github.repository", "should not include github.repository expression for explicit repo") + assert.NotContains(t, content, "(cwd)", "non-root checkout should not be marked as cwd") }) t.Run("current checkout is marked", func(t *testing.T) { @@ -504,9 +506,11 @@ func TestBuildCheckoutsPromptContent(t *testing.T) { {Repository: "owner/target", Path: "./target", Current: true}, {Repository: "owner/libs", Path: "./libs"}, }) - assert.Contains(t, content, "./target", "should include target checkout") + assert.Contains(t, content, "$GITHUB_WORKSPACE", "should include workspace root for root checkout") + assert.Contains(t, content, "(cwd)", "root 
checkout should be marked as cwd") + assert.Contains(t, content, "$GITHUB_WORKSPACE/target", "should include full path for target checkout") assert.Contains(t, content, "owner/target", "should include target repo") - assert.Contains(t, content, "./libs", "should include libs checkout") + assert.Contains(t, content, "$GITHUB_WORKSPACE/libs", "should include full path for libs checkout") assert.Contains(t, content, "owner/libs", "should include libs repo") assert.Contains(t, content, "**current**", "current checkout should be marked") }) From 9573d09173ce5e9729f602bb549bee0e7468ebb4 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 13:47:15 +0000 Subject: [PATCH 07/15] fix: use token: not github-token: in generated actions/checkout YAML Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- pkg/workflow/checkout_manager.go | 10 ++++++++-- pkg/workflow/checkout_manager_test.go | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index a7fb87ac22..4a793e8aa6 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -39,6 +39,10 @@ type CheckoutConfig struct { // GitHubToken overrides the default GITHUB_TOKEN for authentication. // Use ${{ secrets.MY_TOKEN }} to reference a repository secret. + // + // Frontmatter key: "github-token" (user-facing name used here and in the schema) + // Generated YAML key: "token" (the actual input name for actions/checkout) + // The compiler maps frontmatter "github-token" → lock.yml "token" during step generation. GitHubToken string `json:"github-token,omitempty"` // FetchDepth controls the number of commits to fetch. 
@@ -254,7 +258,8 @@ func (cm *CheckoutManager) GenerateDefaultCheckoutStep( fmt.Fprintf(&sb, " ref: %s\n", override.ref) } if override.token != "" { - fmt.Fprintf(&sb, " github-token: %s\n", override.token) + // actions/checkout input is "token", not "github-token" + fmt.Fprintf(&sb, " token: %s\n", override.token) } if override.fetchDepth != nil { fmt.Fprintf(&sb, " fetch-depth: %d\n", *override.fetchDepth) @@ -297,7 +302,8 @@ func generateCheckoutStepLines(entry *resolvedCheckout, getActionPin func(string fmt.Fprintf(&sb, " path: %s\n", entry.key.path) } if entry.token != "" { - fmt.Fprintf(&sb, " github-token: %s\n", entry.token) + // actions/checkout input is "token", not "github-token" + fmt.Fprintf(&sb, " token: %s\n", entry.token) } if entry.fetchDepth != nil { fmt.Fprintf(&sb, " fetch-depth: %d\n", *entry.fetchDepth) diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 90857407de..9b001d08c7 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -124,7 +124,7 @@ func TestGenerateDefaultCheckoutStep(t *testing.T) { }) lines := cm.GenerateDefaultCheckoutStep(false, "", getPin) combined := strings.Join(lines, "") - assert.Contains(t, combined, "github-token: ${{ secrets.MY_TOKEN }}", "should include custom token") + assert.Contains(t, combined, "token: ${{ secrets.MY_TOKEN }}", "should include custom token") assert.Contains(t, combined, "persist-credentials: false", "must always have persist-credentials: false even with custom token") }) From 5f7b7b84b87d5f126adf217aaa942805a04fadef Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 14:17:02 +0000 Subject: [PATCH 08/15] Merge main into branch; recompile all workflows Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .../agent-performance-analyzer.lock.yml | 3 +- .../workflows/agent-persona-explorer.lock.yml | 3 +- 
.github/workflows/ai-moderator.lock.yml | 3 +- .github/workflows/archie.lock.yml | 3 +- .github/workflows/artifacts-summary.lock.yml | 3 +- .github/workflows/audit-workflows.lock.yml | 3 +- .github/workflows/auto-triage-issues.lock.yml | 3 +- .github/workflows/blog-auditor.lock.yml | 3 +- .github/workflows/bot-detection.lock.yml | 3 +- .github/workflows/brave.lock.yml | 3 +- .../breaking-change-checker.lock.yml | 3 +- .github/workflows/changeset.lock.yml | 3 +- .../workflows/chroma-issue-indexer.lock.yml | 3 +- .github/workflows/ci-coach.lock.yml | 3 +- .github/workflows/ci-doctor.lock.yml | 3 +- .../claude-code-user-docs-review.lock.yml | 3 +- .../cli-consistency-checker.lock.yml | 3 +- .../workflows/cli-version-checker.lock.yml | 3 +- .github/workflows/cloclo.lock.yml | 3 +- .../workflows/code-scanning-fixer.lock.yml | 3 +- .github/workflows/code-simplifier.lock.yml | 3 +- .../codex-github-remote-mcp-test.lock.yml | 3 +- .../commit-changes-analyzer.lock.yml | 3 +- .github/workflows/contribution-check.lock.yml | 3 +- .../workflows/copilot-agent-analysis.lock.yml | 3 +- .../copilot-cli-deep-research.lock.yml | 3 +- .../copilot-pr-merged-report.lock.yml | 3 +- .../copilot-pr-nlp-analysis.lock.yml | 3 +- .../copilot-pr-prompt-analysis.lock.yml | 3 +- .../copilot-session-insights.lock.yml | 3 +- .github/workflows/craft.lock.yml | 3 +- .../daily-architecture-diagram.lock.yml | 3 +- .../daily-assign-issue-to-user.lock.yml | 3 +- .github/workflows/daily-choice-test.lock.yml | 3 +- .../workflows/daily-cli-performance.lock.yml | 3 +- .../workflows/daily-cli-tools-tester.lock.yml | 3 +- .github/workflows/daily-code-metrics.lock.yml | 3 +- .../workflows/daily-compiler-quality.lock.yml | 3 +- .../daily-copilot-token-report.lock.yml | 3 +- .github/workflows/daily-doc-healer.lock.yml | 3 +- .github/workflows/daily-doc-updater.lock.yml | 3 +- .github/workflows/daily-fact.lock.yml | 3 +- .github/workflows/daily-file-diet.lock.yml | 3 +- 
.../workflows/daily-firewall-report.lock.yml | 3 +- .../workflows/daily-issues-report.lock.yml | 3 +- .../daily-malicious-code-scan.lock.yml | 3 +- .../daily-mcp-concurrency-analysis.lock.yml | 3 +- .../daily-multi-device-docs-tester.lock.yml | 3 +- .github/workflows/daily-news.lock.yml | 3 +- .../daily-observability-report.lock.yml | 3 +- .../daily-performance-summary.lock.yml | 3 +- .github/workflows/daily-regulatory.lock.yml | 3 +- .../daily-rendering-scripts-verifier.lock.yml | 3 +- .../workflows/daily-repo-chronicle.lock.yml | 3 +- .../daily-safe-output-optimizer.lock.yml | 3 +- .../daily-safe-outputs-conformance.lock.yml | 3 +- .../workflows/daily-secrets-analysis.lock.yml | 3 +- .../daily-security-red-team.lock.yml | 3 +- .github/workflows/daily-semgrep-scan.lock.yml | 3 +- .../daily-syntax-error-quality.lock.yml | 3 +- .../daily-team-evolution-insights.lock.yml | 3 +- .github/workflows/daily-team-status.lock.yml | 3 +- .../daily-testify-uber-super-expert.lock.yml | 3 +- .../workflows/daily-workflow-updater.lock.yml | 3 +- .github/workflows/deep-report.lock.yml | 3 +- .github/workflows/delight.lock.yml | 3 +- .github/workflows/delight.md | 34 +- .github/workflows/dependabot-burner.lock.yml | 3 +- .../workflows/dependabot-go-checker.lock.yml | 3 +- .github/workflows/dev-hawk.lock.yml | 3 +- .github/workflows/dev.lock.yml | 3 +- .../developer-docs-consolidator.lock.yml | 3 +- .github/workflows/dictation-prompt.lock.yml | 3 +- .../workflows/discussion-task-miner.lock.yml | 3 +- .github/workflows/docs-noob-tester.lock.yml | 3 +- .github/workflows/draft-pr-cleanup.lock.yml | 3 +- .../duplicate-code-detector.lock.yml | 3 +- .../example-custom-error-patterns.lock.yml | 3 +- .../example-permissions-warning.lock.yml | 3 +- .../example-workflow-analyzer.lock.yml | 3 +- .github/workflows/firewall-escape.lock.yml | 3 +- .github/workflows/firewall.lock.yml | 3 +- .../workflows/functional-pragmatist.lock.yml | 3 +- .../github-mcp-structural-analysis.lock.yml | 3 +- 
.../github-mcp-tools-report.lock.yml | 3 +- .../github-remote-mcp-auth-test.lock.yml | 3 +- .../workflows/glossary-maintainer.lock.yml | 3 +- .github/workflows/go-fan.lock.yml | 3 +- .github/workflows/go-logger.lock.yml | 3 +- .../workflows/go-pattern-detector.lock.yml | 3 +- .github/workflows/gpclean.lock.yml | 3 +- .github/workflows/grumpy-reviewer.lock.yml | 3 +- .github/workflows/hourly-ci-cleaner.lock.yml | 3 +- .../workflows/instructions-janitor.lock.yml | 3 +- .github/workflows/issue-arborist.lock.yml | 3 +- .github/workflows/issue-monster.lock.yml | 3 +- .github/workflows/issue-triage-agent.lock.yml | 3 +- .github/workflows/jsweep.lock.yml | 3 +- .../workflows/layout-spec-maintainer.lock.yml | 3 +- .github/workflows/lockfile-stats.lock.yml | 3 +- .github/workflows/mcp-inspector.lock.yml | 3 +- .github/workflows/mergefest.lock.yml | 3 +- .github/workflows/metrics-collector.lock.yml | 3 +- .../workflows/notion-issue-summary.lock.yml | 3 +- .github/workflows/org-health-report.lock.yml | 3 +- .github/workflows/pdf-summary.lock.yml | 3 +- .github/workflows/plan.lock.yml | 3 +- .github/workflows/poem-bot.lock.yml | 3 +- .github/workflows/portfolio-analyst.lock.yml | 3 +- .../workflows/pr-nitpick-reviewer.lock.yml | 3 +- .github/workflows/pr-triage-agent.lock.yml | 3 +- .../prompt-clustering-analysis.lock.yml | 3 +- .github/workflows/python-data-charts.lock.yml | 3 +- .github/workflows/q.lock.yml | 3 +- .github/workflows/refiner.lock.yml | 3 +- .github/workflows/release.lock.yml | 3 +- .../workflows/repo-audit-analyzer.lock.yml | 3 +- .github/workflows/repo-tree-map.lock.yml | 3 +- .../repository-quality-improver.lock.yml | 3 +- .github/workflows/research.lock.yml | 3 +- .github/workflows/safe-output-health.lock.yml | 3 +- .../schema-consistency-checker.lock.yml | 3 +- .github/workflows/scout.lock.yml | 3 +- ...ecurity-alert-burndown.campaign.g.lock.yml | 3 +- .../workflows/security-compliance.lock.yml | 3 +- .github/workflows/security-review.lock.yml | 3 +- 
.../semantic-function-refactor.lock.yml | 3 +- .github/workflows/sergo.lock.yml | 3 +- .../workflows/slide-deck-maintainer.lock.yml | 3 +- .github/workflows/smoke-agent.lock.yml | 3 +- .github/workflows/smoke-claude.lock.yml | 3 +- .github/workflows/smoke-codex.lock.yml | 3 +- .github/workflows/smoke-copilot-arm.lock.yml | 3 +- .github/workflows/smoke-copilot.lock.yml | 3 +- .github/workflows/smoke-gemini.lock.yml | 3 +- .github/workflows/smoke-multi-pr.lock.yml | 3 +- .github/workflows/smoke-project.lock.yml | 3 +- .github/workflows/smoke-temporary-id.lock.yml | 3 +- .github/workflows/smoke-test-tools.lock.yml | 3 +- .../workflows/smoke-workflow-call.lock.yml | 3 +- .../workflows/stale-repo-identifier.lock.yml | 3 +- .../workflows/static-analysis-report.lock.yml | 3 +- .../workflows/step-name-alignment.lock.yml | 3 +- .github/workflows/sub-issue-closer.lock.yml | 3 +- .github/workflows/super-linter.lock.yml | 3 +- .../workflows/technical-doc-writer.lock.yml | 3 +- .github/workflows/terminal-stylist.lock.yml | 3 +- .../test-create-pr-error-handling.lock.yml | 3 +- .github/workflows/test-dispatcher.lock.yml | 3 +- .../test-project-url-default.lock.yml | 3 +- .github/workflows/test-workflow.lock.yml | 3 +- .github/workflows/tidy.lock.yml | 3 +- .github/workflows/typist.lock.yml | 3 +- .../workflows/ubuntu-image-analyzer.lock.yml | 3 +- .github/workflows/unbloat-docs.lock.yml | 3 +- .github/workflows/video-analyzer.lock.yml | 3 +- .../weekly-editors-health-check.lock.yml | 3 +- .../workflows/weekly-issue-summary.lock.yml | 3 +- .../weekly-safe-outputs-spec-review.lock.yml | 3 +- .github/workflows/workflow-generator.lock.yml | 3 +- .../workflow-health-manager.lock.yml | 3 +- .../workflows/workflow-normalizer.lock.yml | 3 +- .../workflow-skill-extractor.lock.yml | 3 +- DEADCODE.md | 179 +- .../setup/js/create_missing_data_issue.cjs | 1 + .../setup/js/create_missing_tool_issue.cjs | 1 + actions/setup/js/missing_issue_helpers.cjs | 6 +- 
.../setup/js/missing_issue_helpers.test.cjs | 42 + .../schemas/mcp-gateway-config.schema.json | 10 + .../docs/reference/cross-repository.md | 1 + pkg/cli/audit_report_helpers_test.go | 71 - pkg/cli/completions.go | 37 - pkg/cli/docker_images.go | 20 - pkg/cli/docker_images_test.go | 13 +- pkg/cli/mcp_tool_table.go | 115 -- pkg/cli/mcp_tool_table_test.go | 112 - pkg/cli/preconditions.go | 82 - pkg/console/console.go | 47 - pkg/console/console_formatting_test.go | 92 - pkg/console/console_test.go | 265 --- pkg/console/console_wasm.go | 24 +- pkg/console/golden_test.go | 117 -- pkg/console/input.go | 55 - pkg/console/input_test.go | 50 - pkg/console/progress.go | 25 - pkg/console/progress_test.go | 83 - pkg/console/render.go | 59 - pkg/console/spinner.go | 2 - pkg/console/spinner_test.go | 21 +- pkg/console/terminal.go | 16 - pkg/console/terminal_test.go | 173 -- pkg/constants/constants.go | 66 - pkg/constants/constants_test.go | 102 - pkg/fileutil/fileutil.go | 19 - pkg/logger/slog_adapter.go | 14 - pkg/logger/slog_adapter_test.go | 85 - pkg/parser/frontmatter_benchmark_test.go | 88 - pkg/parser/frontmatter_includes_test.go | 571 ------ pkg/parser/frontmatter_syntax_errors_test.go | 691 ------- pkg/parser/import_syntax_test.go | 83 - pkg/parser/include_expander.go | 14 - pkg/parser/include_processor.go | 5 - pkg/parser/mcp.go | 49 - pkg/parser/mcp_test.go | 57 - .../schema_additional_properties_test.go | 283 --- pkg/parser/schema_oneof_test.go | 334 --- .../schema_passthrough_validation_test.go | 572 ------ pkg/parser/schema_test.go | 1804 ----------------- pkg/parser/schema_utilities_test.go | 138 -- pkg/parser/schema_validation.go | 54 - pkg/parser/schema_validation_test.go | 74 - pkg/parser/yaml_error.go | 146 -- pkg/parser/yaml_error_test.go | 272 --- pkg/repoutil/repoutil.go | 29 - pkg/repoutil/repoutil_test.go | 163 -- pkg/sliceutil/sliceutil.go | 16 - pkg/sliceutil/sliceutil_test.go | 232 --- pkg/stringutil/identifiers.go | 26 - 
pkg/stringutil/identifiers_test.go | 107 - pkg/stringutil/pat_validation.go | 15 - pkg/stringutil/pat_validation_test.go | 21 - pkg/stringutil/stringutil.go | 24 - pkg/stringutil/stringutil_test.go | 157 -- pkg/workflow/agentic_engine.go | 12 +- .../agentic_engine_interfaces_test.go | 9 +- pkg/workflow/artifact_manager.go | 383 ---- .../artifact_manager_integration_test.go | 280 --- pkg/workflow/artifact_manager_test.go | 866 -------- ...fact_manager_workflows_integration_test.go | 566 ------ pkg/workflow/checkout_manager.go | 5 - pkg/workflow/checkout_manager_test.go | 4 +- pkg/workflow/claude_engine.go | 1 - pkg/workflow/codex_engine.go | 1 - pkg/workflow/compiler_safe_outputs_config.go | 8 - pkg/workflow/copilot_engine.go | 1 - pkg/workflow/domains.go | 62 - pkg/workflow/domains_protocol_test.go | 6 +- pkg/workflow/domains_sort_test.go | 6 +- pkg/workflow/domains_test.go | 82 - pkg/workflow/engine_firewall_support.go | 35 +- pkg/workflow/engine_firewall_support_test.go | 38 +- pkg/workflow/engine_helpers.go | 156 -- pkg/workflow/engine_helpers_test.go | 323 --- pkg/workflow/expression_builder.go | 99 - pkg/workflow/expression_coverage_test.go | 79 - pkg/workflow/expression_nodes.go | 39 - pkg/workflow/expressions_test.go | 338 +-- .../firewall_disable_integration_test.go | 6 +- pkg/workflow/frontmatter_types.go | 22 - pkg/workflow/gemini_engine.go | 1 - pkg/workflow/gemini_engine_test.go | 1 - pkg/workflow/http_mcp_domains_test.go | 10 +- pkg/workflow/js.go | 1 - pkg/workflow/known_needs_expressions.go | 77 - pkg/workflow/known_needs_expressions_test.go | 97 - pkg/workflow/map_helpers.go | 225 -- pkg/workflow/mcp_renderer.go | 7 +- pkg/workflow/permissions_factory.go | 47 - pkg/workflow/safe_inputs_firewall_test.go | 8 +- pkg/workflow/safe_output_parser.go | 79 - .../schemas/mcp-gateway-config.schema.json | 10 + pkg/workflow/validation_helpers.go | 89 - pkg/workflow/validation_helpers_test.go | 406 ---- 263 files changed, 466 insertions(+), 12569 deletions(-) 
delete mode 100644 pkg/console/terminal_test.go delete mode 100644 pkg/parser/frontmatter_syntax_errors_test.go delete mode 100644 pkg/parser/schema_additional_properties_test.go delete mode 100644 pkg/parser/schema_oneof_test.go delete mode 100644 pkg/parser/schema_passthrough_validation_test.go delete mode 100644 pkg/parser/schema_validation_test.go delete mode 100644 pkg/workflow/artifact_manager_integration_test.go delete mode 100644 pkg/workflow/artifact_manager_test.go delete mode 100644 pkg/workflow/artifact_manager_workflows_integration_test.go diff --git a/.github/workflows/agent-performance-analyzer.lock.yml b/.github/workflows/agent-performance-analyzer.lock.yml index 82d70f8344..cbf0aa7882 100644 --- a/.github/workflows/agent-performance-analyzer.lock.yml +++ b/.github/workflows/agent-performance-analyzer.lock.yml @@ -833,8 +833,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/agent-persona-explorer.lock.yml b/.github/workflows/agent-persona-explorer.lock.yml index 6dc915117a..e908528530 100644 --- a/.github/workflows/agent-persona-explorer.lock.yml +++ b/.github/workflows/agent-persona-explorer.lock.yml @@ -721,8 +721,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml index 73155c020c..fb889c903a 100644 --- a/.github/workflows/ai-moderator.lock.yml +++ b/.github/workflows/ai-moderator.lock.yml @@ -762,8 +762,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/archie.lock.yml b/.github/workflows/archie.lock.yml index b0b69150eb..42b5bae762 100644 --- a/.github/workflows/archie.lock.yml +++ b/.github/workflows/archie.lock.yml @@ -675,8 +675,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/artifacts-summary.lock.yml b/.github/workflows/artifacts-summary.lock.yml index ad9ddf1bd8..9b6f6d89a9 100644 --- a/.github/workflows/artifacts-summary.lock.yml +++ b/.github/workflows/artifacts-summary.lock.yml @@ -630,8 +630,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml index 69f0015abd..c746baba30 100644 --- a/.github/workflows/audit-workflows.lock.yml +++ b/.github/workflows/audit-workflows.lock.yml @@ -804,8 +804,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/auto-triage-issues.lock.yml b/.github/workflows/auto-triage-issues.lock.yml index 461289b45f..80a0a7a651 100644 --- a/.github/workflows/auto-triage-issues.lock.yml +++ b/.github/workflows/auto-triage-issues.lock.yml @@ -683,8 +683,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/blog-auditor.lock.yml b/.github/workflows/blog-auditor.lock.yml index 520f4e6019..293d64e35e 100644 --- a/.github/workflows/blog-auditor.lock.yml +++ b/.github/workflows/blog-auditor.lock.yml @@ -658,8 +658,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/bot-detection.lock.yml b/.github/workflows/bot-detection.lock.yml index 2336d08fba..61a5f54220 100644 --- a/.github/workflows/bot-detection.lock.yml +++ b/.github/workflows/bot-detection.lock.yml @@ -784,8 +784,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/brave.lock.yml b/.github/workflows/brave.lock.yml index ccb2bc9f8c..6c499509b3 100644 --- a/.github/workflows/brave.lock.yml +++ b/.github/workflows/brave.lock.yml @@ -662,8 +662,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/breaking-change-checker.lock.yml b/.github/workflows/breaking-change-checker.lock.yml index 1f1616b806..e93ce91404 100644 --- a/.github/workflows/breaking-change-checker.lock.yml +++ b/.github/workflows/breaking-change-checker.lock.yml @@ -654,8 +654,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", 
"apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/changeset.lock.yml b/.github/workflows/changeset.lock.yml index 5905cdc523..4e32a6759a 100644 --- a/.github/workflows/changeset.lock.yml +++ b/.github/workflows/changeset.lock.yml @@ -777,8 +777,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/chroma-issue-indexer.lock.yml b/.github/workflows/chroma-issue-indexer.lock.yml index b69dbe4575..e32d4f24d1 100644 --- a/.github/workflows/chroma-issue-indexer.lock.yml +++ b/.github/workflows/chroma-issue-indexer.lock.yml @@ -410,8 +410,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/ci-coach.lock.yml b/.github/workflows/ci-coach.lock.yml index 1eb3d514da..5224bea48d 100644 --- a/.github/workflows/ci-coach.lock.yml +++ b/.github/workflows/ci-coach.lock.yml @@ -723,8 +723,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index 7d07b0756f..6c8954a4fe 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -869,8 +869,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/claude-code-user-docs-review.lock.yml b/.github/workflows/claude-code-user-docs-review.lock.yml index 7e3d34e0dd..09fceb7b78 100644 --- a/.github/workflows/claude-code-user-docs-review.lock.yml +++ b/.github/workflows/claude-code-user-docs-review.lock.yml @@ -649,8 +649,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml index d30c4578d4..3120b1ff89 100644 --- a/.github/workflows/cli-consistency-checker.lock.yml +++ b/.github/workflows/cli-consistency-checker.lock.yml @@ -643,8 +643,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/cli-version-checker.lock.yml b/.github/workflows/cli-version-checker.lock.yml index d7f84c8873..bdc04d7fca 100644 --- a/.github/workflows/cli-version-checker.lock.yml +++ b/.github/workflows/cli-version-checker.lock.yml @@ -682,8 +682,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/cloclo.lock.yml b/.github/workflows/cloclo.lock.yml index 32c1ab68d3..64f65fdb6a 100644 --- a/.github/workflows/cloclo.lock.yml +++ b/.github/workflows/cloclo.lock.yml 
@@ -912,8 +912,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/code-scanning-fixer.lock.yml b/.github/workflows/code-scanning-fixer.lock.yml index 5575202d02..bb445367e1 100644 --- a/.github/workflows/code-scanning-fixer.lock.yml +++ b/.github/workflows/code-scanning-fixer.lock.yml @@ -716,8 +716,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/code-simplifier.lock.yml b/.github/workflows/code-simplifier.lock.yml index 6db2f6f004..068a29a672 100644 --- a/.github/workflows/code-simplifier.lock.yml +++ b/.github/workflows/code-simplifier.lock.yml @@ -662,8 +662,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/codex-github-remote-mcp-test.lock.yml b/.github/workflows/codex-github-remote-mcp-test.lock.yml index d5d110da67..d2e1999793 100644 --- a/.github/workflows/codex-github-remote-mcp-test.lock.yml +++ b/.github/workflows/codex-github-remote-mcp-test.lock.yml @@ -381,8 +381,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/commit-changes-analyzer.lock.yml b/.github/workflows/commit-changes-analyzer.lock.yml index 
11117f4b15..da0ef632ab 100644 --- a/.github/workflows/commit-changes-analyzer.lock.yml +++ b/.github/workflows/commit-changes-analyzer.lock.yml @@ -640,8 +640,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/contribution-check.lock.yml b/.github/workflows/contribution-check.lock.yml index 4f16cf255a..4ac2c675d0 100644 --- a/.github/workflows/contribution-check.lock.yml +++ b/.github/workflows/contribution-check.lock.yml @@ -720,8 +720,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-agent-analysis.lock.yml b/.github/workflows/copilot-agent-analysis.lock.yml index 5bb91c02c9..93199826ae 100644 --- a/.github/workflows/copilot-agent-analysis.lock.yml +++ b/.github/workflows/copilot-agent-analysis.lock.yml @@ -694,8 +694,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-cli-deep-research.lock.yml b/.github/workflows/copilot-cli-deep-research.lock.yml index 4a0fb05325..e1701438b9 100644 --- a/.github/workflows/copilot-cli-deep-research.lock.yml +++ b/.github/workflows/copilot-cli-deep-research.lock.yml @@ -652,8 +652,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } 
GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-pr-merged-report.lock.yml b/.github/workflows/copilot-pr-merged-report.lock.yml index fe8ebd728c..2d64f978cf 100644 --- a/.github/workflows/copilot-pr-merged-report.lock.yml +++ b/.github/workflows/copilot-pr-merged-report.lock.yml @@ -781,8 +781,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml index 6b2a9177fc..83b4eef1e5 100644 --- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml +++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml @@ -750,8 +750,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-pr-prompt-analysis.lock.yml b/.github/workflows/copilot-pr-prompt-analysis.lock.yml index 0c09391e4c..4756bcf3dd 100644 --- a/.github/workflows/copilot-pr-prompt-analysis.lock.yml +++ b/.github/workflows/copilot-pr-prompt-analysis.lock.yml @@ -687,8 +687,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml index 596f391d55..93236ede56 100644 --- a/.github/workflows/copilot-session-insights.lock.yml +++ b/.github/workflows/copilot-session-insights.lock.yml @@ -760,8 +760,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/craft.lock.yml b/.github/workflows/craft.lock.yml index e25c19c26c..a3ca2419fb 100644 --- a/.github/workflows/craft.lock.yml +++ b/.github/workflows/craft.lock.yml @@ -697,8 +697,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-architecture-diagram.lock.yml b/.github/workflows/daily-architecture-diagram.lock.yml index 861aae4bba..c3c3efd88e 100644 --- a/.github/workflows/daily-architecture-diagram.lock.yml +++ b/.github/workflows/daily-architecture-diagram.lock.yml @@ -664,8 +664,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-assign-issue-to-user.lock.yml b/.github/workflows/daily-assign-issue-to-user.lock.yml index b37655d585..9641957c5e 100644 --- a/.github/workflows/daily-assign-issue-to-user.lock.yml +++ b/.github/workflows/daily-assign-issue-to-user.lock.yml @@ -658,8 +658,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-choice-test.lock.yml b/.github/workflows/daily-choice-test.lock.yml index ddccfb378e..26f5a199c6 100644 --- a/.github/workflows/daily-choice-test.lock.yml +++ 
b/.github/workflows/daily-choice-test.lock.yml @@ -605,8 +605,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-cli-performance.lock.yml b/.github/workflows/daily-cli-performance.lock.yml index 20f2e77078..bcbcc7d59c 100644 --- a/.github/workflows/daily-cli-performance.lock.yml +++ b/.github/workflows/daily-cli-performance.lock.yml @@ -852,8 +852,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-cli-tools-tester.lock.yml b/.github/workflows/daily-cli-tools-tester.lock.yml index e2a353a799..53ccdb37da 100644 --- a/.github/workflows/daily-cli-tools-tester.lock.yml +++ b/.github/workflows/daily-cli-tools-tester.lock.yml @@ -716,8 +716,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml index 0a00af3082..69269a7679 100644 --- a/.github/workflows/daily-code-metrics.lock.yml +++ b/.github/workflows/daily-code-metrics.lock.yml @@ -734,8 +734,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-compiler-quality.lock.yml 
b/.github/workflows/daily-compiler-quality.lock.yml index 721ef96eb6..e45b98d282 100644 --- a/.github/workflows/daily-compiler-quality.lock.yml +++ b/.github/workflows/daily-compiler-quality.lock.yml @@ -657,8 +657,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml index 731fce7ded..22dec2ca6f 100644 --- a/.github/workflows/daily-copilot-token-report.lock.yml +++ b/.github/workflows/daily-copilot-token-report.lock.yml @@ -757,8 +757,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-doc-healer.lock.yml b/.github/workflows/daily-doc-healer.lock.yml index 1eec16a031..890ac97a3d 100644 --- a/.github/workflows/daily-doc-healer.lock.yml +++ b/.github/workflows/daily-doc-healer.lock.yml @@ -743,8 +743,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-doc-updater.lock.yml b/.github/workflows/daily-doc-updater.lock.yml index 54a5b71b0c..255da58866 100644 --- a/.github/workflows/daily-doc-updater.lock.yml +++ b/.github/workflows/daily-doc-updater.lock.yml @@ -669,8 +669,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-fact.lock.yml b/.github/workflows/daily-fact.lock.yml index b642dcc344..d4c825d057 100644 --- a/.github/workflows/daily-fact.lock.yml +++ b/.github/workflows/daily-fact.lock.yml @@ -622,8 +622,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml index 1c98b9f5ee..5af0133209 100644 --- a/.github/workflows/daily-file-diet.lock.yml +++ b/.github/workflows/daily-file-diet.lock.yml @@ -670,8 +670,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml index 58f64763ab..3745907c32 100644 --- a/.github/workflows/daily-firewall-report.lock.yml +++ b/.github/workflows/daily-firewall-report.lock.yml @@ -776,8 +776,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml index 247eddc88e..93cb05e980 100644 --- a/.github/workflows/daily-issues-report.lock.yml +++ b/.github/workflows/daily-issues-report.lock.yml @@ -823,8 +823,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-malicious-code-scan.lock.yml b/.github/workflows/daily-malicious-code-scan.lock.yml index 65fa01fece..2c258d55ab 100644 --- a/.github/workflows/daily-malicious-code-scan.lock.yml +++ b/.github/workflows/daily-malicious-code-scan.lock.yml @@ -666,8 +666,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml index 2742244a42..a7011cb136 100644 --- a/.github/workflows/daily-mcp-concurrency-analysis.lock.yml +++ b/.github/workflows/daily-mcp-concurrency-analysis.lock.yml @@ -710,8 +710,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml index 2dd98e38ef..730494f8aa 100644 --- a/.github/workflows/daily-multi-device-docs-tester.lock.yml +++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml @@ -722,8 +722,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml index 32a88d5898..6d37804914 100644 --- a/.github/workflows/daily-news.lock.yml +++ 
b/.github/workflows/daily-news.lock.yml @@ -817,8 +817,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-observability-report.lock.yml b/.github/workflows/daily-observability-report.lock.yml index 4d68bb8954..002b8611b9 100644 --- a/.github/workflows/daily-observability-report.lock.yml +++ b/.github/workflows/daily-observability-report.lock.yml @@ -800,8 +800,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml index 455e7b0202..5ad52d8fd3 100644 --- a/.github/workflows/daily-performance-summary.lock.yml +++ b/.github/workflows/daily-performance-summary.lock.yml @@ -1275,8 +1275,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-regulatory.lock.yml b/.github/workflows/daily-regulatory.lock.yml index 8d776f3eee..8cd84002ec 100644 --- a/.github/workflows/daily-regulatory.lock.yml +++ b/.github/workflows/daily-regulatory.lock.yml @@ -1166,8 +1166,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git 
a/.github/workflows/daily-rendering-scripts-verifier.lock.yml b/.github/workflows/daily-rendering-scripts-verifier.lock.yml index d3039d3418..a7639f4eab 100644 --- a/.github/workflows/daily-rendering-scripts-verifier.lock.yml +++ b/.github/workflows/daily-rendering-scripts-verifier.lock.yml @@ -749,8 +749,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml index 4b6b811fe4..8267a732ab 100644 --- a/.github/workflows/daily-repo-chronicle.lock.yml +++ b/.github/workflows/daily-repo-chronicle.lock.yml @@ -706,8 +706,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-safe-output-optimizer.lock.yml b/.github/workflows/daily-safe-output-optimizer.lock.yml index ae23fe5086..1b8f1ff8a1 100644 --- a/.github/workflows/daily-safe-output-optimizer.lock.yml +++ b/.github/workflows/daily-safe-output-optimizer.lock.yml @@ -757,8 +757,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-safe-outputs-conformance.lock.yml b/.github/workflows/daily-safe-outputs-conformance.lock.yml index 84e51e68f1..c62df92f56 100644 --- a/.github/workflows/daily-safe-outputs-conformance.lock.yml +++ b/.github/workflows/daily-safe-outputs-conformance.lock.yml @@ -657,8 +657,7 @@ jobs: "port": $MCP_GATEWAY_PORT, 
"domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-secrets-analysis.lock.yml b/.github/workflows/daily-secrets-analysis.lock.yml index 06cf1d9f8a..4d413d1f9b 100644 --- a/.github/workflows/daily-secrets-analysis.lock.yml +++ b/.github/workflows/daily-secrets-analysis.lock.yml @@ -690,8 +690,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-security-red-team.lock.yml b/.github/workflows/daily-security-red-team.lock.yml index 21bd8828a3..75f05fb3b5 100644 --- a/.github/workflows/daily-security-red-team.lock.yml +++ b/.github/workflows/daily-security-red-team.lock.yml @@ -661,8 +661,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-semgrep-scan.lock.yml b/.github/workflows/daily-semgrep-scan.lock.yml index 3dc37bcfd6..e2dc7e6f1e 100644 --- a/.github/workflows/daily-semgrep-scan.lock.yml +++ b/.github/workflows/daily-semgrep-scan.lock.yml @@ -686,8 +686,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-syntax-error-quality.lock.yml b/.github/workflows/daily-syntax-error-quality.lock.yml index 684f2e920c..9bb881d558 100644 --- 
a/.github/workflows/daily-syntax-error-quality.lock.yml +++ b/.github/workflows/daily-syntax-error-quality.lock.yml @@ -651,8 +651,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-team-evolution-insights.lock.yml b/.github/workflows/daily-team-evolution-insights.lock.yml index c75ccf901d..2e54ffb02a 100644 --- a/.github/workflows/daily-team-evolution-insights.lock.yml +++ b/.github/workflows/daily-team-evolution-insights.lock.yml @@ -638,8 +638,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-team-status.lock.yml b/.github/workflows/daily-team-status.lock.yml index 26e069ceea..d4fa964d64 100644 --- a/.github/workflows/daily-team-status.lock.yml +++ b/.github/workflows/daily-team-status.lock.yml @@ -664,8 +664,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-testify-uber-super-expert.lock.yml b/.github/workflows/daily-testify-uber-super-expert.lock.yml index a2b6140719..724c124835 100644 --- a/.github/workflows/daily-testify-uber-super-expert.lock.yml +++ b/.github/workflows/daily-testify-uber-super-expert.lock.yml @@ -692,8 +692,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/daily-workflow-updater.lock.yml b/.github/workflows/daily-workflow-updater.lock.yml index 5f4f074b99..4f93f67183 100644 --- a/.github/workflows/daily-workflow-updater.lock.yml +++ b/.github/workflows/daily-workflow-updater.lock.yml @@ -643,8 +643,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml index a14e296dcb..d7353c99c0 100644 --- a/.github/workflows/deep-report.lock.yml +++ b/.github/workflows/deep-report.lock.yml @@ -892,8 +892,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/delight.lock.yml b/.github/workflows/delight.lock.yml index ce8003b13c..5238a1390f 100644 --- a/.github/workflows/delight.lock.yml +++ b/.github/workflows/delight.lock.yml @@ -731,8 +731,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/delight.md b/.github/workflows/delight.md index 7bc4dda978..11d972f622 100644 --- a/.github/workflows/delight.md +++ b/.github/workflows/delight.md @@ -258,12 +258,14 @@ For each selected item: ### Step 4: Create Improvement Report +**Report Formatting**: Use h3 (###) or lower for all headers in the report to maintain proper document hierarchy. Wrap long sections in `
Section Name` tags to improve readability. + Create a focused analysis report: ```markdown -# User Experience Analysis Report - [DATE] +### User Experience Analysis Report - [DATE] -## Executive Summary +### Executive Summary Today's analysis focused on: - [N] documentation file(s) @@ -275,18 +277,18 @@ Today's analysis focused on: **Key Finding**: [One-sentence summary of most impactful improvement opportunity] -## Quality Highlights ✅ +### Quality Highlights ✅ [1-2 examples of aspects that demonstrate good user experience] -### Example 1: [Title] +#### Example 1: [Title] - **File**: `[path/to/file.ext]` - **What works well**: [Specific quality factors] - **Quote/Reference**: "[Actual example text or reference]" -## Improvement Opportunities 💡 +### Improvement Opportunities 💡 -### High Priority +#### High Priority #### Opportunity 1: [Title] - Single File Improvement - **File**: `[path/to/specific/file.ext]` @@ -296,25 +298,25 @@ Today's analysis focused on: - **Suggested Change**: [Concrete, single-file improvement] - **Design Principle**: [Which principle applies] -### Medium Priority +#### Medium Priority [Repeat structure for additional opportunities if identified] -## Files Reviewed +### Files Reviewed -### Documentation +#### Documentation - `[file path]` - Rating: [✅/⚠️/❌] -### CLI Commands +#### CLI Commands - `gh aw [command]` - Rating: [✅/⚠️/❌] -### Workflow Messages +#### Workflow Messages - `[workflow-name]` - Rating: [✅/⚠️/❌] -### Validation Code +#### Validation Code - `[file path]` - Rating: [✅/⚠️/❌] -## Metrics +### Metrics - **Files Analyzed**: [N] - **Quality Distribution**: @@ -334,11 +336,11 @@ For the **top 1-2 highest-impact improvement opportunities**, create actionable Add an "Actionable Tasks" section to the discussion report with this format: ```markdown -## 🎯 Actionable Tasks +### 🎯 Actionable Tasks Here are 1-2 targeted improvement tasks, each affecting a single file: -### Task 1: [Title] - Improve [Specific File] +#### Task 1: [Title] 
- Improve [Specific File] **File to Modify**: `[exact/path/to/single/file.ext]` @@ -383,7 +385,7 @@ Here are 1-2 targeted improvement tasks, each affecting a single file: --- -### Task 2: [Title] - Improve [Different Specific File] +#### Task 2: [Title] - Improve [Different Specific File] **File to Modify**: `[exact/path/to/different/file.ext]` diff --git a/.github/workflows/dependabot-burner.lock.yml b/.github/workflows/dependabot-burner.lock.yml index 4abcc0f902..6cb5fa0d8b 100644 --- a/.github/workflows/dependabot-burner.lock.yml +++ b/.github/workflows/dependabot-burner.lock.yml @@ -649,8 +649,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/dependabot-go-checker.lock.yml b/.github/workflows/dependabot-go-checker.lock.yml index c1fe52eb85..abaeed9f1c 100644 --- a/.github/workflows/dependabot-go-checker.lock.yml +++ b/.github/workflows/dependabot-go-checker.lock.yml @@ -690,8 +690,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml index 07e84605b3..eeea92a43c 100644 --- a/.github/workflows/dev-hawk.lock.yml +++ b/.github/workflows/dev-hawk.lock.yml @@ -710,8 +710,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml index f27282bbb4..af5649029e 100644 --- 
a/.github/workflows/dev.lock.yml +++ b/.github/workflows/dev.lock.yml @@ -640,8 +640,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/developer-docs-consolidator.lock.yml b/.github/workflows/developer-docs-consolidator.lock.yml index 272a7c9997..58cd5b05e3 100644 --- a/.github/workflows/developer-docs-consolidator.lock.yml +++ b/.github/workflows/developer-docs-consolidator.lock.yml @@ -750,8 +750,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/dictation-prompt.lock.yml b/.github/workflows/dictation-prompt.lock.yml index 637cec40ec..4c901f9c72 100644 --- a/.github/workflows/dictation-prompt.lock.yml +++ b/.github/workflows/dictation-prompt.lock.yml @@ -649,8 +649,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/discussion-task-miner.lock.yml b/.github/workflows/discussion-task-miner.lock.yml index 690d382522..9fbfa1fac1 100644 --- a/.github/workflows/discussion-task-miner.lock.yml +++ b/.github/workflows/discussion-task-miner.lock.yml @@ -716,8 +716,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/docs-noob-tester.lock.yml 
b/.github/workflows/docs-noob-tester.lock.yml index 45a604941c..65666298d6 100644 --- a/.github/workflows/docs-noob-tester.lock.yml +++ b/.github/workflows/docs-noob-tester.lock.yml @@ -667,8 +667,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/draft-pr-cleanup.lock.yml b/.github/workflows/draft-pr-cleanup.lock.yml index d11481d09f..3c144bdb22 100644 --- a/.github/workflows/draft-pr-cleanup.lock.yml +++ b/.github/workflows/draft-pr-cleanup.lock.yml @@ -671,8 +671,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/duplicate-code-detector.lock.yml b/.github/workflows/duplicate-code-detector.lock.yml index e1abd5713c..b9da5c28f7 100644 --- a/.github/workflows/duplicate-code-detector.lock.yml +++ b/.github/workflows/duplicate-code-detector.lock.yml @@ -721,8 +721,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/example-custom-error-patterns.lock.yml b/.github/workflows/example-custom-error-patterns.lock.yml index 409753768d..3f6f64e0a4 100644 --- a/.github/workflows/example-custom-error-patterns.lock.yml +++ b/.github/workflows/example-custom-error-patterns.lock.yml @@ -376,8 +376,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 
524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/example-permissions-warning.lock.yml b/.github/workflows/example-permissions-warning.lock.yml index 366dd8aa44..b164447860 100644 --- a/.github/workflows/example-permissions-warning.lock.yml +++ b/.github/workflows/example-permissions-warning.lock.yml @@ -358,8 +358,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/example-workflow-analyzer.lock.yml b/.github/workflows/example-workflow-analyzer.lock.yml index 8d49c7225b..fcfe63f223 100644 --- a/.github/workflows/example-workflow-analyzer.lock.yml +++ b/.github/workflows/example-workflow-analyzer.lock.yml @@ -701,8 +701,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml index 3ed3c61553..17955a0149 100644 --- a/.github/workflows/firewall-escape.lock.yml +++ b/.github/workflows/firewall-escape.lock.yml @@ -682,8 +682,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml index 18d0a5eb7c..eb2b883868 100644 --- a/.github/workflows/firewall.lock.yml +++ b/.github/workflows/firewall.lock.yml @@ -361,8 +361,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/functional-pragmatist.lock.yml b/.github/workflows/functional-pragmatist.lock.yml index 0bca5c39dd..0094990d19 100644 --- a/.github/workflows/functional-pragmatist.lock.yml +++ b/.github/workflows/functional-pragmatist.lock.yml @@ -656,8 +656,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml index f339bb53ed..03ee33608f 100644 --- a/.github/workflows/github-mcp-structural-analysis.lock.yml +++ b/.github/workflows/github-mcp-structural-analysis.lock.yml @@ -713,8 +713,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/github-mcp-tools-report.lock.yml b/.github/workflows/github-mcp-tools-report.lock.yml index db5169f77e..9c04bedb3f 100644 --- a/.github/workflows/github-mcp-tools-report.lock.yml +++ b/.github/workflows/github-mcp-tools-report.lock.yml @@ -732,8 +732,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/github-remote-mcp-auth-test.lock.yml b/.github/workflows/github-remote-mcp-auth-test.lock.yml index ed04fd2294..93d53ec98b 100644 --- 
a/.github/workflows/github-remote-mcp-auth-test.lock.yml +++ b/.github/workflows/github-remote-mcp-auth-test.lock.yml @@ -639,8 +639,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/glossary-maintainer.lock.yml b/.github/workflows/glossary-maintainer.lock.yml index 9f3331ebb2..54835fe758 100644 --- a/.github/workflows/glossary-maintainer.lock.yml +++ b/.github/workflows/glossary-maintainer.lock.yml @@ -700,8 +700,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/go-fan.lock.yml b/.github/workflows/go-fan.lock.yml index bede913819..40880935ae 100644 --- a/.github/workflows/go-fan.lock.yml +++ b/.github/workflows/go-fan.lock.yml @@ -674,8 +674,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/go-logger.lock.yml b/.github/workflows/go-logger.lock.yml index 59becb5d34..f75ae1e33f 100644 --- a/.github/workflows/go-logger.lock.yml +++ b/.github/workflows/go-logger.lock.yml @@ -825,8 +825,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml index 
bab1013725..e744767a7b 100644 --- a/.github/workflows/go-pattern-detector.lock.yml +++ b/.github/workflows/go-pattern-detector.lock.yml @@ -668,8 +668,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/gpclean.lock.yml b/.github/workflows/gpclean.lock.yml index 210ff687be..ac90b93795 100644 --- a/.github/workflows/gpclean.lock.yml +++ b/.github/workflows/gpclean.lock.yml @@ -670,8 +670,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/grumpy-reviewer.lock.yml b/.github/workflows/grumpy-reviewer.lock.yml index 735e62d46e..2d3e7a10fb 100644 --- a/.github/workflows/grumpy-reviewer.lock.yml +++ b/.github/workflows/grumpy-reviewer.lock.yml @@ -747,8 +747,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/hourly-ci-cleaner.lock.yml b/.github/workflows/hourly-ci-cleaner.lock.yml index 383aa7aabc..ebf10b9f04 100644 --- a/.github/workflows/hourly-ci-cleaner.lock.yml +++ b/.github/workflows/hourly-ci-cleaner.lock.yml @@ -706,8 +706,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/instructions-janitor.lock.yml 
b/.github/workflows/instructions-janitor.lock.yml index 12bd43cb47..8ac90921d5 100644 --- a/.github/workflows/instructions-janitor.lock.yml +++ b/.github/workflows/instructions-janitor.lock.yml @@ -668,8 +668,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/issue-arborist.lock.yml b/.github/workflows/issue-arborist.lock.yml index 3af88247e6..9a6d955bde 100644 --- a/.github/workflows/issue-arborist.lock.yml +++ b/.github/workflows/issue-arborist.lock.yml @@ -788,8 +788,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml index 4a0c6dfa7d..ed927e6c19 100644 --- a/.github/workflows/issue-monster.lock.yml +++ b/.github/workflows/issue-monster.lock.yml @@ -693,8 +693,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/issue-triage-agent.lock.yml b/.github/workflows/issue-triage-agent.lock.yml index 92cff153eb..cfb848e41e 100644 --- a/.github/workflows/issue-triage-agent.lock.yml +++ b/.github/workflows/issue-triage-agent.lock.yml @@ -641,8 +641,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff 
--git a/.github/workflows/jsweep.lock.yml b/.github/workflows/jsweep.lock.yml index 178fc48474..b546b67174 100644 --- a/.github/workflows/jsweep.lock.yml +++ b/.github/workflows/jsweep.lock.yml @@ -685,8 +685,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/layout-spec-maintainer.lock.yml b/.github/workflows/layout-spec-maintainer.lock.yml index 6f2d4b9d14..3ba2c34d44 100644 --- a/.github/workflows/layout-spec-maintainer.lock.yml +++ b/.github/workflows/layout-spec-maintainer.lock.yml @@ -656,8 +656,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/lockfile-stats.lock.yml b/.github/workflows/lockfile-stats.lock.yml index 051abc5352..d5a167c2ff 100644 --- a/.github/workflows/lockfile-stats.lock.yml +++ b/.github/workflows/lockfile-stats.lock.yml @@ -652,8 +652,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml index bf76db17ac..89bfc4d58d 100644 --- a/.github/workflows/mcp-inspector.lock.yml +++ b/.github/workflows/mcp-inspector.lock.yml @@ -1020,8 +1020,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } 
GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/mergefest.lock.yml b/.github/workflows/mergefest.lock.yml index 961d740462..a08dc355bc 100644 --- a/.github/workflows/mergefest.lock.yml +++ b/.github/workflows/mergefest.lock.yml @@ -657,8 +657,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/metrics-collector.lock.yml b/.github/workflows/metrics-collector.lock.yml index 0b9fcbae34..94220fb131 100644 --- a/.github/workflows/metrics-collector.lock.yml +++ b/.github/workflows/metrics-collector.lock.yml @@ -459,8 +459,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml index b89e2825d4..f0c87bc0d1 100644 --- a/.github/workflows/notion-issue-summary.lock.yml +++ b/.github/workflows/notion-issue-summary.lock.yml @@ -612,8 +612,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml index 99b23aa2b8..1d9a97dde8 100644 --- a/.github/workflows/org-health-report.lock.yml +++ b/.github/workflows/org-health-report.lock.yml @@ -714,8 +714,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/pdf-summary.lock.yml b/.github/workflows/pdf-summary.lock.yml index 3b83af20b7..63e40f302c 100644 --- a/.github/workflows/pdf-summary.lock.yml +++ b/.github/workflows/pdf-summary.lock.yml @@ -760,8 +760,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/plan.lock.yml b/.github/workflows/plan.lock.yml index 339c06f377..5c2ae877b1 100644 --- a/.github/workflows/plan.lock.yml +++ b/.github/workflows/plan.lock.yml @@ -736,8 +736,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml index dc51b0c8ef..555819efa2 100644 --- a/.github/workflows/poem-bot.lock.yml +++ b/.github/workflows/poem-bot.lock.yml @@ -1305,8 +1305,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml index 7e5e58227c..1ef92ae22c 100644 --- a/.github/workflows/portfolio-analyst.lock.yml +++ b/.github/workflows/portfolio-analyst.lock.yml @@ -787,8 +787,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } 
} GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/pr-nitpick-reviewer.lock.yml b/.github/workflows/pr-nitpick-reviewer.lock.yml index 595d052251..6b7f193dba 100644 --- a/.github/workflows/pr-nitpick-reviewer.lock.yml +++ b/.github/workflows/pr-nitpick-reviewer.lock.yml @@ -832,8 +832,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/pr-triage-agent.lock.yml b/.github/workflows/pr-triage-agent.lock.yml index 24ee348bf3..73b01b7bb4 100644 --- a/.github/workflows/pr-triage-agent.lock.yml +++ b/.github/workflows/pr-triage-agent.lock.yml @@ -747,8 +747,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml index 4ace4b96d9..205f97351e 100644 --- a/.github/workflows/prompt-clustering-analysis.lock.yml +++ b/.github/workflows/prompt-clustering-analysis.lock.yml @@ -783,8 +783,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml index 079fbbf8da..48efb04dcb 100644 --- a/.github/workflows/python-data-charts.lock.yml +++ b/.github/workflows/python-data-charts.lock.yml @@ -775,8 +775,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/q.lock.yml b/.github/workflows/q.lock.yml index ea9c148b7d..6eb07dfd99 100644 --- a/.github/workflows/q.lock.yml +++ b/.github/workflows/q.lock.yml @@ -866,8 +866,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/refiner.lock.yml b/.github/workflows/refiner.lock.yml index e493a99a03..e73c570f0d 100644 --- a/.github/workflows/refiner.lock.yml +++ b/.github/workflows/refiner.lock.yml @@ -718,8 +718,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml index b994626962..c113f57642 100644 --- a/.github/workflows/release.lock.yml +++ b/.github/workflows/release.lock.yml @@ -652,8 +652,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/repo-audit-analyzer.lock.yml b/.github/workflows/repo-audit-analyzer.lock.yml index f55a9793b6..4619b08343 100644 --- a/.github/workflows/repo-audit-analyzer.lock.yml +++ b/.github/workflows/repo-audit-analyzer.lock.yml @@ -655,8 +655,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + 
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/repo-tree-map.lock.yml b/.github/workflows/repo-tree-map.lock.yml index 676e003842..1e34ead371 100644 --- a/.github/workflows/repo-tree-map.lock.yml +++ b/.github/workflows/repo-tree-map.lock.yml @@ -631,8 +631,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/repository-quality-improver.lock.yml b/.github/workflows/repository-quality-improver.lock.yml index 4c8446c145..a41cc70d9d 100644 --- a/.github/workflows/repository-quality-improver.lock.yml +++ b/.github/workflows/repository-quality-improver.lock.yml @@ -662,8 +662,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/research.lock.yml b/.github/workflows/research.lock.yml index 85bddc9568..655bb2b11d 100644 --- a/.github/workflows/research.lock.yml +++ b/.github/workflows/research.lock.yml @@ -655,8 +655,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml index c4def1f563..aa49e39639 100644 --- a/.github/workflows/safe-output-health.lock.yml +++ b/.github/workflows/safe-output-health.lock.yml @@ -730,8 +730,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/schema-consistency-checker.lock.yml b/.github/workflows/schema-consistency-checker.lock.yml index 0da8826067..8766011c30 100644 --- a/.github/workflows/schema-consistency-checker.lock.yml +++ b/.github/workflows/schema-consistency-checker.lock.yml @@ -653,8 +653,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/scout.lock.yml b/.github/workflows/scout.lock.yml index 2081f3b7ca..0136ee184d 100644 --- a/.github/workflows/scout.lock.yml +++ b/.github/workflows/scout.lock.yml @@ -813,8 +813,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/security-alert-burndown.campaign.g.lock.yml b/.github/workflows/security-alert-burndown.campaign.g.lock.yml index aa2c5a4063..2c6140aec5 100644 --- a/.github/workflows/security-alert-burndown.campaign.g.lock.yml +++ b/.github/workflows/security-alert-burndown.campaign.g.lock.yml @@ -990,8 +990,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/security-compliance.lock.yml b/.github/workflows/security-compliance.lock.yml index 21168b629b..76051a0530 100644 --- a/.github/workflows/security-compliance.lock.yml +++ b/.github/workflows/security-compliance.lock.yml @@ -698,8 
+698,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index 4e8adcd533..ccf9d3571d 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -818,8 +818,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/semantic-function-refactor.lock.yml b/.github/workflows/semantic-function-refactor.lock.yml index cb45fefce5..7fbcff424b 100644 --- a/.github/workflows/semantic-function-refactor.lock.yml +++ b/.github/workflows/semantic-function-refactor.lock.yml @@ -720,8 +720,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/sergo.lock.yml b/.github/workflows/sergo.lock.yml index e0222befaa..0f30885cd6 100644 --- a/.github/workflows/sergo.lock.yml +++ b/.github/workflows/sergo.lock.yml @@ -674,8 +674,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml index c9dcda2c1e..6f8f329423 100644 --- a/.github/workflows/slide-deck-maintainer.lock.yml +++ 
b/.github/workflows/slide-deck-maintainer.lock.yml @@ -704,8 +704,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-agent.lock.yml b/.github/workflows/smoke-agent.lock.yml index a88f045158..596fe374ed 100644 --- a/.github/workflows/smoke-agent.lock.yml +++ b/.github/workflows/smoke-agent.lock.yml @@ -731,8 +731,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index eadc339f79..57a4b7b216 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -2156,8 +2156,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index d70fe5c252..5480128c2a 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -1171,8 +1171,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-copilot-arm.lock.yml b/.github/workflows/smoke-copilot-arm.lock.yml index c67c019e01..96b75fbe57 100644 --- a/.github/workflows/smoke-copilot-arm.lock.yml 
+++ b/.github/workflows/smoke-copilot-arm.lock.yml @@ -1654,8 +1654,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml index f657357409..24d7bae610 100644 --- a/.github/workflows/smoke-copilot.lock.yml +++ b/.github/workflows/smoke-copilot.lock.yml @@ -1651,8 +1651,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-gemini.lock.yml b/.github/workflows/smoke-gemini.lock.yml index a017cdc86d..71ea174133 100644 --- a/.github/workflows/smoke-gemini.lock.yml +++ b/.github/workflows/smoke-gemini.lock.yml @@ -911,8 +911,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-multi-pr.lock.yml b/.github/workflows/smoke-multi-pr.lock.yml index 7602ee9c86..78385d4881 100644 --- a/.github/workflows/smoke-multi-pr.lock.yml +++ b/.github/workflows/smoke-multi-pr.lock.yml @@ -725,8 +725,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-project.lock.yml b/.github/workflows/smoke-project.lock.yml index ea6f8ba663..e55e7cf5d8 100644 --- 
a/.github/workflows/smoke-project.lock.yml +++ b/.github/workflows/smoke-project.lock.yml @@ -1143,8 +1143,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-temporary-id.lock.yml b/.github/workflows/smoke-temporary-id.lock.yml index 18132b6bdd..6840186d6e 100644 --- a/.github/workflows/smoke-temporary-id.lock.yml +++ b/.github/workflows/smoke-temporary-id.lock.yml @@ -767,8 +767,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-test-tools.lock.yml b/.github/workflows/smoke-test-tools.lock.yml index 4209ba2ddb..0668a55e87 100644 --- a/.github/workflows/smoke-test-tools.lock.yml +++ b/.github/workflows/smoke-test-tools.lock.yml @@ -670,8 +670,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/smoke-workflow-call.lock.yml b/.github/workflows/smoke-workflow-call.lock.yml index f461f4f4e1..8fa6715892 100644 --- a/.github/workflows/smoke-workflow-call.lock.yml +++ b/.github/workflows/smoke-workflow-call.lock.yml @@ -610,8 +610,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/stale-repo-identifier.lock.yml 
b/.github/workflows/stale-repo-identifier.lock.yml index b593e71fc2..8067755097 100644 --- a/.github/workflows/stale-repo-identifier.lock.yml +++ b/.github/workflows/stale-repo-identifier.lock.yml @@ -783,8 +783,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml index 8b09878e63..1254ecbd6d 100644 --- a/.github/workflows/static-analysis-report.lock.yml +++ b/.github/workflows/static-analysis-report.lock.yml @@ -726,8 +726,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/step-name-alignment.lock.yml b/.github/workflows/step-name-alignment.lock.yml index 3196c6c40e..ac56ef2833 100644 --- a/.github/workflows/step-name-alignment.lock.yml +++ b/.github/workflows/step-name-alignment.lock.yml @@ -666,8 +666,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/sub-issue-closer.lock.yml b/.github/workflows/sub-issue-closer.lock.yml index 3e8615626d..56400c0db4 100644 --- a/.github/workflows/sub-issue-closer.lock.yml +++ b/.github/workflows/sub-issue-closer.lock.yml @@ -729,8 +729,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": 
"${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml index 1026ce4d6f..f3efd0b010 100644 --- a/.github/workflows/super-linter.lock.yml +++ b/.github/workflows/super-linter.lock.yml @@ -685,8 +685,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml index 1bb6865a3a..95a8ac71e6 100644 --- a/.github/workflows/technical-doc-writer.lock.yml +++ b/.github/workflows/technical-doc-writer.lock.yml @@ -778,8 +778,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/terminal-stylist.lock.yml b/.github/workflows/terminal-stylist.lock.yml index 37f9ee188d..01f784511f 100644 --- a/.github/workflows/terminal-stylist.lock.yml +++ b/.github/workflows/terminal-stylist.lock.yml @@ -639,8 +639,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/test-create-pr-error-handling.lock.yml b/.github/workflows/test-create-pr-error-handling.lock.yml index 09d6c4366e..5560916b9f 100644 --- a/.github/workflows/test-create-pr-error-handling.lock.yml +++ b/.github/workflows/test-create-pr-error-handling.lock.yml @@ -663,8 +663,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": 
"${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/test-dispatcher.lock.yml b/.github/workflows/test-dispatcher.lock.yml index 51d3fac2c2..e8151cba31 100644 --- a/.github/workflows/test-dispatcher.lock.yml +++ b/.github/workflows/test-dispatcher.lock.yml @@ -583,8 +583,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/test-project-url-default.lock.yml b/.github/workflows/test-project-url-default.lock.yml index 6c3994d96f..7f31f9a356 100644 --- a/.github/workflows/test-project-url-default.lock.yml +++ b/.github/workflows/test-project-url-default.lock.yml @@ -826,8 +826,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/test-workflow.lock.yml b/.github/workflows/test-workflow.lock.yml index b51c60fd43..b6e2e495c5 100644 --- a/.github/workflows/test-workflow.lock.yml +++ b/.github/workflows/test-workflow.lock.yml @@ -361,8 +361,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml index 27f9d2ecdc..893430652d 100644 --- a/.github/workflows/tidy.lock.yml +++ b/.github/workflows/tidy.lock.yml @@ -752,8 +752,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": 
"${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/typist.lock.yml b/.github/workflows/typist.lock.yml index b677005fb2..a850689494 100644 --- a/.github/workflows/typist.lock.yml +++ b/.github/workflows/typist.lock.yml @@ -655,8 +655,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/ubuntu-image-analyzer.lock.yml b/.github/workflows/ubuntu-image-analyzer.lock.yml index a14a5d2e32..b4125cf1e8 100644 --- a/.github/workflows/ubuntu-image-analyzer.lock.yml +++ b/.github/workflows/ubuntu-image-analyzer.lock.yml @@ -655,8 +655,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml index e6a08aaa82..c3245f5f97 100644 --- a/.github/workflows/unbloat-docs.lock.yml +++ b/.github/workflows/unbloat-docs.lock.yml @@ -814,8 +814,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/video-analyzer.lock.yml b/.github/workflows/video-analyzer.lock.yml index 660adf566c..f8f60fa796 100644 --- a/.github/workflows/video-analyzer.lock.yml +++ b/.github/workflows/video-analyzer.lock.yml @@ -667,8 +667,7 @@ jobs: "port": $MCP_GATEWAY_PORT, 
"domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/weekly-editors-health-check.lock.yml b/.github/workflows/weekly-editors-health-check.lock.yml index f2f8a5d1e5..0492af7262 100644 --- a/.github/workflows/weekly-editors-health-check.lock.yml +++ b/.github/workflows/weekly-editors-health-check.lock.yml @@ -688,8 +688,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml index 693b45b5dd..080d3c7556 100644 --- a/.github/workflows/weekly-issue-summary.lock.yml +++ b/.github/workflows/weekly-issue-summary.lock.yml @@ -694,8 +694,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/weekly-safe-outputs-spec-review.lock.yml b/.github/workflows/weekly-safe-outputs-spec-review.lock.yml index 51cda2830d..c549df97ec 100644 --- a/.github/workflows/weekly-safe-outputs-spec-review.lock.yml +++ b/.github/workflows/weekly-safe-outputs-spec-review.lock.yml @@ -647,8 +647,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/workflow-generator.lock.yml b/.github/workflows/workflow-generator.lock.yml index 
cb4d24d848..4934665b5e 100644 --- a/.github/workflows/workflow-generator.lock.yml +++ b/.github/workflows/workflow-generator.lock.yml @@ -775,8 +775,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/workflow-health-manager.lock.yml b/.github/workflows/workflow-health-manager.lock.yml index 31deed04ed..12a22489b2 100644 --- a/.github/workflows/workflow-health-manager.lock.yml +++ b/.github/workflows/workflow-health-manager.lock.yml @@ -836,8 +836,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/workflow-normalizer.lock.yml b/.github/workflows/workflow-normalizer.lock.yml index 9147009ba9..f603a516bc 100644 --- a/.github/workflows/workflow-normalizer.lock.yml +++ b/.github/workflows/workflow-normalizer.lock.yml @@ -722,8 +722,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/.github/workflows/workflow-skill-extractor.lock.yml b/.github/workflows/workflow-skill-extractor.lock.yml index e6ac91f47a..16e8dbda77 100644 --- a/.github/workflows/workflow-skill-extractor.lock.yml +++ b/.github/workflows/workflow-skill-extractor.lock.yml @@ -706,8 +706,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } 
GH_AW_MCP_CONFIG_EOF diff --git a/DEADCODE.md b/DEADCODE.md index c7280e9726..07bd662d83 100644 --- a/DEADCODE.md +++ b/DEADCODE.md @@ -8,12 +8,25 @@ deadcode ./cmd/... ./internal/tools/... 2>/dev/null **Critical:** Always include `./internal/tools/...` — it covers separate binaries called by the Makefile (e.g. `make actions-build`). Running `./cmd/...` alone gives false positives. +## Correct methodology + +`deadcode` analyses the production binary entry points only. **Test files compile into a separate test binary** and do not keep production code alive. A function flagged by `deadcode` is dead regardless of whether test files call it. + +**Correct approach:** +1. `deadcode` flags `Foo` as unreachable +2. `grep -rn "Foo" --include="*.go"` shows callers only in `*_test.go` files +3. **Delete `Foo` AND any test functions that exclusively test `Foo`** + +**Wrong approach (batch 4 mistake):** treating test-only callers as evidence the function is "live" and skipping it. + +**Exception — `compiler_test_helpers.go`:** the 3 functions there (`containsInNonCommentLines`, `indexInNonCommentLines`, `extractJobSection`) are production-file helpers used by ≥15 test files as shared test infrastructure. They're dead in the production binary but valuable as test utilities. Leave them. + ## Verification after every batch ```bash go build ./... go vet ./... -go vet -tags=integration ./... # catches integration test files invisible without the tag +go vet -tags=integration ./... # catches integration test files invisible without this tag make fmt ``` @@ -23,39 +36,141 @@ make fmt - `compiler.ParseWorkflowString` - `compiler.CompileToYAML` -**Test helpers** — `pkg/workflow/compiler_test_helpers.go` shows 3 dead functions but is used by 15 test files. Don't delete it. +**`pkg/console/console_wasm.go`** — this file provides WASM-specific stub implementations of many `pkg/console` functions (gated with `//go:build js || wasm`). 
Before deleting any function from `pkg/console/`, `grep` for it in `console_wasm.go`. If the function is called there, either inline the logic in `console_wasm.go` or delete the call. Batch 10 mistake: deleted `renderTreeSimple` from `render.go` but `console_wasm.go`'s `RenderTree` still called it, breaking the WASM build. Fix: replaced the `RenderTree` body in `console_wasm.go` with an inlined closure that no longer calls the deleted helper. + +**`compiler_test_helpers.go`** — shows 3 dead functions but serves as shared test infrastructure for ≥15 test files. Do not delete. **Constant/embed rescue** — Some otherwise-dead files contain live constants or `//go:embed` directives. Extract them before deleting the file. --- -## Current dead code (276 functions, as of 2026-02-28) - -Run the command above to regenerate. Top files by dead function count: - -| File | Dead | Notes | -|------|------|-------| -| `pkg/workflow/js.go` | 17 | Get*/bundle stubs; many have no callers anywhere | -| `pkg/workflow/compiler_types.go` | 17 | `With*` option funcs + getters; check WASM first | -| `pkg/workflow/artifact_manager.go` | 14 | Many test callers; do last | -| `pkg/constants/constants.go` | 13 | All `String()`/`IsValid()` on semantic type aliases | -| `pkg/workflow/domains.go` | 10 | Check callers | -| `pkg/workflow/expression_builder.go` | 9 | Check callers | -| `pkg/workflow/validation_helpers.go` | 6 | Check callers | -| `pkg/cli/docker_images.go` | 6 | Check callers | -| `pkg/workflow/permissions_factory.go` | 5 | Check callers | -| `pkg/workflow/map_helpers.go` | 5 | Check callers | -| `pkg/workflow/engine_helpers.go` | 5 | Check callers | -| `pkg/console/console.go` | 5 | Check callers | -| `pkg/workflow/safe_outputs_env.go` | 4 | Check callers | -| `pkg/workflow/expression_nodes.go` | 4 | Check callers | - -~80 additional files have 1–3 dead functions each. - -## Suggested approach - -1. Pick a file with 5+ dead functions. -2. 
For each dead function, check callers: `grep -rn "FuncName" --include="*.go"`. If only test callers, also remove the tests. -3. Remove the function and any now-unused imports. -4. Run the verification commands above. -5. Commit per logical group, keep PRs small and reviewable. +## Batch plan (248 dead functions as of 2026-02-28) + +Each batch: delete the dead functions, delete the tests that exclusively test them, run verification, commit, open PR. + +### Batch 5 — simple helpers (11 functions) +Files: `pkg/workflow/validation_helpers.go` (6), `pkg/workflow/map_helpers.go` (5) + +Dead functions: +- `ValidateRequired`, `ValidateMaxLength`, `ValidateMinLength`, `ValidateInList`, `ValidatePositiveInt`, `ValidateNonNegativeInt` +- `isEmptyOrNil`, `getMapFieldAsString`, `getMapFieldAsMap`, `getMapFieldAsBool`, `getMapFieldAsInt` + +Tests to remove from `validation_helpers_test.go`: +- `TestValidateRequired`, `TestValidateMaxLength`, `TestValidateMinLength`, `TestValidateInList`, `TestValidatePositiveInt`, `TestValidateNonNegativeInt` +- `TestIsEmptyOrNil`, `TestGetMapFieldAsString`, `TestGetMapFieldAsMap`, `TestGetMapFieldAsBool`, `TestGetMapFieldAsInt` + +### Batch 6 — engine helpers (5 functions) +File: `pkg/workflow/engine_helpers.go` (5) + +Dead functions: `ExtractAgentIdentifier`, `GetHostedToolcachePathSetup`, `GetSanitizedPATHExport`, `GetToolBinsSetup`, `GetToolBinsEnvArg` + +Tests to remove from `engine_helpers_test.go`: +- `TestExtractAgentIdentifier`, `TestGetHostedToolcachePathSetup`, `TestGetHostedToolcachePathSetup_Consistency`, `TestGetHostedToolcachePathSetup_UsesToolBins`, `TestGetToolBinsSetup`, `TestGetToolBinsEnvArg`, `TestGetSanitizedPATHExport`, `TestGetSanitizedPATHExport_ShellExecution` + +### Batch 7 — domain helpers (10 functions) +File: `pkg/workflow/domains.go` (10) + +Dead functions: `mergeDomainsWithNetwork`, `mergeDomainsWithNetworkAndTools`, `GetCopilotAllowedDomains`, `GetCopilotAllowedDomainsWithSafeInputs`, 
`GetCopilotAllowedDomainsWithTools`, `GetCodexAllowedDomains`, `GetCodexAllowedDomainsWithTools`, `GetClaudeAllowedDomains`, `GetClaudeAllowedDomainsWithSafeInputs`, `GetClaudeAllowedDomainsWithTools` + +Tests to remove from `domains_test.go`, `domains_protocol_test.go`, `domains_sort_test.go`, `safe_inputs_firewall_test.go`, `http_mcp_domains_test.go` — remove only the specific test functions that call these dead helpers; keep tests for live functions in those files. + +### Batch 8 — expression graph (16 functions) +Files: `pkg/workflow/expression_nodes.go` (4), `pkg/workflow/expression_builder.go` (9), `pkg/workflow/known_needs_expressions.go` (3) + +Dead functions in `expression_nodes.go`: `ParenthesesNode.Render`, `NumberLiteralNode.Render`, `TernaryNode.Render`, `ContainsNode.Render` + +Dead functions in `expression_builder.go`: `BuildNumberLiteral`, `BuildContains`, `BuildTernary`, `BuildLabelContains`, `BuildActionEquals`, `BuildRefStartsWith`, `BuildExpressionWithDescription`, `BuildPRCommentCondition`, `AddDetectionSuccessCheck` + +Dead functions in `known_needs_expressions.go`: `getSafeOutputJobNames`, `hasMultipleSafeOutputTypes`, `getCustomJobNames` + +Tests to find and remove: check `expressions_test.go`, `expression_coverage_test.go`, `known_needs_expressions_test.go`. + +### Batch 9 — constants & console (18 functions) +Files: `pkg/constants/constants.go` (13), `pkg/console/console.go` (5) + +Dead functions in `constants.go`: all `String()`/`IsValid()` methods on `LineLength`, `FeatureFlag`, `URL`, `ModelName`, `WorkflowID`, `EngineName`, plus `MCPServerID.IsValid` + +Dead functions in `console.go`: `FormatLocationMessage`, `FormatCountMessage`, `FormatListHeader`, `RenderTree`, `buildLipglossTree` + +Tests to remove: relevant subtests in `constants_test.go`; `TestFormatLocationMessage`, `TestRenderTree`, `TestRenderTreeSimple`, `TestFormatCountMessage`, `TestFormatListHeader` in `console_test.go` and related files. 
+ +### Batch 10 — agent session builder (1 function) +File: `pkg/workflow/create_agent_session.go` + +Dead function: `Compiler.buildCreateOutputAgentSessionJob` + +Find and remove its test(s): `grep -rn "buildCreateOutputAgentSessionJob" --include="*_test.go"`. + +### Batch 11 — safe-outputs & MCP helpers (13 functions) +Files: `pkg/workflow/safe_outputs_env.go` (4), `pkg/workflow/safe_outputs_config_helpers.go` (3), `pkg/workflow/mcp_playwright_config.go` (3), `pkg/workflow/mcp_config_builtin.go` (3) + +Dead functions in `safe_outputs_env.go`: `applySafeOutputEnvToSlice`, `buildTitlePrefixEnvVar`, `buildLabelsEnvVar`, `buildCategoryEnvVar` + +Dead functions in `safe_outputs_config_helpers.go`: `getEnabledSafeOutputToolNamesReflection`, `Compiler.formatDetectionRunsOn`, `GetEnabledSafeOutputToolNames` + +Dead functions in `mcp_playwright_config.go`: `getPlaywrightDockerImageVersion`, `getPlaywrightMCPPackageVersion`, `generatePlaywrightDockerArgs` + +Dead functions in `mcp_config_builtin.go`: `renderSafeOutputsMCPConfig`, `renderSafeOutputsMCPConfigTOML`, `renderAgenticWorkflowsMCPConfigTOML` + +Tests to remove: check `safe_output_helpers_test.go`, `version_field_test.go`, `mcp_benchmark_test.go`, `mcp_config_refactor_test.go`, `mcp_config_shared_test.go`, `threat_detection_test.go`. 
+ +### Batch 12 — small utilities (9 functions) +Files: `pkg/sliceutil/sliceutil.go` (3), `pkg/stringutil/pat_validation.go` (3), `pkg/workflow/error_aggregation.go` (3) + +Dead functions in `sliceutil.go`: `ContainsAny`, `ContainsIgnoreCase`, `FilterMap` + +Dead functions in `pat_validation.go`: `IsFineGrainedPAT`, `IsClassicPAT`, `IsOAuthToken` + +Dead functions in `error_aggregation.go`: `ErrorCollector.HasErrors`, `FormatAggregatedError`, `SplitJoinedErrors` + +### Batch 13 — parser utilities (9 functions) +Files: `pkg/parser/include_expander.go` (3), `pkg/parser/schema_validation.go` (3), `pkg/parser/yaml_error.go` (3) + +Dead functions in `include_expander.go`: `ExpandIncludes`, `ProcessIncludesForEngines`, `ProcessIncludesForSafeOutputs` + +Dead functions in `schema_validation.go`: `ValidateMainWorkflowFrontmatterWithSchema`, `ValidateIncludedFileFrontmatterWithSchema`, `ValidateMCPConfigWithSchema` + +Dead functions in `yaml_error.go`: `ExtractYAMLError`, `extractFromGoccyFormat`, `extractFromStringParsing` + +### Batch 14 — agentic engine & compiler types (16 functions) +Files: `pkg/workflow/agentic_engine.go` (3), `pkg/workflow/compiler_types.go` (10+), `pkg/cli/docker_images.go` (6) + +Dead functions in `agentic_engine.go`: `BaseEngine.convertStepToYAML`, `GenerateSecretValidationStep`, `EngineRegistry.GetAllEngines` + +Dead functions in `compiler_types.go` (check WASM binary first): `WithCustomOutput`, `WithVersion`, `WithSkipValidation`, `WithNoEmit`, `WithStrictMode`, `WithForceRefreshActionPins`, `WithWorkflowIdentifier`, `NewCompilerWithVersion`, `Compiler.GetSharedActionResolverForTest`, `Compiler.GetArtifactManager` + +Dead functions in `docker_images.go`: `isDockerAvailable`, `ResetDockerPullState`, `ValidateMCPServerDockerAvailability`, `SetDockerImageDownloading`, `SetMockImageAvailable`, `PrintDockerPullStatus` + +### Batch 15 — js.go stubs (6 functions) +File: `pkg/workflow/js.go` + +Dead functions: the remaining 6 unreachable `get*Script()` 
/ public `Get*` stubs reported by deadcode. + +### Batch 16 — artifact manager (14 functions) +File: `pkg/workflow/artifact_manager.go` + +Save for last — most complex, with deep coupling to `artifact_manager_integration_test.go`. + +### Remaining (~120 functions) +~80+ files each with 1–3 dead functions. Tackle after the above batches clear the larger clusters. + +--- + +## Per-batch checklist + +For each batch: + +- [ ] Run `deadcode ./cmd/... ./internal/tools/... 2>/dev/null` to confirm current dead list +- [ ] For each dead function, `grep -rn "FuncName" --include="*.go"` to find all callers +- [ ] Delete the function +- [ ] Delete test functions that exclusively call the deleted function (not shared helpers) +- [ ] Check for now-unused imports in edited files +- [ ] If editing `pkg/console/`, check `pkg/console/console_wasm.go` for calls to the deleted functions +- [ ] `go build ./...` +- [ ] `GOARCH=wasm GOOS=js go build ./pkg/console/...` (if `pkg/console/` was touched) +- [ ] `go vet ./...` +- [ ] `go vet -tags=integration ./...` +- [ ] `make fmt` +- [ ] Run selective tests for touched packages: `go test -v -run "TestAffected" ./pkg/...` +- [ ] Commit with message: `chore: remove dead functions (batch N) — X -> Y dead` +- [ ] Open PR, confirm CI passes before merging diff --git a/actions/setup/js/create_missing_data_issue.cjs b/actions/setup/js/create_missing_data_issue.cjs index b04fa7bad6..1276178e31 100644 --- a/actions/setup/js/create_missing_data_issue.cjs +++ b/actions/setup/js/create_missing_data_issue.cjs @@ -18,6 +18,7 @@ const HANDLER_TYPE = "create_missing_data_issue"; const main = buildMissingIssueHandler({ handlerType: HANDLER_TYPE, defaultTitlePrefix: "[missing data]", + defaultLabels: ["agentic-workflows"], itemsField: "missing_data", templatePath: "/opt/gh-aw/prompts/missing_data_issue.md", templateListKey: "missing_data_list", diff --git a/actions/setup/js/create_missing_tool_issue.cjs b/actions/setup/js/create_missing_tool_issue.cjs index 
462dab32e0..2ca315be88 100644 --- a/actions/setup/js/create_missing_tool_issue.cjs +++ b/actions/setup/js/create_missing_tool_issue.cjs @@ -18,6 +18,7 @@ const HANDLER_TYPE = "create_missing_tool_issue"; const main = buildMissingIssueHandler({ handlerType: HANDLER_TYPE, defaultTitlePrefix: "[missing tool]", + defaultLabels: ["agentic-workflows"], itemsField: "missing_tools", templatePath: "/opt/gh-aw/prompts/missing_tool_issue.md", templateListKey: "missing_tools_list", diff --git a/actions/setup/js/missing_issue_helpers.cjs b/actions/setup/js/missing_issue_helpers.cjs index 81c548cedc..00b0c52929 100644 --- a/actions/setup/js/missing_issue_helpers.cjs +++ b/actions/setup/js/missing_issue_helpers.cjs @@ -25,15 +25,17 @@ const { sanitizeContent } = require("./sanitize_content.cjs"); * @param {function(string): string[]} options.buildCommentHeader - Returns header lines for the comment body given runUrl * @param {function(Object, number): string[]} options.renderCommentItem - Renders a single item for an existing-issue comment * @param {function(Object, number): string[]} options.renderIssueItem - Renders a single item for a new-issue body + * @param {string[]} [options.defaultLabels] - Labels always applied to created issues (merged with config.labels) * @returns {HandlerFactoryFunction} */ function buildMissingIssueHandler(options) { - const { handlerType, defaultTitlePrefix, itemsField, templatePath, templateListKey, buildCommentHeader, renderCommentItem, renderIssueItem } = options; + const { handlerType, defaultTitlePrefix, itemsField, templatePath, templateListKey, buildCommentHeader, renderCommentItem, renderIssueItem, defaultLabels = [] } = options; return async function main(config = {}) { // Extract configuration const titlePrefix = config.title_prefix || defaultTitlePrefix; - const envLabels = config.labels ? (Array.isArray(config.labels) ? 
config.labels : config.labels.split(",")).map(label => String(label).trim()).filter(label => label) : []; + const userLabels = config.labels ? (Array.isArray(config.labels) ? config.labels : config.labels.split(",")).map(label => String(label).trim()).filter(label => label) : []; + const envLabels = [...new Set([...defaultLabels, ...userLabels])]; const maxCount = config.max || 1; // Default to 1 to create only one issue per workflow run core.info(`Title prefix: ${titlePrefix}`); diff --git a/actions/setup/js/missing_issue_helpers.test.cjs b/actions/setup/js/missing_issue_helpers.test.cjs index a6ae5280d3..ce0b01de2e 100644 --- a/actions/setup/js/missing_issue_helpers.test.cjs +++ b/actions/setup/js/missing_issue_helpers.test.cjs @@ -270,6 +270,48 @@ describe("missing_issue_helpers.cjs - buildMissingIssueHandler", () => { expect(mockGithub.rest.issues.create).toHaveBeenCalledWith(expect.objectContaining({ labels: ["bug", "needs-triage"] })); }); + + it("should always apply defaultLabels from options even without config.labels", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { total_count: 0, items: [] }, + }); + mockGithub.rest.issues.create.mockResolvedValue({ + data: { number: 77, html_url: "https://github.com/owner/repo/issues/77" }, + }); + + const handler = await buildMissingIssueHandler(makeOptions({ defaultLabels: ["agentic-workflows"] }))({}); + await handler(defaultMessage); + + expect(mockGithub.rest.issues.create).toHaveBeenCalledWith(expect.objectContaining({ labels: ["agentic-workflows"] })); + }); + + it("should merge defaultLabels with config.labels", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { total_count: 0, items: [] }, + }); + mockGithub.rest.issues.create.mockResolvedValue({ + data: { number: 77, html_url: "https://github.com/owner/repo/issues/77" }, + }); + + const handler = await buildMissingIssueHandler(makeOptions({ defaultLabels: ["agentic-workflows"] 
}))({ labels: ["bug"] }); + await handler(defaultMessage); + + expect(mockGithub.rest.issues.create).toHaveBeenCalledWith(expect.objectContaining({ labels: ["agentic-workflows", "bug"] })); + }); + + it("should deduplicate labels when defaultLabels and config.labels overlap", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { total_count: 0, items: [] }, + }); + mockGithub.rest.issues.create.mockResolvedValue({ + data: { number: 77, html_url: "https://github.com/owner/repo/issues/77" }, + }); + + const handler = await buildMissingIssueHandler(makeOptions({ defaultLabels: ["agentic-workflows"] }))({ labels: ["agentic-workflows", "bug"] }); + await handler(defaultMessage); + + expect(mockGithub.rest.issues.create).toHaveBeenCalledWith(expect.objectContaining({ labels: ["agentic-workflows", "bug"] })); + }); }); describe("error handling", () => { diff --git a/docs/public/schemas/mcp-gateway-config.schema.json b/docs/public/schemas/mcp-gateway-config.schema.json index 01ab6d3aad..ced488d0dc 100644 --- a/docs/public/schemas/mcp-gateway-config.schema.json +++ b/docs/public/schemas/mcp-gateway-config.schema.json @@ -231,6 +231,16 @@ "description": "Directory path for storing large payload JSON files for authenticated clients. MUST be an absolute path: Unix paths start with '/', Windows paths start with a drive letter followed by ':\\'. Relative paths, empty strings, and paths that don't follow these conventions are not allowed.", "minLength": 1, "pattern": "^(/|[A-Za-z]:\\\\)" + }, + "payloadSizeThreshold": { + "type": "integer", + "description": "Size threshold in bytes for writing payloads to files instead of inlining them in the response. Payloads larger than this threshold are written to files in payloadDir. Defaults to 524288 (512KB) if not specified.", + "minimum": 1 + }, + "payloadPathPrefix": { + "type": "string", + "description": "Optional path prefix for payload file paths as seen from within agent containers. 
Use this when the payload directory is mounted at a different path inside the container than on the host.", + "minLength": 1 } }, "required": ["port", "domain", "apiKey"], diff --git a/docs/src/content/docs/reference/cross-repository.md b/docs/src/content/docs/reference/cross-repository.md index 23f69eef53..7f39bee941 100644 --- a/docs/src/content/docs/reference/cross-repository.md +++ b/docs/src/content/docs/reference/cross-repository.md @@ -92,6 +92,7 @@ tools: github-token: ${{ secrets.CROSS_REPO_PAT }} ``` + See [GitHub Tools Reference](/gh-aw/reference/github-tools/#cross-repository-reading) for complete details on configuring cross-repository read access for GitHub Tools. This authentication is for **reading** information from GitHub. Authorization for **writing** to other repositories (creating issues, PRs, comments) is configured separately, see below. diff --git a/pkg/cli/audit_report_helpers_test.go b/pkg/cli/audit_report_helpers_test.go index d837c9771b..b0e222e97e 100644 --- a/pkg/cli/audit_report_helpers_test.go +++ b/pkg/cli/audit_report_helpers_test.go @@ -10,81 +10,10 @@ import ( "testing" "time" - "github.com/github/gh-aw/pkg/fileutil" "github.com/github/gh-aw/pkg/stringutil" "github.com/github/gh-aw/pkg/testutil" ) -func TestCalculateDirectorySize(t *testing.T) { - tests := []struct { - name string - setup func(t *testing.T) string - expected int64 - }{ - { - name: "empty directory", - setup: func(t *testing.T) string { - dir := testutil.TempDir(t, "test-*") - return dir - }, - expected: 0, - }, - { - name: "single file", - setup: func(t *testing.T) string { - dir := testutil.TempDir(t, "test-*") - err := os.WriteFile(filepath.Join(dir, "test.txt"), []byte("hello"), 0644) - if err != nil { - t.Fatal(err) - } - return dir - }, - expected: 5, - }, - { - name: "multiple files in nested directories", - setup: func(t *testing.T) string { - dir := testutil.TempDir(t, "test-*") - // File 1: 10 bytes - err := os.WriteFile(filepath.Join(dir, "file1.txt"), 
[]byte("0123456789"), 0644) - if err != nil { - t.Fatal(err) - } - // Create subdirectory - subdir := filepath.Join(dir, "subdir") - err = os.Mkdir(subdir, 0755) - if err != nil { - t.Fatal(err) - } - // File 2: 5 bytes - err = os.WriteFile(filepath.Join(subdir, "file2.txt"), []byte("hello"), 0644) - if err != nil { - t.Fatal(err) - } - return dir - }, - expected: 15, - }, - { - name: "nonexistent directory", - setup: func(t *testing.T) string { - return "/nonexistent/path/that/does/not/exist" - }, - expected: 0, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - dir := tt.setup(t) - got := fileutil.CalculateDirectorySize(dir) - if got != tt.expected { - t.Errorf("fileutil.CalculateDirectorySize() = %d, want %d", got, tt.expected) - } - }) - } -} - func TestParseDurationString(t *testing.T) { tests := []struct { name string diff --git a/pkg/cli/completions.go b/pkg/cli/completions.go index 0745e97e61..1bda1d3b7a 100644 --- a/pkg/cli/completions.go +++ b/pkg/cli/completions.go @@ -118,43 +118,6 @@ func CompleteEngineNames(cmd *cobra.Command, args []string, toComplete string) ( return filtered, cobra.ShellCompDirectiveNoFileComp } -// CompleteMCPServerNames provides shell completion for MCP server names -// If a workflow is specified, it returns the MCP servers defined in that workflow -func CompleteMCPServerNames(workflowFile string) func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - return func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - completionsLog.Printf("Completing MCP server names for workflow: %s, prefix: %s", workflowFile, toComplete) - - if workflowFile == "" { - return nil, cobra.ShellCompDirectiveNoFileComp - } - - // Resolve the workflow path - workflowPath, err := ResolveWorkflowPath(workflowFile) - if err != nil { - completionsLog.Printf("Failed to resolve workflow path: %v", err) - return nil, 
cobra.ShellCompDirectiveNoFileComp - } - - // Load MCP configs from the workflow - // The second parameter is the server filter - empty string means no filtering - _, mcpConfigs, err := loadWorkflowMCPConfigs(workflowPath, "" /* serverFilter */) - if err != nil { - completionsLog.Printf("Failed to load MCP configs: %v", err) - return nil, cobra.ShellCompDirectiveNoFileComp - } - - servers := sliceutil.FilterMap(mcpConfigs, - func(config parser.MCPServerConfig) bool { - return toComplete == "" || strings.HasPrefix(config.Name, toComplete) - }, - func(config parser.MCPServerConfig) string { return config.Name }, - ) - - completionsLog.Printf("Found %d matching MCP servers", len(servers)) - return servers, cobra.ShellCompDirectiveNoFileComp - } -} - // CompleteDirectories provides shell completion for directory paths func CompleteDirectories(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { completionsLog.Printf("Completing directories with prefix: %s", toComplete) diff --git a/pkg/cli/docker_images.go b/pkg/cli/docker_images.go index 3e37772b07..e8dce6a096 100644 --- a/pkg/cli/docker_images.go +++ b/pkg/cli/docker_images.go @@ -3,8 +3,6 @@ package cli import ( "context" "errors" - "fmt" - "os" "os/exec" "strings" "sync" @@ -245,15 +243,6 @@ func ResetDockerPullState() { pullState.mockAvailableInUse = false } -// ValidateMCPServerDockerAvailability validates that Docker is available for MCP server operations -// that require static analysis tools -func ValidateMCPServerDockerAvailability() error { - if !isDockerAvailable() { - return errors.New("docker is not available - required for zizmor, poutine, and actionlint static analysis tools") - } - return nil -} - // SetDockerImageDownloading sets the downloading state for an image (for testing) func SetDockerImageDownloading(image string, downloading bool) { pullState.mu.Lock() @@ -272,12 +261,3 @@ func SetMockImageAvailable(image string, available bool) { 
pullState.mockAvailableInUse = true pullState.mockAvailable[image] = available } - -// PrintDockerPullStatus prints the current pull status to stderr (for debugging) -func PrintDockerPullStatus() { - pullState.mu.RLock() - defer pullState.mu.RUnlock() - if len(pullState.downloading) > 0 { - fmt.Fprintf(os.Stderr, "Currently downloading images: %v\n", pullState.downloading) - } -} diff --git a/pkg/cli/docker_images_test.go b/pkg/cli/docker_images_test.go index 1513aa6b23..b0e7470ba7 100644 --- a/pkg/cli/docker_images_test.go +++ b/pkg/cli/docker_images_test.go @@ -4,10 +4,9 @@ package cli import ( "context" + "strings" "testing" "time" - - "github.com/github/gh-aw/pkg/sliceutil" ) func TestCheckAndPrepareDockerImages_NoToolsRequested(t *testing.T) { @@ -39,7 +38,7 @@ func TestCheckAndPrepareDockerImages_ImageAlreadyDownloading(t *testing.T) { // Error message should mention downloading and retry if err != nil { errMsg := err.Error() - if !sliceutil.ContainsAny(errMsg, "downloading", "retry") { + if !strings.Contains(errMsg, "downloading") && !strings.Contains(errMsg, "retry") { t.Errorf("Expected error to mention downloading and retry, got: %s", errMsg) } } @@ -110,7 +109,7 @@ func TestDockerImageConstants(t *testing.T) { } for name, image := range expectedImages { - if !sliceutil.ContainsAny(image, "/", ":") { + if !strings.Contains(image, "/") && !strings.Contains(image, ":") { t.Errorf("%s image %s does not look like a Docker image reference", name, image) } } @@ -138,7 +137,7 @@ func TestCheckAndPrepareDockerImages_MultipleImages(t *testing.T) { // Error should mention downloading images if err != nil { errMsg := err.Error() - if !sliceutil.ContainsAny(errMsg, "downloading", "retry") { + if !strings.Contains(errMsg, "downloading") && !strings.Contains(errMsg, "retry") { t.Errorf("Expected error to mention downloading and retry, got: %s", errMsg) } } @@ -172,7 +171,7 @@ func TestCheckAndPrepareDockerImages_RetryMessageFormat(t *testing.T) { } for _, expected := 
range expectations { - if !sliceutil.ContainsAny(errMsg, expected) { + if !strings.Contains(errMsg, expected) { t.Errorf("Expected error message to contain '%s', got: %s", expected, errMsg) } } @@ -199,7 +198,7 @@ func TestCheckAndPrepareDockerImages_StartedDownloadingMessage(t *testing.T) { errMsg := err.Error() // Should contain zizmor since it's downloading - if !sliceutil.ContainsAny(errMsg, "zizmor") { + if !strings.Contains(errMsg, "zizmor") { t.Errorf("Expected error message to mention zizmor, got: %s", errMsg) } diff --git a/pkg/cli/mcp_tool_table.go b/pkg/cli/mcp_tool_table.go index 974801032e..93796e4960 100644 --- a/pkg/cli/mcp_tool_table.go +++ b/pkg/cli/mcp_tool_table.go @@ -119,118 +119,3 @@ func renderMCPToolTable(info *parser.MCPServerInfo, opts MCPToolTableOptions) st return result } - -// renderMCPHierarchyTree renders all MCP servers and their tools as a tree structure -// This provides a hierarchical view of the MCP configuration -func renderMCPHierarchyTree(configs []parser.MCPServerConfig, serverInfos map[string]*parser.MCPServerInfo) string { - mcpToolTableLog.Printf("Rendering MCP hierarchy tree: server_count=%d", len(configs)) - - if len(configs) == 0 { - mcpToolTableLog.Print("No MCP servers to render") - return "" - } - - // Build tree structure - root := console.TreeNode{ - Value: "MCP Servers", - Children: make([]console.TreeNode, 0, len(configs)), - } - - for _, config := range configs { - serverNode := console.TreeNode{ - Value: fmt.Sprintf("📦 %s (%s)", config.Name, config.Type), - Children: []console.TreeNode{}, - } - - // Add server info if available - if info, ok := serverInfos[config.Name]; ok && info != nil { - // Create a map for quick lookup of allowed tools - allowedMap := make(map[string]bool) - hasWildcard := false - for _, allowed := range config.Allowed { - if allowed == "*" { - hasWildcard = true - } - allowedMap[allowed] = true - } - - // Add tools section - if len(info.Tools) > 0 { - toolsNode := console.TreeNode{ - 
Value: fmt.Sprintf("🛠️ Tools (%d)", len(info.Tools)), - Children: make([]console.TreeNode, 0, len(info.Tools)), - } - - for _, tool := range info.Tools { - // Determine if tool is allowed - isAllowed := len(config.Allowed) == 0 || hasWildcard || allowedMap[tool.Name] - allowIcon := "🚫" - if isAllowed { - allowIcon = "✅" - } - - // Create tool node with truncated description - toolDesc := tool.Description - if len(toolDesc) > 50 { - toolDesc = toolDesc[:47] + "..." - } - - toolValue := fmt.Sprintf("%s %s - %s", allowIcon, tool.Name, toolDesc) - toolsNode.Children = append(toolsNode.Children, console.TreeNode{ - Value: toolValue, - Children: []console.TreeNode{}, - }) - } - - serverNode.Children = append(serverNode.Children, toolsNode) - } - - // Add resources section - if len(info.Resources) > 0 { - resourcesNode := console.TreeNode{ - Value: fmt.Sprintf("📚 Resources (%d)", len(info.Resources)), - Children: make([]console.TreeNode, 0, len(info.Resources)), - } - - for _, resource := range info.Resources { - resourceValue := fmt.Sprintf("%s - %s", resource.Name, resource.URI) - resourcesNode.Children = append(resourcesNode.Children, console.TreeNode{ - Value: resourceValue, - Children: []console.TreeNode{}, - }) - } - - serverNode.Children = append(serverNode.Children, resourcesNode) - } - - // Add roots section - if len(info.Roots) > 0 { - rootsNode := console.TreeNode{ - Value: fmt.Sprintf("🌳 Roots (%d)", len(info.Roots)), - Children: make([]console.TreeNode, 0, len(info.Roots)), - } - - for _, root := range info.Roots { - rootValue := fmt.Sprintf("%s - %s", root.Name, root.URI) - rootsNode.Children = append(rootsNode.Children, console.TreeNode{ - Value: rootValue, - Children: []console.TreeNode{}, - }) - } - - serverNode.Children = append(serverNode.Children, rootsNode) - } - } else { - // Server info not available (error or not yet queried) - serverNode.Children = append(serverNode.Children, console.TreeNode{ - Value: "⚠️ Server info not available", - Children: 
[]console.TreeNode{}, - }) - } - - root.Children = append(root.Children, serverNode) - } - - // Render the tree - return console.RenderTree(root) -} diff --git a/pkg/cli/mcp_tool_table_test.go b/pkg/cli/mcp_tool_table_test.go index 7d0bd8c646..6eb5a04f0b 100644 --- a/pkg/cli/mcp_tool_table_test.go +++ b/pkg/cli/mcp_tool_table_test.go @@ -255,115 +255,3 @@ func TestDefaultMCPToolTableOptions(t *testing.T) { t.Error("Expected default ShowVerboseHint to be false") } } - -func TestRenderMCPHierarchyTree(t *testing.T) { - // Create test configs - configs := []parser.MCPServerConfig{ - { - BaseMCPServerConfig: types.BaseMCPServerConfig{ - Type: "stdio", - }, - Name: "github", - Allowed: []string{"list_issues", "create_issue"}, - }, - { - BaseMCPServerConfig: types.BaseMCPServerConfig{ - Type: "stdio", - }, - Name: "filesystem", - Allowed: []string{"*"}, - }, - } - - // Create test server infos - serverInfos := map[string]*parser.MCPServerInfo{ - "github": { - Config: parser.MCPServerConfig{ - Name: "github", - Allowed: []string{"list_issues", "create_issue"}, - }, - Tools: []*mcp.Tool{ - {Name: "list_issues", Description: "List GitHub issues"}, - {Name: "create_issue", Description: "Create a new GitHub issue"}, - {Name: "list_pull_requests", Description: "List pull requests"}, - }, - Resources: []*mcp.Resource{ - {Name: "repo", URI: "github://repo"}, - }, - Roots: []*mcp.Root{ - {Name: "root", URI: "github://root"}, - }, - }, - "filesystem": { - Config: parser.MCPServerConfig{ - Name: "filesystem", - Allowed: []string{"*"}, - }, - Tools: []*mcp.Tool{ - {Name: "read_file", Description: "Read a file"}, - {Name: "write_file", Description: "Write to a file"}, - }, - }, - } - - // Render tree - output := renderMCPHierarchyTree(configs, serverInfos) - - // Verify output contains expected elements - expectedStrings := []string{ - "MCP Servers", - "github", - "filesystem", - "list_issues", - "create_issue", - "read_file", - "write_file", - "Tools", - "Resources", - "Roots", - } 
- - for _, expected := range expectedStrings { - if !strings.Contains(output, expected) { - t.Errorf("Expected output to contain '%s', but it didn't.\nGot:\n%s", expected, output) - } - } - - // Verify output is not empty - if output == "" { - t.Error("renderMCPHierarchyTree returned empty string") - } -} - -func TestRenderMCPHierarchyTree_EmptyConfigs(t *testing.T) { - configs := []parser.MCPServerConfig{} - serverInfos := map[string]*parser.MCPServerInfo{} - - output := renderMCPHierarchyTree(configs, serverInfos) - - if output != "" { - t.Errorf("Expected empty output for empty configs, got: %s", output) - } -} - -func TestRenderMCPHierarchyTree_MissingServerInfo(t *testing.T) { - configs := []parser.MCPServerConfig{ - { - BaseMCPServerConfig: types.BaseMCPServerConfig{ - Type: "stdio", - }, - Name: "missing-server", - }, - } - serverInfos := map[string]*parser.MCPServerInfo{} - - output := renderMCPHierarchyTree(configs, serverInfos) - - // Should still render, but with a warning - if !strings.Contains(output, "missing-server") { - t.Errorf("Expected output to contain server name, got: %s", output) - } - if !strings.Contains(output, "Server info not available") { - t.Errorf("Expected output to contain warning about missing info, got: %s", output) - } -} diff --git a/pkg/cli/preconditions.go b/pkg/cli/preconditions.go index d959882a1b..fd757f9b32 100644 --- a/pkg/cli/preconditions.go +++ b/pkg/cli/preconditions.go @@ -14,52 +14,6 @@ import ( var preconditionsLog = logger.New("cli:preconditions") -// PreconditionCheckResult holds the result of precondition checks -type PreconditionCheckResult struct { - RepoSlug string // The repository slug (owner/repo) - IsPublicRepo bool // Whether the repository is public -} - -// CheckInteractivePreconditions runs common precondition checks for interactive commands -// like `gh aw add` and `gh aw init`. 
These checks verify: -// - GitHub CLI authentication -// - Git repository presence -// - GitHub Actions enabled -// - User has write permissions -// -// The verbose parameter controls whether success messages are printed. -// Returns the repository slug and whether it's public on success. -func CheckInteractivePreconditions(verbose bool) (*PreconditionCheckResult, error) { - result := &PreconditionCheckResult{} - - // Step 1: Check gh auth status - if err := checkGHAuthStatusShared(verbose); err != nil { - return nil, err - } - - // Step 2: Check git repository and get org/repo - repoSlug, err := checkGitRepositoryShared(verbose) - if err != nil { - return nil, err - } - result.RepoSlug = repoSlug - - // Step 3: Check GitHub Actions is enabled - if err := checkActionsEnabledShared(repoSlug, verbose); err != nil { - return nil, err - } - - // Step 4: Check user permissions - if _, err := checkUserPermissionsShared(repoSlug, verbose); err != nil { - return nil, err - } - - // Step 5: Check repository visibility - result.IsPublicRepo = checkRepoVisibilityShared(repoSlug) - - return result, nil -} - // checkGHAuthStatusShared verifies the user is logged in to GitHub CLI func checkGHAuthStatusShared(verbose bool) error { preconditionsLog.Print("Checking GitHub CLI authentication status") @@ -84,42 +38,6 @@ func checkGHAuthStatusShared(verbose bool) error { return nil } -// checkGitRepositoryShared verifies we're in a git repo and returns the repo slug -func checkGitRepositoryShared(verbose bool) (string, error) { - preconditionsLog.Print("Checking git repository status") - - // Check if we're in a git repository - if !isGitRepo() { - fmt.Fprintln(os.Stderr, console.FormatErrorMessage("Not in a git repository.")) - fmt.Fprintln(os.Stderr, "") - fmt.Fprintln(os.Stderr, "Please navigate to a git repository or initialize one with:") - fmt.Fprintln(os.Stderr, "") - fmt.Fprintln(os.Stderr, console.FormatCommandMessage(" git init")) - fmt.Fprintln(os.Stderr, "") - return "", 
errors.New("not in a git repository") - } - - // Try to get the repository slug - repoSlug, err := GetCurrentRepoSlug() - if err != nil { - preconditionsLog.Printf("Could not determine repository automatically: %v", err) - fmt.Fprintln(os.Stderr, console.FormatErrorMessage("Could not determine the repository automatically.")) - fmt.Fprintln(os.Stderr, "") - fmt.Fprintln(os.Stderr, "Please ensure you have a remote configured:") - fmt.Fprintln(os.Stderr, "") - fmt.Fprintln(os.Stderr, console.FormatCommandMessage(" git remote add origin https://github.com/owner/repo.git")) - fmt.Fprintln(os.Stderr, "") - return "", fmt.Errorf("could not determine repository: %w", err) - } - - if verbose { - fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("Target repository: "+repoSlug)) - } - preconditionsLog.Printf("Target repository: %s", repoSlug) - - return repoSlug, nil -} - // checkActionsEnabledShared verifies that GitHub Actions is enabled for the repository // and that the allowed actions settings permit running agentic workflows func checkActionsEnabledShared(repoSlug string, verbose bool) error { diff --git a/pkg/console/console.go b/pkg/console/console.go index e0f9430e0c..35e0030da1 100644 --- a/pkg/console/console.go +++ b/pkg/console/console.go @@ -10,7 +10,6 @@ import ( "github.com/charmbracelet/lipgloss" "github.com/charmbracelet/lipgloss/table" - "github.com/charmbracelet/lipgloss/tree" "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/styles" "github.com/github/gh-aw/pkg/tty" @@ -200,11 +199,6 @@ func RenderTable(config TableConfig) string { return output.String() } -// FormatLocationMessage formats a file/directory location message -func FormatLocationMessage(message string) string { - return applyStyle(styles.Location, "📁 ") + message -} - // FormatCommandMessage formats a command execution message func FormatCommandMessage(command string) string { return applyStyle(styles.Command, "⚡ ") + command @@ -220,21 +214,11 @@ func 
FormatPromptMessage(message string) string { return applyStyle(styles.Prompt, "❓ ") + message } -// FormatCountMessage formats a count/numeric status message -func FormatCountMessage(message string) string { - return applyStyle(styles.Count, "📊 ") + message -} - // FormatVerboseMessage formats verbose debugging output func FormatVerboseMessage(message string) string { return applyStyle(styles.Verbose, "🔍 ") + message } -// FormatListHeader formats a section header for lists -func FormatListHeader(header string) string { - return applyStyle(styles.ListHeader, header) -} - // FormatListItem formats an item in a list func FormatListItem(item string) string { return applyStyle(styles.ListItem, " • "+item) @@ -322,34 +306,3 @@ func RenderComposedSections(sections []string) { fmt.Fprintln(os.Stderr, "") } } - -// RenderTree renders a hierarchical tree structure using lipgloss/tree package -func RenderTree(root TreeNode) string { - if !isTTY() { - return renderTreeSimple(root, "", true) - } - - lipglossTree := buildLipglossTree(root) - return lipglossTree.String() -} - -// buildLipglossTree converts our TreeNode structure to lipgloss/tree format -func buildLipglossTree(node TreeNode) *tree.Tree { - t := tree.Root(node.Value). - EnumeratorStyle(styles.TreeEnumerator). - ItemStyle(styles.TreeNode) - - if len(node.Children) > 0 { - children := make([]any, len(node.Children)) - for i, child := range node.Children { - if len(child.Children) > 0 { - children[i] = buildLipglossTree(child) - } else { - children[i] = child.Value - } - } - t.Child(children...) 
- } - - return t -} diff --git a/pkg/console/console_formatting_test.go b/pkg/console/console_formatting_test.go index ed57c367e0..f96cd19e06 100644 --- a/pkg/console/console_formatting_test.go +++ b/pkg/console/console_formatting_test.go @@ -147,51 +147,6 @@ func TestFormatPromptMessage(t *testing.T) { } } -func TestFormatCountMessage(t *testing.T) { - tests := []struct { - name string - message string - expected string - }{ - { - name: "file count", - message: "Found 15 workflows to compile", - expected: "Found 15 workflows to compile", - }, - { - name: "zero count", - message: "Found 0 issues", - expected: "Found 0 issues", - }, - { - name: "percentage", - message: "Coverage: 85.5%", - expected: "Coverage: 85.5%", - }, - { - name: "empty count message", - message: "", - expected: "", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := FormatCountMessage(tt.message) - - // Should contain the chart emoji prefix - if !strings.Contains(result, "📊") { - t.Errorf("FormatCountMessage() should contain 📊 prefix") - } - - // Should contain the message text - if !strings.Contains(result, tt.expected) { - t.Errorf("FormatCountMessage() = %v, should contain %v", result, tt.expected) - } - }) - } -} - func TestFormatVerboseMessage(t *testing.T) { tests := []struct { name string @@ -232,51 +187,6 @@ func TestFormatVerboseMessage(t *testing.T) { } } -func TestFormatListHeader(t *testing.T) { - tests := []struct { - name string - header string - expected string - }{ - { - name: "simple header", - header: "Available Workflows", - expected: "Available Workflows", - }, - { - name: "header with underscores", - header: "==================", - expected: "==================", - }, - { - name: "empty header", - header: "", - expected: "", - }, - { - name: "header with numbers", - header: "Section 1: Configuration", - expected: "Section 1: Configuration", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := 
FormatListHeader(tt.header) - - // Should contain the header text - if !strings.Contains(result, tt.expected) { - t.Errorf("FormatListHeader() = %v, should contain %v", result, tt.expected) - } - - // Result should not be empty unless input was empty - if tt.header != "" && result == "" { - t.Errorf("FormatListHeader() returned empty string for non-empty input") - } - }) - } -} - func TestFormatListItem(t *testing.T) { tests := []struct { name string @@ -432,9 +342,7 @@ func TestFormattingFunctionsWithSpecialCharacters(t *testing.T) { FormatCommandMessage(specialChars) FormatProgressMessage(specialChars) FormatPromptMessage(specialChars) - FormatCountMessage(specialChars) FormatVerboseMessage(specialChars) - FormatListHeader(specialChars) FormatListItem(specialChars) FormatErrorMessage(specialChars) }) diff --git a/pkg/console/console_test.go b/pkg/console/console_test.go index 3be2edcd5e..ce95e7f715 100644 --- a/pkg/console/console_test.go +++ b/pkg/console/console_test.go @@ -268,16 +268,6 @@ func TestRenderTable(t *testing.T) { } } -func TestFormatLocationMessage(t *testing.T) { - output := FormatLocationMessage("Downloaded to: /path/to/logs") - if !strings.Contains(output, "Downloaded to: /path/to/logs") { - t.Errorf("Expected output to contain message, got: %s", output) - } - if !strings.Contains(output, "📁") { - t.Errorf("Expected output to contain folder icon, got: %s", output) - } -} - func TestToRelativePath(t *testing.T) { tests := []struct { name string @@ -370,62 +360,6 @@ func TestFormatErrorWithAbsolutePaths(t *testing.T) { } } -func TestRenderTableAsJSON(t *testing.T) { - tests := []struct { - name string - config TableConfig - wantErr bool - }{ - { - name: "simple table", - config: TableConfig{ - Headers: []string{"Name", "Status"}, - Rows: [][]string{ - {"workflow1", "active"}, - {"workflow2", "disabled"}, - }, - }, - wantErr: false, - }, - { - name: "table with spaces in headers", - config: TableConfig{ - Headers: []string{"Workflow Name", "Agent 
Type", "Is Compiled"}, - Rows: [][]string{ - {"test", "copilot", "Yes"}, - }, - }, - wantErr: false, - }, - { - name: "empty table", - config: TableConfig{ - Headers: []string{}, - Rows: [][]string{}, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := RenderTableAsJSON(tt.config) - if (err != nil) != tt.wantErr { - t.Errorf("RenderTableAsJSON() error = %v, wantErr %v", err, tt.wantErr) - return - } - // Verify it's valid JSON - if result == "" && len(tt.config.Headers) > 0 { - t.Error("RenderTableAsJSON() returned empty string for non-empty config") - } - // For empty config, should return "[]" - if len(tt.config.Headers) == 0 && result != "[]" { - t.Errorf("RenderTableAsJSON() = %v, want []", result) - } - }) - } -} - func TestClearScreen(t *testing.T) { // ClearScreen should not panic when called // It only clears if stdout is a TTY, so we can't easily test the output @@ -454,205 +388,6 @@ func TestClearLine(t *testing.T) { }) } -func TestRenderTree(t *testing.T) { - tests := []struct { - name string - tree TreeNode - expected []string // Substrings that should be present in output - }{ - { - name: "simple tree with no children", - tree: TreeNode{ - Value: "Root", - Children: []TreeNode{}, - }, - expected: []string{"Root"}, - }, - { - name: "tree with single level children", - tree: TreeNode{ - Value: "Root", - Children: []TreeNode{ - {Value: "Child1", Children: []TreeNode{}}, - {Value: "Child2", Children: []TreeNode{}}, - {Value: "Child3", Children: []TreeNode{}}, - }, - }, - expected: []string{ - "Root", - "Child1", - "Child2", - "Child3", - }, - }, - { - name: "tree with nested children", - tree: TreeNode{ - Value: "Workflow", - Children: []TreeNode{ - { - Value: "Setup", - Children: []TreeNode{ - {Value: "Install dependencies", Children: []TreeNode{}}, - {Value: "Configure environment", Children: []TreeNode{}}, - }, - }, - { - Value: "Build", - Children: []TreeNode{ - {Value: "Compile source", 
Children: []TreeNode{}}, - {Value: "Run tests", Children: []TreeNode{}}, - }, - }, - {Value: "Deploy", Children: []TreeNode{}}, - }, - }, - expected: []string{ - "Workflow", - "Setup", - "Install dependencies", - "Configure environment", - "Build", - "Compile source", - "Run tests", - "Deploy", - }, - }, - { - name: "tree with MCP server hierarchy", - tree: TreeNode{ - Value: "MCP Servers", - Children: []TreeNode{ - { - Value: "github", - Children: []TreeNode{ - {Value: "list_issues", Children: []TreeNode{}}, - {Value: "create_issue", Children: []TreeNode{}}, - {Value: "list_pull_requests", Children: []TreeNode{}}, - }, - }, - { - Value: "filesystem", - Children: []TreeNode{ - {Value: "read_file", Children: []TreeNode{}}, - {Value: "write_file", Children: []TreeNode{}}, - }, - }, - }, - }, - expected: []string{ - "MCP Servers", - "github", - "list_issues", - "create_issue", - "list_pull_requests", - "filesystem", - "read_file", - "write_file", - }, - }, - { - name: "deeply nested tree", - tree: TreeNode{ - Value: "Level 1", - Children: []TreeNode{ - { - Value: "Level 2", - Children: []TreeNode{ - { - Value: "Level 3", - Children: []TreeNode{ - {Value: "Level 4", Children: []TreeNode{}}, - }, - }, - }, - }, - }, - }, - expected: []string{ - "Level 1", - "Level 2", - "Level 3", - "Level 4", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := RenderTree(tt.tree) - - // Check that all expected strings are present - for _, expected := range tt.expected { - if !strings.Contains(output, expected) { - t.Errorf("RenderTree() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - - // Verify output is not empty - if output == "" { - t.Error("RenderTree() returned empty string") - } - }) - } -} - -func TestRenderTreeSimple(t *testing.T) { - tests := []struct { - name string - tree TreeNode - expected []string // Substrings that should be present - }{ - { - name: "simple tree structure", - tree: TreeNode{ - Value: 
"Root", - Children: []TreeNode{ - {Value: "Child1", Children: []TreeNode{}}, - {Value: "Child2", Children: []TreeNode{}}, - }, - }, - expected: []string{ - "Root", - "Child1", - "Child2", - }, - }, - { - name: "nested tree structure", - tree: TreeNode{ - Value: "Parent", - Children: []TreeNode{ - { - Value: "Child", - Children: []TreeNode{ - {Value: "Grandchild", Children: []TreeNode{}}, - }, - }, - }, - }, - expected: []string{ - "Parent", - "Child", - "Grandchild", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Use renderTreeSimple directly for testing - output := renderTreeSimple(tt.tree, "", true) - - for _, expected := range tt.expected { - if !strings.Contains(output, expected) { - t.Errorf("renderTreeSimple() output missing expected string '%s'\nGot:\n%s", expected, output) - } - } - }) - } -} - func TestRenderTitleBox(t *testing.T) { tests := []struct { name string diff --git a/pkg/console/console_wasm.go b/pkg/console/console_wasm.go index 8de124fbee..c2b1ea8525 100644 --- a/pkg/console/console_wasm.go +++ b/pkg/console/console_wasm.go @@ -127,5 +127,27 @@ func RenderComposedSections(sections []string) { } func RenderTree(root TreeNode) string { - return renderTreeSimple(root, "", true) + var render func(node TreeNode, prefix string, isLast bool) string + render = func(node TreeNode, prefix string, isLast bool) string { + var output strings.Builder + if prefix == "" { + output.WriteString(node.Value + "\n") + } else { + connector := "├── " + if isLast { + connector = "└── " + } + output.WriteString(prefix + connector + node.Value + "\n") + } + for i, child := range node.Children { + childIsLast := i == len(node.Children)-1 + childPrefix := prefix + "│ " + if isLast { + childPrefix = prefix + " " + } + output.WriteString(render(child, childPrefix, childIsLast)) + } + return output.String() + } + return render(root, "", true) } diff --git a/pkg/console/golden_test.go b/pkg/console/golden_test.go index 
648da3cf65..7fe7ea52f4 100644 --- a/pkg/console/golden_test.go +++ b/pkg/console/golden_test.go @@ -131,118 +131,6 @@ func TestGolden_BoxRendering(t *testing.T) { // TestGolden_LayoutBoxRendering tests layout box rendering (returns string) -// TestGolden_TreeRendering tests tree rendering with different hierarchies -func TestGolden_TreeRendering(t *testing.T) { - tests := []struct { - name string - tree TreeNode - }{ - { - name: "single_node", - tree: TreeNode{ - Value: "Root", - Children: []TreeNode{}, - }, - }, - { - name: "flat_tree", - tree: TreeNode{ - Value: "Root", - Children: []TreeNode{ - {Value: "Child1", Children: []TreeNode{}}, - {Value: "Child2", Children: []TreeNode{}}, - {Value: "Child3", Children: []TreeNode{}}, - }, - }, - }, - { - name: "nested_tree", - tree: TreeNode{ - Value: "Workflow", - Children: []TreeNode{ - { - Value: "Setup", - Children: []TreeNode{ - {Value: "Install dependencies", Children: []TreeNode{}}, - {Value: "Configure environment", Children: []TreeNode{}}, - }, - }, - { - Value: "Build", - Children: []TreeNode{ - {Value: "Compile source", Children: []TreeNode{}}, - {Value: "Run tests", Children: []TreeNode{}}, - }, - }, - {Value: "Deploy", Children: []TreeNode{}}, - }, - }, - }, - { - name: "deep_hierarchy", - tree: TreeNode{ - Value: "Level 1", - Children: []TreeNode{ - { - Value: "Level 2", - Children: []TreeNode{ - { - Value: "Level 3", - Children: []TreeNode{ - { - Value: "Level 4", - Children: []TreeNode{ - {Value: "Level 5", Children: []TreeNode{}}, - }, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "mcp_server_tree", - tree: TreeNode{ - Value: "MCP Servers", - Children: []TreeNode{ - { - Value: "github", - Children: []TreeNode{ - {Value: "list_issues", Children: []TreeNode{}}, - {Value: "create_issue", Children: []TreeNode{}}, - {Value: "list_pull_requests", Children: []TreeNode{}}, - {Value: "create_pull_request", Children: []TreeNode{}}, - }, - }, - { - Value: "filesystem", - Children: []TreeNode{ - {Value: 
"read_file", Children: []TreeNode{}}, - {Value: "write_file", Children: []TreeNode{}}, - {Value: "list_directory", Children: []TreeNode{}}, - }, - }, - { - Value: "bash", - Children: []TreeNode{ - {Value: "execute", Children: []TreeNode{}}, - }, - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := RenderTree(tt.tree) - golden.RequireEqual(t, []byte(output)) - }) - } -} - // TestGolden_ErrorFormatting tests error formatting with context func TestGolden_ErrorFormatting(t *testing.T) { tests := []struct { @@ -409,11 +297,6 @@ func TestGolden_MessageFormatting(t *testing.T) { message: "Failed to compile workflow", format: FormatErrorMessage, }, - { - name: "location_message", - message: "Downloaded to: /tmp/logs/workflow-123", - format: FormatLocationMessage, - }, { name: "command_message", message: "gh aw compile workflow.md", diff --git a/pkg/console/input.go b/pkg/console/input.go index c6e87ba2fe..9697c524f3 100644 --- a/pkg/console/input.go +++ b/pkg/console/input.go @@ -9,33 +9,6 @@ import ( "github.com/github/gh-aw/pkg/tty" ) -// PromptInput shows an interactive text input prompt using Bubble Tea (huh) -// Returns the entered text or an error -func PromptInput(title, description, placeholder string) (string, error) { - // Check if stdin is a TTY - if not, we can't show interactive forms - if !tty.IsStderrTerminal() { - return "", errors.New("interactive input not available (not a TTY)") - } - - var value string - - form := huh.NewForm( - huh.NewGroup( - huh.NewInput(). - Title(title). - Description(description). - Placeholder(placeholder). 
- Value(&value), - ), - ).WithAccessible(IsAccessibleMode()) - - if err := form.Run(); err != nil { - return "", err - } - - return value, nil -} - // PromptSecretInput shows an interactive password input prompt with masking // The input is masked for security and includes validation // Returns the entered secret value or an error @@ -69,31 +42,3 @@ func PromptSecretInput(title, description string) (string, error) { return value, nil } - -// PromptInputWithValidation shows an interactive text input with custom validation -// Returns the entered text or an error -func PromptInputWithValidation(title, description, placeholder string, validate func(string) error) (string, error) { - // Check if stdin is a TTY - if not, we can't show interactive forms - if !tty.IsStderrTerminal() { - return "", errors.New("interactive input not available (not a TTY)") - } - - var value string - - form := huh.NewForm( - huh.NewGroup( - huh.NewInput(). - Title(title). - Description(description). - Placeholder(placeholder). - Validate(validate). 
- Value(&value), - ), - ).WithAccessible(IsAccessibleMode()) - - if err := form.Run(); err != nil { - return "", err - } - - return value, nil -} diff --git a/pkg/console/input_test.go b/pkg/console/input_test.go index 6db522d6da..bd887c5c7f 100644 --- a/pkg/console/input_test.go +++ b/pkg/console/input_test.go @@ -3,37 +3,12 @@ package console import ( - "errors" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestPromptInput(t *testing.T) { - // Note: Interactive Huh forms cannot be fully tested without a mock terminal - // These tests verify function signatures and basic setup - - t.Run("function signature", func(t *testing.T) { - // Verify the function exists and has the right signature - _ = PromptInput - }) - - t.Run("validates parameters", func(t *testing.T) { - // Test that empty title and description don't cause panics - // In a real terminal, this would show a prompt with empty fields - title := "Test Title" - description := "Test Description" - placeholder := "Enter value" - - // Function exists and parameters are accepted - _, err := PromptInput(title, description, placeholder) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) -} - func TestPromptSecretInput(t *testing.T) { t.Run("function signature", func(t *testing.T) { // Verify the function exists and has the right signature @@ -51,28 +26,3 @@ func TestPromptSecretInput(t *testing.T) { assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") }) } - -func TestPromptInputWithValidation(t *testing.T) { - t.Run("function signature", func(t *testing.T) { - // Verify the function exists and has the right signature - _ = PromptInputWithValidation - }) - - t.Run("accepts custom validator", func(t *testing.T) { - title := "Test Title" - description := "Test Description" - placeholder := "Enter 
value" - validator := func(s string) error { - if len(s) < 3 { - return errors.New("must be at least 3 characters") - } - return nil - } - - // Function exists and parameters are accepted - _, err := PromptInputWithValidation(title, description, placeholder, validator) - // Will error in test environment (no TTY), but that's expected - require.Error(t, err, "Should error when not in TTY") - assert.Contains(t, err.Error(), "not a TTY", "Error should mention TTY") - }) -} diff --git a/pkg/console/progress.go b/pkg/console/progress.go index 482bac5ac4..96cd9a9206 100644 --- a/pkg/console/progress.go +++ b/pkg/console/progress.go @@ -66,31 +66,6 @@ func NewProgressBar(total int64) *ProgressBar { } } -// NewIndeterminateProgressBar creates a progress bar for when the total size is unknown -// This mode shows activity without a specific completion percentage, useful for: -// - Streaming downloads with unknown size -// - Processing unknown number of items -// - Operations where duration cannot be predicted -// -// The progress bar automatically adapts to TTY/non-TTY environments -func NewIndeterminateProgressBar() *ProgressBar { - progressLog.Print("Creating indeterminate progress bar") - prog := progress.New( - progress.WithScaledGradient("#BD93F9", "#8BE9FD"), - progress.WithWidth(40), - ) - - prog.EmptyColor = "#6272A4" // Muted purple-gray - - return &ProgressBar{ - progress: prog, - total: 0, - current: 0, - indeterminate: true, - updateCount: 0, - } -} - // Update updates the current progress and returns a formatted string // In determinate mode: // - TTY: Returns a visual progress bar with gradient and percentage diff --git a/pkg/console/progress_test.go b/pkg/console/progress_test.go index 2214bda14d..5f7f03e452 100644 --- a/pkg/console/progress_test.go +++ b/pkg/console/progress_test.go @@ -328,93 +328,10 @@ func TestProgressBarNonTTYFallback(t *testing.T) { }) } -func TestNewIndeterminateProgressBar(t *testing.T) { - t.Run("creates indeterminate progress bar 
successfully", func(t *testing.T) { - bar := NewIndeterminateProgressBar() - - require.NotNil(t, bar, "NewIndeterminateProgressBar should not return nil") - assert.Equal(t, int64(0), bar.total, "Total should be 0 for indeterminate mode") - assert.Equal(t, int64(0), bar.current, "Current should start at 0") - assert.True(t, bar.indeterminate, "Indeterminate flag should be true") - require.NotNil(t, bar.progress, "Progress model should be initialized") - }) -} - -func TestIndeterminateProgressBarUpdate(t *testing.T) { - t.Run("indeterminate mode with no data", func(t *testing.T) { - bar := NewIndeterminateProgressBar() - output := bar.Update(0) - - assert.NotEmpty(t, output, "Update should return non-empty string") - - // In non-TTY mode, should show "Processing..." - if !isTTY() { - assert.Equal(t, "Processing...", output, "Should show processing indicator") - } - }) - - t.Run("indeterminate mode with current value", func(t *testing.T) { - bar := NewIndeterminateProgressBar() - output := bar.Update(1024 * 1024) // 1MB processed - - assert.NotEmpty(t, output, "Update should return non-empty string") - - // In non-TTY mode, should show current value - if !isTTY() { - assert.Contains(t, output, "Processing...", "Should show processing text") - assert.Contains(t, output, "1.0MB", "Should show current size") - } - }) - - t.Run("indeterminate mode multiple updates", func(t *testing.T) { - bar := NewIndeterminateProgressBar() - - // Simulate progressive updates without known total - updates := []int64{0, 512 * 1024, 1024 * 1024, 2 * 1024 * 1024} - for _, value := range updates { - output := bar.Update(value) - assert.NotEmpty(t, output, "Each update should produce output") - assert.Equal(t, value, bar.current, "Current should track the latest update") - } - }) - - t.Run("indeterminate mode produces varying output", func(t *testing.T) { - // Skip if not in TTY mode as the pulsing effect is only visible in TTY - if !isTTY() { - t.Skip("Test requires TTY mode to validate 
pulsing effect") - } - - bar := NewIndeterminateProgressBar() - - // Update with different values to create pulse effect - outputs := make([]string, 8) - for i := range outputs { - outputs[i] = bar.Update(int64(i * 100)) - } - - // In TTY mode, outputs should vary (pulsing effect) - // We just verify they're all non-empty and at least some are different - allSame := true - for i := 1; i < len(outputs); i++ { - if outputs[i] != outputs[0] { - allSame = false - break - } - } - assert.False(t, allSame, "Indeterminate progress should produce varying visual output for pulsing effect") - }) -} - func TestProgressBarModeSelection(t *testing.T) { t.Run("determinate mode has total and not indeterminate", func(t *testing.T) { bar := NewProgressBar(1024) assert.Equal(t, int64(1024), bar.total, "Determinate mode should have total") assert.False(t, bar.indeterminate, "Determinate mode should not be indeterminate") }) - - t.Run("indeterminate mode has no total and is indeterminate", func(t *testing.T) { - bar := NewIndeterminateProgressBar() - assert.Equal(t, int64(0), bar.total, "Indeterminate mode should have zero total") - assert.True(t, bar.indeterminate, "Indeterminate mode should be indeterminate") - }) } diff --git a/pkg/console/render.go b/pkg/console/render.go index 47496ecacc..5faa8baeaa 100644 --- a/pkg/console/render.go +++ b/pkg/console/render.go @@ -1,7 +1,6 @@ package console import ( - "encoding/json" "fmt" "os" "path/filepath" @@ -604,32 +603,6 @@ func ToRelativePath(path string) string { return relPath } -// RenderTableAsJSON renders a table configuration as JSON -func RenderTableAsJSON(config TableConfig) (string, error) { - if len(config.Headers) == 0 { - return "[]", nil - } - - var result []map[string]string - for _, row := range config.Rows { - obj := make(map[string]string) - for i, cell := range row { - if i < len(config.Headers) { - key := strings.ToLower(strings.ReplaceAll(config.Headers[i], " ", "_")) - obj[key] = cell - } - } - result = 
append(result, obj) - } - - jsonBytes, err := json.Marshal(result) - if err != nil { - return "", fmt.Errorf("failed to marshal table to JSON: %w", err) - } - - return string(jsonBytes), nil -} - // FormatErrorWithSuggestions formats an error message with actionable suggestions func FormatErrorWithSuggestions(message string, suggestions []string) string { var output strings.Builder @@ -645,38 +618,6 @@ func FormatErrorWithSuggestions(message string, suggestions []string) string { return output.String() } -// renderTreeSimple renders a simple text-based tree without styling -func renderTreeSimple(node TreeNode, prefix string, isLast bool) string { - var output strings.Builder - - connector := "├── " - if isLast { - connector = "└── " - } - if prefix == "" { - output.WriteString(node.Value + "\n") - } else { - output.WriteString(prefix + connector + node.Value + "\n") - } - - for i, child := range node.Children { - childIsLast := i == len(node.Children)-1 - var childPrefix string - if prefix == "" { - childPrefix = "" - } else { - if isLast { - childPrefix = prefix + " " - } else { - childPrefix = prefix + "│ " - } - } - output.WriteString(renderTreeSimple(child, childPrefix, childIsLast)) - } - - return output.String() -} - // findWordEnd finds the end of a word starting at the given position func findWordEnd(line string, start int) int { if start >= len(line) { diff --git a/pkg/console/spinner.go b/pkg/console/spinner.go index d378478be2..1bea3b6181 100644 --- a/pkg/console/spinner.go +++ b/pkg/console/spinner.go @@ -177,5 +177,3 @@ func (s *SpinnerWrapper) UpdateMessage(message string) { } } } - -func (s *SpinnerWrapper) IsEnabled() bool { return s.enabled } diff --git a/pkg/console/spinner_test.go b/pkg/console/spinner_test.go index ab89311459..e719faecd1 100644 --- a/pkg/console/spinner_test.go +++ b/pkg/console/spinner_test.go @@ -38,12 +38,6 @@ func TestSpinnerAccessibilityMode(t *testing.T) { // Spinner should be disabled when ACCESSIBLE is set // Note: This 
may still be true if running in non-TTY environment - if spinner.IsEnabled() { - // Only check if we're actually in a TTY - // In CI/test environments, spinner will be disabled regardless - t.Log("Spinner enabled despite ACCESSIBLE=1 (may be expected in non-TTY)") - } - // Ensure no panic when starting/stopping disabled spinner spinner.Start() spinner.Stop() @@ -67,17 +61,6 @@ func TestSpinnerUpdateMessage(t *testing.T) { spinner.Stop() } -func TestSpinnerIsEnabled(t *testing.T) { - spinner := NewSpinner("Test message") - - // IsEnabled should return a boolean without panicking - enabled := spinner.IsEnabled() - - // The value depends on whether we're running in a TTY or not - // but the method should not panic - _ = enabled -} - func TestSpinnerStopWithMessage(t *testing.T) { spinner := NewSpinner("Processing...") @@ -185,9 +168,7 @@ func TestSpinnerDisabledOperations(t *testing.T) { spinner.StopWithMessage("Final message") // Check that spinner is disabled - if spinner.IsEnabled() && os.Getenv("ACCESSIBLE") != "" { - t.Error("Spinner should be disabled when ACCESSIBLE is set") - } + _ = spinner } func TestSpinnerRapidStartStop(t *testing.T) { diff --git a/pkg/console/terminal.go b/pkg/console/terminal.go index 1436b21985..91e4782542 100644 --- a/pkg/console/terminal.go +++ b/pkg/console/terminal.go @@ -35,22 +35,6 @@ func ClearLine() { } } -// MoveCursorUp moves cursor up n lines if stderr is a TTY. -// Uses ANSI escape code: \033[nA where n is the number of lines. -func MoveCursorUp(n int) { - if tty.IsStderrTerminal() { - fmt.Fprintf(os.Stderr, "\033[%dA", n) - } -} - -// MoveCursorDown moves cursor down n lines if stderr is a TTY. -// Uses ANSI escape code: \033[nB where n is the number of lines. -func MoveCursorDown(n int) { - if tty.IsStderrTerminal() { - fmt.Fprintf(os.Stderr, "\033[%dB", n) - } -} - // ShowWelcomeBanner clears the screen and displays the welcome banner for interactive commands. 
// Use this at the start of interactive commands (add, trial, init) for a consistent experience. func ShowWelcomeBanner(description string) { diff --git a/pkg/console/terminal_test.go b/pkg/console/terminal_test.go deleted file mode 100644 index 3b643d84b0..0000000000 --- a/pkg/console/terminal_test.go +++ /dev/null @@ -1,173 +0,0 @@ -//go:build !integration - -package console - -import ( - "bytes" - "io" - "os" - "testing" - - "github.com/stretchr/testify/assert" -) - -// captureStderr captures stderr output during function execution -func captureStderr(t *testing.T, fn func()) string { - t.Helper() - - // Save original stderr - oldStderr := os.Stderr - - // Create a pipe to capture stderr - r, w, err := os.Pipe() - if err != nil { - t.Fatalf("Failed to create pipe: %v", err) - } - - // Replace stderr with the write end of the pipe - os.Stderr = w - - // Create a channel to receive the captured output - outputChan := make(chan string, 1) - - // Read from the pipe in a goroutine - go func() { - var buf bytes.Buffer - io.Copy(&buf, r) - outputChan <- buf.String() - }() - - // Execute the function - fn() - - // Close the write end and restore stderr - w.Close() - os.Stderr = oldStderr - - // Get the captured output - output := <-outputChan - r.Close() - - return output -} - -func TestMoveCursorUp(t *testing.T) { - tests := []struct { - name string - lines int - }{ - { - name: "move up 1 line", - lines: 1, - }, - { - name: "move up 5 lines", - lines: 5, - }, - { - name: "move up 0 lines", - lines: 0, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := captureStderr(t, func() { - MoveCursorUp(tt.lines) - }) - - // In non-TTY environments, output should be empty - // We just ensure no panic occurs - assert.NotNil(t, output, "MoveCursorUp should not panic") - }) - } -} - -func TestMoveCursorDown(t *testing.T) { - tests := []struct { - name string - lines int - }{ - { - name: "move down 1 line", - lines: 1, - }, - { - name: "move down 
5 lines", - lines: 5, - }, - { - name: "move down 0 lines", - lines: 0, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := captureStderr(t, func() { - MoveCursorDown(tt.lines) - }) - - // In non-TTY environments, output should be empty - // We just ensure no panic occurs - assert.NotNil(t, output, "MoveCursorDown should not panic") - }) - } -} - -func TestTerminalCursorFunctionsNoTTY(t *testing.T) { - // This test verifies that in non-TTY environments (like CI/tests), - // no ANSI codes are emitted for cursor movement functions - - tests := []struct { - name string - fn func() - }{ - { - name: "MoveCursorUp", - fn: func() { MoveCursorUp(5) }, - }, - { - name: "MoveCursorDown", - fn: func() { MoveCursorDown(3) }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - output := captureStderr(t, tt.fn) - - // Since tests typically run in non-TTY, verify output is empty - // This ensures we properly respect TTY detection - if os.Getenv("CI") != "" || !isRealTerminal() { - assert.Empty(t, output, "%s should not output ANSI codes in non-TTY", tt.name) - } - }) - } -} - -// isRealTerminal checks if we're actually running in a terminal -// This is a helper to distinguish between test environments and real terminals -func isRealTerminal() bool { - // In test environments, stderr is typically redirected - fileInfo, err := os.Stderr.Stat() - if err != nil { - return false - } - // Check if stderr is a character device (terminal) - return (fileInfo.Mode() & os.ModeCharDevice) != 0 -} - -func TestTerminalCursorFunctionsDoNotPanic(t *testing.T) { - // Ensure all cursor movement functions can be called safely without panicking - // even in edge cases - - t.Run("all cursor functions", func(t *testing.T) { - assert.NotPanics(t, func() { - MoveCursorUp(0) - MoveCursorUp(100) - MoveCursorDown(0) - MoveCursorDown(100) - }, "Cursor movement functions should never panic") - }) -} diff --git a/pkg/constants/constants.go 
b/pkg/constants/constants.go index fd773518cb..2d3f9fe693 100644 --- a/pkg/constants/constants.go +++ b/pkg/constants/constants.go @@ -1,7 +1,6 @@ package constants import ( - "fmt" "path/filepath" "time" ) @@ -34,16 +33,6 @@ const CLIExtensionPrefix CommandPrefix = "gh aw" // } type LineLength int -// String returns the string representation of the line length -func (l LineLength) String() string { - return fmt.Sprintf("%d", l) -} - -// IsValid returns true if the line length is positive -func (l LineLength) IsValid() bool { - return l > 0 -} - // Version represents a software version string. // This semantic type distinguishes version strings from arbitrary strings, // enabling future validation logic (e.g., semver parsing) and making @@ -75,16 +64,6 @@ func (v Version) IsValid() bool { // func IsFeatureEnabled(flag FeatureFlag) bool { ... } type FeatureFlag string -// String returns the string representation of the feature flag -func (f FeatureFlag) String() string { - return string(f) -} - -// IsValid returns true if the feature flag is non-empty -func (f FeatureFlag) IsValid() bool { - return len(f) > 0 -} - // URL represents a URL string. // This semantic type distinguishes URLs from arbitrary strings, // making URL parameters explicit and enabling future validation logic. @@ -95,16 +74,6 @@ func (f FeatureFlag) IsValid() bool { // func FetchFromRegistry(url URL) error { ... } type URL string -// String returns the string representation of the URL -func (u URL) String() string { - return string(u) -} - -// IsValid returns true if the URL is non-empty -func (u URL) IsValid() bool { - return len(u) > 0 -} - // ModelName represents an AI model name identifier. // This semantic type distinguishes model names from arbitrary strings, // making model selection explicit in function signatures. @@ -115,16 +84,6 @@ func (u URL) IsValid() bool { // func ExecuteWithModel(model ModelName) error { ... 
} type ModelName string -// String returns the string representation of the model name -func (m ModelName) String() string { - return string(m) -} - -// IsValid returns true if the model name is non-empty -func (m ModelName) IsValid() bool { - return len(m) > 0 -} - // JobName represents a GitHub Actions job identifier. // This semantic type distinguishes job names from arbitrary strings, // preventing mixing of job identifiers with other string types. @@ -195,16 +154,6 @@ func (c CommandPrefix) IsValid() bool { // func CompileWorkflow(id WorkflowID) error { ... } type WorkflowID string -// String returns the string representation of the workflow ID -func (w WorkflowID) String() string { - return string(w) -} - -// IsValid returns true if the workflow ID is non-empty -func (w WorkflowID) IsValid() bool { - return len(w) > 0 -} - // EngineName represents an AI engine name identifier (copilot, claude, codex, custom). // This semantic type distinguishes engine names from arbitrary strings, // making engine selection explicit and type-safe. @@ -215,16 +164,6 @@ func (w WorkflowID) IsValid() bool { // func SetEngine(engine EngineName) error { ... } type EngineName string -// String returns the string representation of the engine name -func (e EngineName) String() string { - return string(e) -} - -// IsValid returns true if the engine name is non-empty -func (e EngineName) IsValid() bool { - return len(e) > 0 -} - // DocURL represents a documentation URL for error messages and help text. // This semantic type distinguishes documentation URLs from arbitrary URLs, // making documentation references explicit and centralized for easier maintenance. 
@@ -666,11 +605,6 @@ func (m MCPServerID) String() string { return string(m) } -// IsValid returns true if the MCP server ID is non-empty -func (m MCPServerID) IsValid() bool { - return len(m) > 0 -} - // SafeOutputsMCPServerID is the identifier for the safe-outputs MCP server const SafeOutputsMCPServerID MCPServerID = "safeoutputs" diff --git a/pkg/constants/constants_test.go b/pkg/constants/constants_test.go index 10ef06fabc..0d35d74e26 100644 --- a/pkg/constants/constants_test.go +++ b/pkg/constants/constants_test.go @@ -456,14 +456,6 @@ func TestSemanticTypeAliases(t *testing.T) { if string(testWorkflow) != "ci-doctor" { t.Errorf("WorkflowID conversion failed: got %q, want %q", testWorkflow, "ci-doctor") } - - // Test that WorkflowID can hold typical workflow identifiers - workflows := []WorkflowID{"ci-doctor", "deploy-prod", "test-workflow"} - for i, wf := range workflows { - if !wf.IsValid() { - t.Errorf("WorkflowID[%d] should be valid: %q", i, wf) - } - } }) // Test EngineName type @@ -528,26 +520,6 @@ func TestTypeSafetyBetweenSemanticTypes(t *testing.T) { // TestHelperMethods tests the helper methods on semantic types func TestHelperMethods(t *testing.T) { - t.Run("LineLength", func(t *testing.T) { - length := LineLength(120) - if length.String() != "120" { - t.Errorf("LineLength.String() = %q, want %q", length.String(), "120") - } - if !length.IsValid() { - t.Error("LineLength.IsValid() = false, want true for positive value") - } - - invalidLength := LineLength(0) - if invalidLength.IsValid() { - t.Error("LineLength.IsValid() = true, want false for zero value") - } - - negativeLength := LineLength(-1) - if negativeLength.IsValid() { - t.Error("LineLength.IsValid() = true, want false for negative value") - } - }) - t.Run("Version", func(t *testing.T) { version := Version("1.0.0") if version.String() != "1.0.0" { @@ -563,51 +535,6 @@ func TestHelperMethods(t *testing.T) { } }) - t.Run("FeatureFlag", func(t *testing.T) { - flag := FeatureFlag("test-flag") - 
if flag.String() != "test-flag" { - t.Errorf("FeatureFlag.String() = %q, want %q", flag.String(), "test-flag") - } - if !flag.IsValid() { - t.Error("FeatureFlag.IsValid() = false, want true for non-empty value") - } - - emptyFlag := FeatureFlag("") - if emptyFlag.IsValid() { - t.Error("FeatureFlag.IsValid() = true, want false for empty value") - } - }) - - t.Run("URL", func(t *testing.T) { - url := URL("https://example.com") - if url.String() != "https://example.com" { - t.Errorf("URL.String() = %q, want %q", url.String(), "https://example.com") - } - if !url.IsValid() { - t.Error("URL.IsValid() = false, want true for non-empty value") - } - - emptyURL := URL("") - if emptyURL.IsValid() { - t.Error("URL.IsValid() = true, want false for empty value") - } - }) - - t.Run("ModelName", func(t *testing.T) { - model := ModelName("gpt-5-mini") - if model.String() != "gpt-5-mini" { - t.Errorf("ModelName.String() = %q, want %q", model.String(), "gpt-5-mini") - } - if !model.IsValid() { - t.Error("ModelName.IsValid() = false, want true for non-empty value") - } - - emptyModel := ModelName("") - if emptyModel.IsValid() { - t.Error("ModelName.IsValid() = true, want false for empty value") - } - }) - t.Run("JobName", func(t *testing.T) { job := JobName("agent") if job.String() != "agent" { @@ -653,35 +580,6 @@ func TestHelperMethods(t *testing.T) { } }) - t.Run("WorkflowID", func(t *testing.T) { - workflow := WorkflowID("ci-doctor") - if workflow.String() != "ci-doctor" { - t.Errorf("WorkflowID.String() = %q, want %q", workflow.String(), "ci-doctor") - } - if !workflow.IsValid() { - t.Error("WorkflowID.IsValid() = false, want true for non-empty value") - } - - emptyWorkflow := WorkflowID("") - if emptyWorkflow.IsValid() { - t.Error("WorkflowID.IsValid() = true, want false for empty value") - } - }) - - t.Run("EngineName", func(t *testing.T) { - engine := EngineName("copilot") - if engine.String() != "copilot" { - t.Errorf("EngineName.String() = %q, want %q", engine.String(), 
"copilot") - } - if !engine.IsValid() { - t.Error("EngineName.IsValid() = false, want true for non-empty value") - } - - emptyEngine := EngineName("") - if emptyEngine.IsValid() { - t.Error("EngineName.IsValid() = true, want false for empty value") - } - }) } func TestGetAllEngineSecretNames(t *testing.T) { diff --git a/pkg/fileutil/fileutil.go b/pkg/fileutil/fileutil.go index 8b0eba2ce7..505a796972 100644 --- a/pkg/fileutil/fileutil.go +++ b/pkg/fileutil/fileutil.go @@ -101,22 +101,3 @@ func CopyFile(src, dst string) error { log.Printf("File copied successfully: src=%s, dst=%s", src, dst) return out.Sync() } - -// CalculateDirectorySize recursively calculates the total size of files in a directory. -func CalculateDirectorySize(dirPath string) int64 { - log.Printf("Calculating directory size: %s", dirPath) - var totalSize int64 - - _ = filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if !info.IsDir() { - totalSize += info.Size() - } - return nil - }) - - log.Printf("Directory size: path=%s, size=%d bytes", dirPath, totalSize) - return totalSize -} diff --git a/pkg/logger/slog_adapter.go b/pkg/logger/slog_adapter.go index dc240e2c32..0727cdae10 100644 --- a/pkg/logger/slog_adapter.go +++ b/pkg/logger/slog_adapter.go @@ -3,7 +3,6 @@ package logger import ( "context" "fmt" - "io" "log/slog" "strings" ) @@ -91,21 +90,8 @@ func formatSlogValue(v any) string { return slog.AnyValue(v).String() } -// NewSlogLogger creates a new slog.Logger that uses gh-aw's logger package -// This allows integration with libraries that expect slog.Logger -func NewSlogLogger(namespace string) *slog.Logger { - logger := New(namespace) - handler := NewSlogHandler(logger) - return slog.New(handler) -} - // NewSlogLoggerWithHandler creates a new slog.Logger using an existing Logger instance func NewSlogLoggerWithHandler(logger *Logger) *slog.Logger { handler := NewSlogHandler(logger) return slog.New(handler) } - -// Discard 
returns a slog.Logger that discards all output -func Discard() *slog.Logger { - return slog.New(slog.NewTextHandler(io.Discard, nil)) -} diff --git a/pkg/logger/slog_adapter_test.go b/pkg/logger/slog_adapter_test.go index f4cf61e5a4..59e35aae6f 100644 --- a/pkg/logger/slog_adapter_test.go +++ b/pkg/logger/slog_adapter_test.go @@ -10,91 +10,6 @@ import ( "testing" ) -func TestSlogAdapter(t *testing.T) { - // Only run if DEBUG is enabled - if os.Getenv("DEBUG") == "" { - t.Skip("Skipping test: DEBUG environment variable not set") - } - - // Capture stderr output - oldStderr := os.Stderr - r, w, _ := os.Pipe() - os.Stderr = w - - // Create slog logger using our adapter - slogLogger := NewSlogLogger("test:slog") - - // Test different log levels - slogLogger.Info("info message", "key", "value") - slogLogger.Debug("debug message", "count", 42) - slogLogger.Warn("warning message") - slogLogger.Error("error message", "error", "something went wrong") - - // Close write end and read output - w.Close() - var buf bytes.Buffer - io.Copy(&buf, r) - output := buf.String() - - // Restore stderr - os.Stderr = oldStderr - - // Verify output contains expected messages - if !strings.Contains(output, "[INFO] info message") { - t.Errorf("Expected info message in output, got: %s", output) - } - if !strings.Contains(output, "[DEBUG] debug message") { - t.Errorf("Expected debug message in output, got: %s", output) - } - if !strings.Contains(output, "[WARN] warning message") { - t.Errorf("Expected warn message in output, got: %s", output) - } - if !strings.Contains(output, "[ERROR] error message") { - t.Errorf("Expected error message in output, got: %s", output) - } - - // Verify attributes are included - if !strings.Contains(output, "key=value") { - t.Errorf("Expected 'key=value' in output, got: %s", output) - } - if !strings.Contains(output, "count=42") { - t.Errorf("Expected 'count=42' in output, got: %s", output) - } -} - -func TestSlogAdapterDisabled(t *testing.T) { - // Only run if 
DEBUG is not set - if os.Getenv("DEBUG") != "" { - t.Skip("Skipping test: DEBUG environment variable is set") - } - - // Capture stderr output - oldStderr := os.Stderr - r, w, _ := os.Pipe() - os.Stderr = w - - // Create slog logger using our adapter - slogLogger := NewSlogLogger("test:slog") - - // Test logging (should be disabled) - slogLogger.Info("info message", "key", "value") - slogLogger.Debug("debug message") - - // Close write end and read output - w.Close() - var buf bytes.Buffer - io.Copy(&buf, r) - output := buf.String() - - // Restore stderr - os.Stderr = oldStderr - - // Verify no output - if output != "" { - t.Errorf("Expected no output when logger is disabled, got: %s", output) - } -} - func TestNewSlogLoggerWithHandler(t *testing.T) { // Only run if DEBUG is enabled if os.Getenv("DEBUG") == "" { diff --git a/pkg/parser/frontmatter_benchmark_test.go b/pkg/parser/frontmatter_benchmark_test.go index 42bdbbb9e6..021b7cfe9c 100644 --- a/pkg/parser/frontmatter_benchmark_test.go +++ b/pkg/parser/frontmatter_benchmark_test.go @@ -170,93 +170,5 @@ Workflow demonstrating array handling in frontmatter. 
} // BenchmarkValidateSchema benchmarks schema validation -func BenchmarkValidateSchema(b *testing.B) { - frontmatter := map[string]any{ - "on": "push", - "permissions": map[string]any{ - "contents": "read", - "issues": "write", - "pull-requests": "read", - }, - "engine": "claude", - "tools": map[string]any{ - "github": map[string]any{ - "allowed": []any{"issue_read", "add_issue_comment"}, - }, - "bash": []any{"echo", "ls"}, - }, - "timeout-minutes": 10, - } - - for b.Loop() { - _ = ValidateMainWorkflowFrontmatterWithSchema(frontmatter) - } -} // BenchmarkValidateSchema_Complex benchmarks schema validation with complex data -func BenchmarkValidateSchema_Complex(b *testing.B) { - frontmatter := map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "types": []any{"opened", "synchronize", "reopened"}, - "forks": []any{"org/*", "user/repo"}, - }, - }, - "permissions": map[string]any{ - "contents": "read", - "issues": "write", - "pull-requests": "write", - "actions": "read", - }, - "engine": map[string]any{ - "id": "copilot", - "max-turns": 5, - "max-concurrency": 3, - "model": "gpt-5", - }, - "mcp-servers": map[string]any{ - "github": map[string]any{ - "mode": "remote", - "toolsets": []any{"default", "actions", "discussions"}, - "read-only": false, - }, - "playwright": map[string]any{ - "container": "mcr.microsoft.com/playwright:v1.41.0", - "allowed-domains": []any{"github.com", "*.github.io"}, - }, - }, - "network": map[string]any{ - "allowed": []any{"defaults", "python", "node"}, - "firewall": map[string]any{ - "version": "v1.0.0", - "log-level": "debug", - }, - }, - "tools": map[string]any{ - "edit": true, - "web-fetch": true, - "web-search": true, - "bash": []any{"git status", "git diff", "npm test"}, - }, - "safe-outputs": map[string]any{ - "create-pull-requests": map[string]any{ - "title-prefix": "[ai] ", - "labels": []any{"automation", "ai-generated"}, - "draft": true, - }, - "add-comments": map[string]any{ - "max": 3, - "target": "*", - }, 
- }, - "timeout-minutes": 30, - "concurrency": map[string]any{ - "group": "workflow-123", - "cancel-in-progress": true, - }, - } - - for b.Loop() { - _ = ValidateMainWorkflowFrontmatterWithSchema(frontmatter) - } -} diff --git a/pkg/parser/frontmatter_includes_test.go b/pkg/parser/frontmatter_includes_test.go index e6503fc90f..4ec353b3e0 100644 --- a/pkg/parser/frontmatter_includes_test.go +++ b/pkg/parser/frontmatter_includes_test.go @@ -7,580 +7,9 @@ import ( "path/filepath" "strings" "testing" - - "github.com/github/gh-aw/pkg/constants" ) -func TestProcessIncludes(t *testing.T) { - // Create temporary test files - tempDir, err := os.MkdirTemp("", "test_includes") - if err != nil { - t.Fatalf("Failed to create temp dir: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create test file with markdown content - testFile := filepath.Join(tempDir, "test.md") - testContent := `--- -tools: - bash: - allowed: ["ls", "cat"] ---- - -# Test Content -This is a test file content. -` - if err := os.WriteFile(testFile, []byte(testContent), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - // Create test file with extra newlines for trimming test - testFileWithNewlines := filepath.Join(tempDir, "test-newlines.md") - testContentWithNewlines := ` - -# Content with Extra Newlines -Some content here. 
- - -` - if err := os.WriteFile(testFileWithNewlines, []byte(testContentWithNewlines), 0644); err != nil { - t.Fatalf("Failed to write test file with newlines: %v", err) - } - - tests := []struct { - name string - content string - baseDir string - extractTools bool - expected string - wantErr bool - }{ - { - name: "no includes", - content: "# Title\nRegular content", - baseDir: tempDir, - extractTools: false, - expected: "# Title\nRegular content\n", - }, - { - name: "simple include", - content: "@include test.md\n# After include", - baseDir: tempDir, - extractTools: false, - expected: "# Test Content\nThis is a test file content.\n# After include\n", - }, - { - name: "extract tools", - content: "@include test.md", - baseDir: tempDir, - extractTools: true, - expected: `{"bash":{"allowed":["ls","cat"]}}` + "\n", - }, - { - name: "file not found", - content: "@include nonexistent.md", - baseDir: tempDir, - extractTools: false, - wantErr: true, // Now expects error instead of embedding comment - }, - { - name: "include file with extra newlines", - content: "@include test-newlines.md\n# After include", - baseDir: tempDir, - extractTools: false, - expected: "# Content with Extra Newlines\nSome content here.\n# After include\n", - }, - { - name: "simple import (alias for include)", - content: "@import test.md\n# After import", - baseDir: tempDir, - extractTools: false, - expected: "# Test Content\nThis is a test file content.\n# After import\n", - }, - { - name: "extract tools with import", - content: "@import test.md", - baseDir: tempDir, - extractTools: true, - expected: `{"bash":{"allowed":["ls","cat"]}}` + "\n", - }, - { - name: "import file not found", - content: "@import nonexistent.md", - baseDir: tempDir, - extractTools: false, - wantErr: true, - }, - { - name: "optional import missing file", - content: "@import? 
missing.md\n", - baseDir: tempDir, - extractTools: false, - expected: "", - }, - } - - // Create test file with invalid frontmatter for testing validation - invalidFile := filepath.Join(tempDir, "invalid.md") - invalidContent := `--- -title: Invalid File -on: push -tools: - bash: - allowed: ["ls"] ---- - -# Invalid Content -This file has invalid frontmatter for an included file. -` - if err := os.WriteFile(invalidFile, []byte(invalidContent), 0644); err != nil { - t.Fatalf("Failed to write invalid test file: %v", err) - } - - // Add test case for invalid frontmatter in included file (should now pass with warnings for non-workflow files) - tests = append(tests, struct { - name string - content string - baseDir string - extractTools bool - expected string - wantErr bool - }{ - name: "invalid frontmatter in included file", - content: "@include invalid.md", - baseDir: tempDir, - extractTools: false, - expected: "# Invalid Content\nThis file has invalid frontmatter for an included file.\n", - wantErr: false, - }) - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ProcessIncludes(tt.content, tt.baseDir, tt.extractTools) - - if tt.wantErr && err == nil { - t.Errorf("ProcessIncludes() expected error, got nil") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("ProcessIncludes() error = %v", err) - return - } - - // Special handling for the invalid frontmatter test case - it should now pass with warnings - if tt.name == "invalid frontmatter in included file" { - // Check that the content was successfully included - if !strings.Contains(result, "# Invalid Content") { - t.Errorf("ProcessIncludes() = %q, expected to contain '# Invalid Content'", result) - } - return - } - - if result != tt.expected { - t.Errorf("ProcessIncludes() = %q, want %q", result, tt.expected) - } - }) - } -} - -func TestProcessIncludesConditionalValidation(t *testing.T) { - // Create temporary directory structure - tempDir, err := os.MkdirTemp("", 
"test_conditional_validation") - if err != nil { - t.Fatalf("Failed to create temp dir: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create .github/workflows directory structure - workflowsDir := filepath.Join(tempDir, constants.GetWorkflowDir()) - if err := os.MkdirAll(workflowsDir, 0755); err != nil { - t.Fatalf("Failed to create workflows dir: %v", err) - } - - // Create docs directory for non-workflow files - docsDir := filepath.Join(tempDir, "docs") - if err := os.MkdirAll(docsDir, 0755); err != nil { - t.Fatalf("Failed to create docs dir: %v", err) - } - - // Test file 1: Valid workflow file (should pass strict validation) - validWorkflowFile := filepath.Join(workflowsDir, "valid.md") - validWorkflowContent := `--- -tools: - github: - allowed: [issue_read] ---- - -# Valid Workflow -This is a valid workflow file.` - if err := os.WriteFile(validWorkflowFile, []byte(validWorkflowContent), 0644); err != nil { - t.Fatalf("Failed to write valid workflow file: %v", err) - } - - // Test file 2: Invalid workflow file (should fail strict validation) - invalidWorkflowFile := filepath.Join(workflowsDir, "invalid.md") - invalidWorkflowContent := `--- -title: Invalid Field -on: push -tools: - github: - allowed: [issue_read] ---- - -# Invalid Workflow -This has invalid frontmatter fields.` - if err := os.WriteFile(invalidWorkflowFile, []byte(invalidWorkflowContent), 0644); err != nil { - t.Fatalf("Failed to write invalid workflow file: %v", err) - } - - // Test file 2.5: Invalid non-workflow file (should pass with warnings) - invalidNonWorkflowFile := filepath.Join(docsDir, "invalid-external.md") - invalidNonWorkflowContent := `--- -title: Invalid Field -on: push -tools: - github: - allowed: [issue_read] ---- - -# Invalid External File -This has invalid frontmatter fields but it's outside workflows dir.` - if err := os.WriteFile(invalidNonWorkflowFile, []byte(invalidNonWorkflowContent), 0644); err != nil { - t.Fatalf("Failed to write invalid non-workflow file: %v", 
err) - } - - // Test file 3: Agent instructions file (should pass with warnings) - agentFile := filepath.Join(docsDir, "agent-instructions.md") - agentContent := `--- -description: Agent instructions -applyTo: "**/*.py" -temperature: 0.7 -tools: - github: - allowed: [issue_read] ---- - -# Agent Instructions -These are instructions for AI agents.` - if err := os.WriteFile(agentFile, []byte(agentContent), 0644); err != nil { - t.Fatalf("Failed to write agent file: %v", err) - } - - // Test file 4: Plain markdown file (no frontmatter) - plainFile := filepath.Join(docsDir, "plain.md") - plainContent := `# Plain Markdown -This is just plain markdown content with no frontmatter.` - if err := os.WriteFile(plainFile, []byte(plainContent), 0644); err != nil { - t.Fatalf("Failed to write plain file: %v", err) - } - - tests := []struct { - name string - content string - baseDir string - extractTools bool - wantErr bool - checkContent string - }{ - { - name: "valid workflow file inclusion", - content: "@include .github/workflows/valid.md", - baseDir: tempDir, - extractTools: false, - wantErr: false, - checkContent: "# Valid Workflow", - }, - { - name: "invalid workflow file inclusion should fail", - content: "@include .github/workflows/invalid.md", - baseDir: tempDir, - extractTools: false, - wantErr: true, // Now expects error instead of embedding comment - }, - { - name: "invalid non-workflow file inclusion should succeed with warnings", - content: "@include docs/invalid-external.md", - baseDir: tempDir, - extractTools: false, - wantErr: false, - checkContent: "# Invalid External File", - }, - { - name: "agent instructions file inclusion should succeed", - content: "@include docs/agent-instructions.md", - baseDir: tempDir, - extractTools: false, - wantErr: false, - checkContent: "# Agent Instructions", - }, - { - name: "plain markdown file inclusion should succeed", - content: "@include docs/plain.md", - baseDir: tempDir, - extractTools: false, - wantErr: false, - 
checkContent: "# Plain Markdown", - }, - { - name: "extract tools from valid workflow file", - content: "@include .github/workflows/valid.md", - baseDir: tempDir, - extractTools: true, - wantErr: false, - checkContent: `{"github":{"allowed":["issue_read"]}}`, - }, - { - name: "extract tools from agent file", - content: "@include docs/agent-instructions.md", - baseDir: tempDir, - extractTools: true, - wantErr: false, - checkContent: `{"github":{"allowed":["issue_read"]}}`, - }, - { - name: "extract tools from plain file (no tools)", - content: "@include docs/plain.md", - baseDir: tempDir, - extractTools: true, - wantErr: false, - checkContent: `{}`, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ProcessIncludes(tt.content, tt.baseDir, tt.extractTools) - - if tt.wantErr && err == nil { - t.Errorf("ProcessIncludes() expected error, got nil") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("ProcessIncludes() error = %v", err) - return - } - - if !tt.wantErr && tt.checkContent != "" { - if !strings.Contains(result, tt.checkContent) { - t.Errorf("ProcessIncludes() result = %q, expected to contain %q", result, tt.checkContent) - } - } - }) - } -} - -func TestExpandIncludes(t *testing.T) { - // Create temporary test files - tempDir, err := os.MkdirTemp("", "test_expand") - if err != nil { - t.Fatalf("Failed to create temp dir: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create go.mod to make it project root for component resolution - goModFile := filepath.Join(tempDir, "go.mod") - if err := os.WriteFile(goModFile, []byte("module test"), 0644); err != nil { - t.Fatalf("Failed to write go.mod: %v", err) - } - - // Create test file - testFile := filepath.Join(tempDir, "test.md") - testContent := `--- -tools: - bash: - allowed: ["ls"] ---- - -# Test Content -This is test content. 
-` - if err := os.WriteFile(testFile, []byte(testContent), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - tests := []struct { - name string - content string - baseDir string - extractTools bool - wantContains string - wantErr bool - }{ - { - name: "expand markdown content", - content: "# Start\n@include test.md\n# End", - baseDir: tempDir, - extractTools: false, - wantContains: "# Test Content\nThis is test content.", - }, - { - name: "expand tools", - content: "@include test.md", - baseDir: tempDir, - extractTools: true, - wantContains: `"bash"`, - }, - { - name: "expand markdown content with import", - content: "# Start\n@import test.md\n# End", - baseDir: tempDir, - extractTools: false, - wantContains: "# Test Content\nThis is test content.", - }, - { - name: "expand tools with import", - content: "@import test.md", - baseDir: tempDir, - extractTools: true, - wantContains: `"bash"`, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ExpandIncludes(tt.content, tt.baseDir, tt.extractTools) - - if tt.wantErr { - if err == nil { - t.Errorf("ExpandIncludes() expected error, got nil") - } - return - } - - if err != nil { - t.Errorf("ExpandIncludes() error = %v", err) - return - } - - if !strings.Contains(result, tt.wantContains) { - t.Errorf("ExpandIncludes() = %q, want to contain %q", result, tt.wantContains) - } - }) - } -} - // Test ExtractWorkflowNameFromMarkdown function -func TestProcessIncludesOptional(t *testing.T) { - // Create temporary directory structure - tempDir, err := os.MkdirTemp("", "test_optional_includes") - if err != nil { - t.Fatalf("Failed to create temp dir: %v", err) - } - defer os.RemoveAll(tempDir) - - // Create an existing include file - existingFile := filepath.Join(tempDir, "existing.md") - existingContent := "# Existing Include\nThis file exists." 
- if err := os.WriteFile(existingFile, []byte(existingContent), 0644); err != nil { - t.Fatalf("Failed to write existing file: %v", err) - } - - tests := []struct { - name string - content string - extractTools bool - expectedOutput string - expectError bool - }{ - { - name: "regular include existing file", - content: "@include existing.md\n", - extractTools: false, - expectedOutput: existingContent, - }, - { - name: "regular include missing file", - content: "@include missing.md\n", - extractTools: false, - expectError: true, // Now expects error instead of embedding comment - }, - { - name: "optional include existing file", - content: "@include? existing.md\n", - extractTools: false, - expectedOutput: existingContent, - }, - { - name: "optional include missing file", - content: "@include? missing.md\n", - extractTools: false, - expectedOutput: "", // No content added, friendly message goes to stdout - }, - { - name: "optional include missing file extract tools", - content: "@include? missing.md\n", - extractTools: true, - expectedOutput: "", - }, - { - name: "regular include missing file extract tools", - content: "@include missing.md\n", - extractTools: true, - expectError: true, // Now expects error instead of returning {} - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ProcessIncludes(tt.content, tempDir, tt.extractTools) - - if tt.expectError { - if err == nil { - t.Errorf("ProcessIncludes expected error, got nil") - } - return - } - - if err != nil { - t.Errorf("ProcessIncludes unexpected error: %v", err) - return - } - - if !strings.Contains(result, tt.expectedOutput) { - t.Errorf("ProcessIncludes output = %q, expected to contain %q", result, tt.expectedOutput) - } - }) - } -} - -func TestProcessIncludesWithCycleDetection(t *testing.T) { - // Create a temporary directory for test files - tempDir, err := os.MkdirTemp("", "test_cycle_detection") - if err != nil { - t.Fatalf("Failed to create temp dir: %v", err) - 
} - defer os.RemoveAll(tempDir) - - // Create file A that includes file B - fileA := filepath.Join(tempDir, "fileA.md") - if err := os.WriteFile(fileA, []byte("# File A\n@include fileB.md\n"), 0644); err != nil { - t.Fatalf("Failed to write fileA: %v", err) - } - - // Create file B that includes file A (creating a cycle) - fileB := filepath.Join(tempDir, "fileB.md") - if err := os.WriteFile(fileB, []byte("# File B\n@include fileA.md\n"), 0644); err != nil { - t.Fatalf("Failed to write fileB: %v", err) - } - - // Process includes from file A - should not hang due to cycle detection - content := "# Main\n@include fileA.md\n" - result, err := ProcessIncludes(content, tempDir, false) - - if err != nil { - t.Errorf("ProcessIncludes with cycle should not error: %v", err) - } - - // Result should contain content from fileA and fileB, but cycle should be prevented - if !strings.Contains(result, "File A") { - t.Errorf("ProcessIncludes result should contain File A content") - } - if !strings.Contains(result, "File B") { - t.Errorf("ProcessIncludes result should contain File B content") - } -} func TestProcessIncludedFileWithNameAndDescription(t *testing.T) { tempDir := t.TempDir() diff --git a/pkg/parser/frontmatter_syntax_errors_test.go b/pkg/parser/frontmatter_syntax_errors_test.go deleted file mode 100644 index 17d4bf2539..0000000000 --- a/pkg/parser/frontmatter_syntax_errors_test.go +++ /dev/null @@ -1,691 +0,0 @@ -//go:build !integration - -package parser - -import ( - "os" - "path/filepath" - "strings" - "testing" - - "github.com/github/gh-aw/pkg/console" -) - -// TestFrontmatterSyntaxErrors provides extensive test suite for frontmatter syntax errors -func TestFrontmatterSyntaxErrors(t *testing.T) { - tests := []struct { - name string - frontmatterContent string - markdownContent string - expectError bool - expectedMinLine int // Minimum expected line number - expectedMinColumn int // Minimum expected column number - expectedErrorContains string // Substring that 
should be in error message - description string // Human-readable description of the error scenario - }{ - { - name: "missing_colon_in_mapping", - frontmatterContent: `--- -name: Test Workflow -on push -permissions: read-all ----`, - markdownContent: `# Test Workflow -This is a test workflow.`, - expectError: true, - expectedMinLine: 3, - expectedMinColumn: 1, - expectedErrorContains: "non-map value", - description: "Missing colon in YAML mapping", - }, - { - name: "invalid_indentation", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: - - main -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid indentation.`, - expectError: true, - expectedMinLine: 4, - expectedMinColumn: 1, - expectedErrorContains: "non-map value", - description: "Invalid indentation in nested YAML structure", - }, - { - name: "duplicate_keys", - frontmatterContent: `--- -name: Test Workflow -on: push -name: Duplicate Name -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has duplicate keys.`, - expectError: true, - expectedMinLine: 4, - expectedMinColumn: 1, - expectedErrorContains: "duplicate", - description: "Duplicate keys in YAML frontmatter", - }, - { - name: "unclosed_bracket_in_array", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: [main, dev -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has unclosed brackets.`, - expectError: true, - expectedMinLine: 5, - expectedMinColumn: 1, - expectedErrorContains: "must be specified", - description: "Unclosed bracket in YAML array", - }, - { - name: "unclosed_brace_in_object", - frontmatterContent: `--- -name: Test Workflow -on: - push: {branches: [main], types: [opened -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has unclosed braces.`, - expectError: true, - expectedMinLine: 4, - expectedMinColumn: 1, - expectedErrorContains: "must be specified", - 
description: "Unclosed brace in YAML object", - }, - { - name: "invalid_yaml_character", - frontmatterContent: `--- -name: Test Workflow -on: @invalid_character -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid YAML characters.`, - expectError: true, - expectedMinLine: 3, - expectedMinColumn: 1, - expectedErrorContains: "reserved character", - description: "Invalid character that cannot start YAML token", - }, - { - name: "malformed_string_quotes", - frontmatterContent: `--- -name: "Test Workflow -on: push -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has malformed string quotes.`, - expectError: true, - expectedMinLine: 2, - expectedMinColumn: 1, - expectedErrorContains: "could not find end character", - description: "Malformed string quotes in YAML", - }, - { - name: "invalid_boolean_value", - frontmatterContent: `--- -name: Test Workflow -on: push -enabled: yes_please -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid boolean value.`, - expectError: false, // This may not cause a parse error, just invalid data - expectedMinLine: 0, - expectedMinColumn: 0, - expectedErrorContains: "", - description: "Invalid boolean value in YAML (may parse as string)", - }, - { - name: "missing_value_after_colon", - frontmatterContent: `--- -name: Test Workflow -on: -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has missing value after colon.`, - expectError: false, // This actually parses as null value - expectedMinLine: 0, - expectedMinColumn: 0, - expectedErrorContains: "", - description: "Missing value after colon in YAML mapping (parses as null)", - }, - { - name: "invalid_list_structure", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: - main - - dev -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid list structure.`, - expectError: false, // This may 
actually parse successfully - expectedMinLine: 0, - expectedMinColumn: 0, - expectedErrorContains: "", - description: "Invalid list structure mixing plain and dash syntax (may be accepted)", - }, - { - name: "unexpected_end_of_stream", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: [ ----`, - markdownContent: `# Test Workflow -This workflow has unexpected end of stream.`, - expectError: true, - expectedMinLine: 5, - expectedMinColumn: 14, - expectedErrorContains: "not found", - description: "Unexpected end of stream in YAML", - }, - { - name: "invalid_escape_sequence", - frontmatterContent: `--- -name: Test Workflow -description: "Invalid escape: \z" -on: push -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid escape sequence.`, - expectError: true, - expectedMinLine: 3, - expectedMinColumn: 26, - expectedErrorContains: "escape", - description: "Invalid escape sequence in YAML string", - }, - { - name: "mixed_tab_and_space_indentation", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: - - main -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has mixed tab and space indentation.`, - expectError: true, // goccy actually does catch this error - expectedMinLine: 5, - expectedMinColumn: 1, - expectedErrorContains: "cannot start any token", - description: "Mixed tab and space indentation in YAML", - }, - { - name: "anchor_without_alias", - frontmatterContent: `--- -name: Test Workflow -defaults: &default_settings - timeout: 30 -on: push -job1: *missing_anchor -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has anchor without alias.`, - expectError: true, - expectedMinLine: 6, - expectedMinColumn: 7, - expectedErrorContains: "alias", - description: "Reference to undefined YAML anchor", - }, - { - name: "complex_nested_structure_error", - frontmatterContent: `--- -name: Test Workflow -on: - push: - branches: - - main - 
paths: - - "src/**" - pull_request: - types: [opened, synchronize - branches: [main] -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has complex nested structure error.`, - expectError: true, - expectedMinLine: 10, - expectedMinColumn: 1, - expectedErrorContains: "must be specified", - description: "Complex nested structure with missing closing bracket", - }, - { - name: "invalid_multiline_string", - frontmatterContent: `--- -name: Test Workflow -description: | - This is a multiline - description that has -invalid_key: value -on: push -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow has invalid multiline string.`, - expectError: false, // This may actually parse successfully with literal block - expectedMinLine: 0, - expectedMinColumn: 0, - expectedErrorContains: "", - description: "Invalid multiline string structure in YAML (may be accepted)", - }, - { - name: "schema_validation_error_unknown_field", - frontmatterContent: `--- -name: Test Workflow -on: push -unknown_field: value -invalid_permissions: write -permissions: read-all ----`, - markdownContent: `# Test Workflow -This workflow may have schema validation errors.`, - expectError: false, // This might not be a YAML syntax error but a schema error - expectedMinLine: 0, - expectedMinColumn: 0, - expectedErrorContains: "", - description: "Schema validation error with unknown fields (may not cause parse error)", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create temporary file for testing - tempDir, err := os.MkdirTemp("", "frontmatter_syntax_test_*") - if err != nil { - t.Fatalf("Failed to create temp directory: %v", err) - } - defer os.RemoveAll(tempDir) - - // Write test file with frontmatter and markdown content - testFile := filepath.Join(tempDir, "test.md") - fullContent := tt.frontmatterContent + "\n\n" + tt.markdownContent - 
if err := os.WriteFile(testFile, []byte(fullContent), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - // Attempt to parse frontmatter - result, err := ExtractFrontmatterFromContent(fullContent) - - if tt.expectError { - if err == nil { - t.Errorf("Expected error for %s, but parsing succeeded", tt.description) - return - } - - // Extract error location information - line, column, message := ExtractYAMLError(err, 2) // Frontmatter starts at line 2 - - // Verify error location is reasonable - if line > 0 && line < tt.expectedMinLine { - t.Errorf("Expected line >= %d, got %d for %s", tt.expectedMinLine, line, tt.description) - } - - if column > 0 && tt.expectedMinColumn > 0 && column < tt.expectedMinColumn { - t.Errorf("Expected column >= %d, got %d for %s", tt.expectedMinColumn, column, tt.description) - } - - // Verify error message contains expected content - if tt.expectedErrorContains != "" && !strings.Contains(strings.ToLower(message), strings.ToLower(tt.expectedErrorContains)) { - t.Errorf("Expected error message to contain '%s', got '%s' for %s", tt.expectedErrorContains, message, tt.description) - } - - // Log detailed error information for debugging - t.Logf("✓ %s: Line %d, Column %d, Error: %s", tt.description, line, column, message) - - // Verify that console error formatting works - compilerError := console.CompilerError{ - Position: console.ErrorPosition{ - File: "test.md", - Line: line, - Column: column, - }, - Type: "error", - Message: "frontmatter parsing failed: " + message, - } - - formattedError := console.FormatError(compilerError) - if formattedError == "" { - t.Errorf("Console error formatting failed for %s", tt.description) - } - - } else { - if err != nil { - t.Errorf("Unexpected error for %s: %v", tt.description, err) - return - } - - if result == nil { - t.Errorf("Expected successful parsing result for %s", tt.description) - return - } - - t.Logf("✓ %s: Successfully parsed (no syntax error as expected)", 
tt.description) - } - }) - } -} - -// TestFrontmatterParsingWithRealGoccyErrors tests frontmatter parsing with actual goccy/go-yaml errors -func TestFrontmatterParsingWithRealGoccyErrors(t *testing.T) { - tests := []struct { - name string - yamlContent string - expectPreciseLocation bool - description string - }{ - { - name: "real_mapping_error", - yamlContent: `name: Test -on: push -invalid syntax here -permissions: read`, - expectPreciseLocation: true, - description: "Real mapping syntax error that goccy should catch with precise location", - }, - { - name: "real_indentation_error", - yamlContent: `name: Test -on: - push: - branches: - invalid_indent: here -permissions: read`, - expectPreciseLocation: false, // This may actually parse successfully - description: "Real indentation error that may not cause parse error", - }, - { - name: "real_array_error", - yamlContent: `name: Test -on: - push: - branches: [main, dev, feature/test -permissions: read`, - expectPreciseLocation: true, - description: "Real array syntax error that goccy should catch with precise location", - }, - { - name: "real_string_error", - yamlContent: `name: "Unterminated string -on: push -permissions: read`, - expectPreciseLocation: true, - description: "Real string syntax error that goccy should catch with precise location", - }, - { - name: "real_complex_structure_error", - yamlContent: `name: Test -on: - workflow_dispatch: - inputs: - version: - description: 'Version to deploy' - required: true - default: 'latest' - type: string - environment: - description: 'Environment' - required: true - default: 'staging' - type: choice - options: [staging, production -jobs: - deploy: - runs-on: ubuntu-latest`, - expectPreciseLocation: true, - description: "Real complex structure error that goccy should catch with precise location", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Create full frontmatter content - fullContent := "---\n" + tt.yamlContent + "\n---\n\n# 
Test\nContent here." - - // Attempt to parse frontmatter - _, err := ExtractFrontmatterFromContent(fullContent) - - if err == nil { - if tt.expectPreciseLocation { - t.Errorf("Expected parsing to fail for %s", tt.description) - return - } else { - t.Logf("✓ %s: Parsed successfully (may not be an error)", tt.description) - return - } - } - - // Extract error location using our goccy parser - line, column, message := ExtractYAMLError(err, 2) // Frontmatter starts at line 2 - - t.Logf("Goccy Error for %s:", tt.description) - t.Logf(" Original Error: %s", err.Error()) - t.Logf(" Parsed Location: Line %d, Column %d", line, column) - t.Logf(" Parsed Message: %s", message) - - if tt.expectPreciseLocation { - // Verify we got a reasonable line and column - if line < 2 { // Should be at least at frontmatter start - t.Errorf("Expected line >= 2, got %d for %s", line, tt.description) - } - - if column <= 0 { - t.Errorf("Expected column > 0, got %d for %s", column, tt.description) - } - - if message == "" { - t.Errorf("Expected non-empty message for %s", tt.description) - } - - // Verify that we're getting goccy's native format, not fallback parsing - if strings.Contains(err.Error(), "[") && strings.Contains(err.Error(), "]") { - t.Logf("✓ Using goccy native [line:column] format for %s", tt.description) - } else { - t.Logf("ℹ Using fallback string parsing for %s", tt.description) - } - } - }) - } -} - -// TestFrontmatterErrorContextExtraction tests that we extract good context for error reporting -func TestFrontmatterErrorContextExtraction(t *testing.T) { - content := `--- -name: Test Workflow -on: - push: - branches: [main, dev - pull_request: - types: [opened] -permissions: read-all -jobs: - test: - runs-on: ubuntu-latest ---- - -# Test Workflow - -This is a test workflow with a syntax error in the frontmatter. 
-The error is on line 5 where there's an unclosed bracket.` - - result, err := ExtractFrontmatterFromContent(content) - - if err == nil { - t.Fatal("Expected parsing to fail due to syntax error") - } - - // Extract error information - line, column, message := ExtractYAMLError(err, 2) - - if line <= 2 { - t.Errorf("Expected error line > 2, got %d", line) - } - - if column <= 0 { - t.Errorf("Expected error column > 0, got %d", column) - } - - // Verify we have frontmatter lines for context - if result != nil && len(result.FrontmatterLines) > 0 { - t.Logf("✓ Frontmatter context available with %d lines", len(result.FrontmatterLines)) - } else { - t.Log("ℹ No frontmatter context available (expected for parse errors)") - } - - // Create console error format - compilerError := console.CompilerError{ - Position: console.ErrorPosition{ - File: "test.md", - Line: line, - Column: column, - }, - Type: "error", - Message: "frontmatter parsing failed: " + message, - } - - // Test that error formatting works - formattedError := console.FormatError(compilerError) - if formattedError == "" { - t.Error("Error formatting failed") - } else { - t.Logf("✓ Formatted error:\n%s", formattedError) - } - - t.Logf("Error details: Line %d, Column %d, Message: %s", line, column, message) -} - -// TestFrontmatterSyntaxErrorBoundaryConditions tests edge cases and boundary conditions -func TestFrontmatterSyntaxErrorBoundaryConditions(t *testing.T) { - tests := []struct { - name string - content string - expectError bool - description string - }{ - { - name: "minimal_invalid_frontmatter", - content: `--- -: ---- - -# Content`, - expectError: true, - description: "Minimal invalid frontmatter with just a colon", - }, - { - name: "empty_frontmatter_with_error", - content: `--- ---- - -# Content`, - expectError: false, - description: "Empty frontmatter should not cause parse error", - }, - { - name: "very_long_line_with_error", - content: `--- -name: Test -very_long_line_with_error: ` + 
strings.Repeat("a", 1000) + ` invalid: syntax -permissions: read-all ---- - -# Content`, - expectError: true, - description: "Very long line with syntax error", - }, - { - name: "unicode_content_with_error", - content: `--- -name: "测试工作流 🚀" -description: "这是一个测试" -invalid_syntax_here -on: push -permissions: read-all ---- - -# 测试内容 - -这里是 markdown 内容。`, - expectError: true, - description: "Unicode content with syntax error", - }, - { - name: "deeply_nested_error", - content: `--- -name: Test -jobs: - test: - strategy: - matrix: - os: [ubuntu, windows] - node: [14, 16, 18] - include: - - os: ubuntu - node: 20 - special: true - exclude: - - os: windows - node: 14 - invalid syntax here -permissions: read-all ---- - -# Content`, - expectError: true, - description: "Deeply nested structure with syntax error", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ExtractFrontmatterFromContent(tt.content) - - if tt.expectError { - if err == nil { - t.Errorf("Expected error for %s", tt.description) - return - } - - line, column, message := ExtractYAMLError(err, 2) - t.Logf("✓ %s: Line %d, Column %d, Error: %s", tt.description, line, column, message) - } else { - if err != nil { - t.Errorf("Unexpected error for %s: %v", tt.description, err) - } else { - t.Logf("✓ %s: Parsed successfully as expected", tt.description) - } - } - }) - } -} diff --git a/pkg/parser/import_syntax_test.go b/pkg/parser/import_syntax_test.go index b662d9b256..ae8c60d242 100644 --- a/pkg/parser/import_syntax_test.go +++ b/pkg/parser/import_syntax_test.go @@ -3,11 +3,8 @@ package parser import ( - "os" "strings" "testing" - - "github.com/github/gh-aw/pkg/testutil" ) func TestParseImportDirective(t *testing.T) { @@ -172,83 +169,3 @@ func TestParseImportDirective(t *testing.T) { }) } } - -func TestProcessIncludesWithNewSyntax(t *testing.T) { - // Create temporary test files - tempDir := testutil.TempDir(t, "test-*") - - // Create test file with markdown content - 
testFile := tempDir + "/test.md" - testContent := `--- -tools: - bash: - allowed: ["ls", "cat"] ---- - -# Test Content -This is a test file content. -` - if err := os.WriteFile(testFile, []byte(testContent), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - tests := []struct { - name string - content string - expected string - wantErr bool - }{ - { - name: "new syntax - basic import with colon", - content: "{{#import: test.md}}\n# After import", - expected: "# Test Content\nThis is a test file content.\n# After import\n", - wantErr: false, - }, - { - name: "new syntax - basic import without colon", - content: "{{#import test.md}}\n# After import", - expected: "# Test Content\nThis is a test file content.\n# After import\n", - wantErr: false, - }, - { - name: "new syntax - optional import with colon (file exists)", - content: "{{#import?: test.md}}\n# After import", - expected: "# Test Content\nThis is a test file content.\n# After import\n", - wantErr: false, - }, - { - name: "new syntax - optional import without colon (file exists)", - content: "{{#import? 
test.md}}\n# After import", - expected: "# Test Content\nThis is a test file content.\n# After import\n", - wantErr: false, - }, - { - name: "new syntax - optional import (file missing)", - content: "{{#import?: nonexistent.md}}\n# After import", - expected: "# After import\n", - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result, err := ProcessIncludes(tt.content, tempDir, false) - - if tt.wantErr { - if err == nil { - t.Errorf("ProcessIncludes() expected error, got nil") - } - return - } - - if err != nil { - t.Errorf("ProcessIncludes() unexpected error = %v", err) - return - } - - if result != tt.expected { - t.Errorf("ProcessIncludes() result = %q, want %q", result, tt.expected) - } - }) - } -} diff --git a/pkg/parser/include_expander.go b/pkg/parser/include_expander.go index b5455cac9e..a49514d36b 100644 --- a/pkg/parser/include_expander.go +++ b/pkg/parser/include_expander.go @@ -10,10 +10,6 @@ import ( // ExpandIncludes recursively expands @include and @import directives until no more remain // This matches the bash expand_includes function behavior -func ExpandIncludes(content, baseDir string, extractTools bool) (string, error) { - expandedContent, _, err := ExpandIncludesWithManifest(content, baseDir, extractTools) - return expandedContent, err -} // ExpandIncludesWithManifest recursively expands @include and @import directives and returns list of included files func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (string, []string, error) { @@ -120,18 +116,8 @@ func expandIncludesForField(content, baseDir string, extractFunc func(string) (s } // ProcessIncludesForEngines processes import directives to extract engine configurations -func ProcessIncludesForEngines(content, baseDir string) ([]string, string, error) { - return processIncludesForField(content, baseDir, func(c string) (string, error) { - return extractFrontmatterField(c, "engine", "") - }, "") -} // 
ProcessIncludesForSafeOutputs processes import directives to extract safe-outputs configurations -func ProcessIncludesForSafeOutputs(content, baseDir string) ([]string, string, error) { - return processIncludesForField(content, baseDir, func(c string) (string, error) { - return extractFrontmatterField(c, "safe-outputs", "{}") - }, "{}") -} // processIncludesForField processes import directives to extract a specific frontmatter field func processIncludesForField(content, baseDir string, extractFunc func(string) (string, error), emptyValue string) ([]string, string, error) { diff --git a/pkg/parser/include_processor.go b/pkg/parser/include_processor.go index 012453d728..ee72de5fb6 100644 --- a/pkg/parser/include_processor.go +++ b/pkg/parser/include_processor.go @@ -17,11 +17,6 @@ var includeLog = logger.New("parser:include_processor") // ProcessIncludes processes @include, @import (deprecated), and {{#import: directives in markdown content // This matches the bash process_includes function behavior -func ProcessIncludes(content, baseDir string, extractTools bool) (string, error) { - includeLog.Printf("Processing includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content)) - visited := make(map[string]bool) - return processIncludesWithVisited(content, baseDir, extractTools, visited) -} // processIncludesWithVisited processes import directives with cycle detection func processIncludesWithVisited(content, baseDir string, extractTools bool, visited map[string]bool) (string, error) { diff --git a/pkg/parser/mcp.go b/pkg/parser/mcp.go index d3375207df..c4d038e887 100644 --- a/pkg/parser/mcp.go +++ b/pkg/parser/mcp.go @@ -30,55 +30,6 @@ func IsMCPType(typeStr string) bool { } } -// EnsureLocalhostDomains ensures that localhost and 127.0.0.1 are always included -// in the allowed domains list for Playwright, even when custom domains are specified -// Includes port variations to allow all ports on localhost and 127.0.0.1 -func 
EnsureLocalhostDomains(domains []string) []string { - hasLocalhost := false - hasLocalhostPorts := false - hasLoopback := false - hasLoopbackPorts := false - - for _, domain := range domains { - switch domain { - case "localhost": - hasLocalhost = true - case "localhost:*": - hasLocalhostPorts = true - case "127.0.0.1": - hasLoopback = true - case "127.0.0.1:*": - hasLoopbackPorts = true - } - } - - // CWE-190: Allocation Size Overflow Prevention - // Instead of pre-calculating capacity (len(domains)+4), which could overflow - // if domains is extremely large, we let Go's append handle capacity growth - // automatically. This is safe and efficient for domain arrays which are - // typically small in practice. - var result []string - - // Always add localhost domains first (with and without port specifications) - if !hasLocalhost { - result = append(result, "localhost") - } - if !hasLocalhostPorts { - result = append(result, "localhost:*") - } - if !hasLoopback { - result = append(result, "127.0.0.1") - } - if !hasLoopbackPorts { - result = append(result, "127.0.0.1:*") - } - - // Add the rest of the domains - result = append(result, domains...) - - return result -} - // MCPServerConfig represents a parsed MCP server configuration. // It embeds BaseMCPServerConfig for common fields and adds parser-specific fields. 
type MCPServerConfig struct { diff --git a/pkg/parser/mcp_test.go b/pkg/parser/mcp_test.go index 40c73f533f..e0bd82427f 100644 --- a/pkg/parser/mcp_test.go +++ b/pkg/parser/mcp_test.go @@ -13,63 +13,6 @@ import ( ) // TestEnsureLocalhostDomains tests the helper function that ensures localhost domains are always included -func TestEnsureLocalhostDomains(t *testing.T) { - tests := []struct { - name string - input []string - expected []string - }{ - { - name: "Empty input should add all localhost domains with ports", - input: []string{}, - expected: []string{"localhost", "localhost:*", "127.0.0.1", "127.0.0.1:*"}, - }, - { - name: "Custom domains without localhost should add localhost domains with ports", - input: []string{"github.com", "*.github.com"}, - expected: []string{"localhost", "localhost:*", "127.0.0.1", "127.0.0.1:*", "github.com", "*.github.com"}, - }, - { - name: "Input with localhost but no 127.0.0.1 should add missing domains", - input: []string{"localhost", "example.com"}, - expected: []string{"localhost:*", "127.0.0.1", "127.0.0.1:*", "localhost", "example.com"}, - }, - { - name: "Input with 127.0.0.1 but no localhost should add missing domains", - input: []string{"127.0.0.1", "example.com"}, - expected: []string{"localhost", "localhost:*", "127.0.0.1:*", "127.0.0.1", "example.com"}, - }, - { - name: "Input with both localhost domains should add port variants", - input: []string{"localhost", "127.0.0.1", "example.com"}, - expected: []string{"localhost:*", "127.0.0.1:*", "localhost", "127.0.0.1", "example.com"}, - }, - { - name: "Input with both in different order should add port variants", - input: []string{"example.com", "127.0.0.1", "localhost"}, - expected: []string{"localhost:*", "127.0.0.1:*", "example.com", "127.0.0.1", "localhost"}, - }, - { - name: "Input with all localhost variants should remain unchanged", - input: []string{"localhost", "localhost:*", "127.0.0.1", "127.0.0.1:*", "example.com"}, - expected: []string{"localhost", 
"localhost:*", "127.0.0.1", "127.0.0.1:*", "example.com"}, - }, - { - name: "Input with some localhost variants should add missing ones", - input: []string{"localhost:*", "127.0.0.1", "example.com"}, - expected: []string{"localhost", "127.0.0.1:*", "localhost:*", "127.0.0.1", "example.com"}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := EnsureLocalhostDomains(tt.input) - if !reflect.DeepEqual(result, tt.expected) { - t.Errorf("EnsureLocalhostDomains(%v) = %v, want %v", tt.input, result, tt.expected) - } - }) - } -} func TestExtractMCPConfigurations(t *testing.T) { tests := []struct { diff --git a/pkg/parser/schema_additional_properties_test.go b/pkg/parser/schema_additional_properties_test.go deleted file mode 100644 index c6c1f0dd87..0000000000 --- a/pkg/parser/schema_additional_properties_test.go +++ /dev/null @@ -1,283 +0,0 @@ -//go:build !integration - -package parser - -import ( - "strings" - "testing" -) - -// TestAdditionalPropertiesFalse_CommonTypos tests that common typos in frontmatter -// are properly rejected by the schema validation due to additionalProperties: false -func TestAdditionalPropertiesFalse_CommonTypos(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - typoField string // The typo field name that should be rejected - }{ - { - name: "typo: permisions instead of permissions", //nolint:misspell - frontmatter: map[string]any{ - "on": "push", - "permisions": "write-all", //nolint:misspell // typo: should be "permissions" - }, - typoField: "permisions", //nolint:misspell - }, - { - name: "typo: engnie instead of engine", - frontmatter: map[string]any{ - "on": "push", - "engnie": "claude", // typo: should be "engine" - }, - typoField: "engnie", - }, - { - name: "typo: toolz instead of tools", - frontmatter: map[string]any{ - "on": "push", - "toolz": map[string]any{ // typo: should be "tools" - "github": nil, - }, - }, - typoField: "toolz", - }, - { - name: "typo: 
timeout_minute instead of timeout_minutes", - frontmatter: map[string]any{ - "on": "push", - "timeout_minute": 10, // typo: should be "timeout_minutes" - }, - typoField: "timeout_minute", - }, - { - name: "typo: runs_on instead of runs-on", - frontmatter: map[string]any{ - "on": "push", - "runs_on": "ubuntu-latest", // typo: should be "runs-on" with dash - }, - typoField: "runs_on", - }, - { - name: "typo: safe_outputs instead of safe-outputs", - frontmatter: map[string]any{ - "on": "push", - "safe_outputs": map[string]any{ // typo: should be "safe-outputs" with dash - "create-issue": nil, - }, - }, - typoField: "safe_outputs", - }, - { - name: "typo: mcp_servers instead of mcp-servers", - frontmatter: map[string]any{ - "on": "push", - "mcp_servers": map[string]any{ // typo: should be "mcp-servers" with dash - "test": map[string]any{ - "command": "test", - }, - }, - }, - typoField: "mcp_servers", - }, - { - name: "multiple typos: permisions, engnie, toolz", //nolint:misspell - frontmatter: map[string]any{ - "on": "push", - "permisions": "write-all", //nolint:misspell // typo - "engnie": "claude", // typo - "toolz": map[string]any{ // typo - "github": nil, - }, - }, - typoField: "permisions", //nolint:misspell // error should mention at least one typo - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if err == nil { - t.Fatalf("Expected validation error for typo field '%s', but validation passed", tt.typoField) - } - - errorMsg := err.Error() - - // The error should mention unknown/additional properties - if !strings.Contains(strings.ToLower(errorMsg), "unknown") && - !strings.Contains(strings.ToLower(errorMsg), "additional") && - !strings.Contains(strings.ToLower(errorMsg), "not allowed") { - t.Errorf("Error message should mention unknown/additional properties, got: %s", errorMsg) - } - - // The error should mention the typo field - if !strings.Contains(errorMsg, 
tt.typoField) { - t.Errorf("Error message should mention the typo field '%s', got: %s", tt.typoField, errorMsg) - } - }) - } -} - -// TestAdditionalPropertiesFalse_IncludedFileSchema tests that the included file schema -// also rejects unknown properties -func TestAdditionalPropertiesFalse_IncludedFileSchema(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - typoField string - }{ - { - name: "typo in included file: toolz instead of tools", - frontmatter: map[string]any{ - "toolz": map[string]any{ // typo: should be "tools" - "github": nil, - }, - }, - typoField: "toolz", - }, - { - name: "typo in included file: mcp_servers instead of mcp-servers", - frontmatter: map[string]any{ - "mcp_servers": map[string]any{ // typo: should be "mcp-servers" - "test": map[string]any{ - "command": "test", - }, - }, - }, - typoField: "mcp_servers", - }, - { - name: "typo in included file: safe_outputs instead of safe-outputs", - frontmatter: map[string]any{ - "safe_outputs": map[string]any{ // typo: should be "safe-outputs" - "jobs": map[string]any{ - "test": map[string]any{ - "inputs": map[string]any{ - "test": map[string]any{ - "type": "string", - }, - }, - }, - }, - }, - }, - typoField: "safe_outputs", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateIncludedFileFrontmatterWithSchema(tt.frontmatter) - - if err == nil { - t.Fatalf("Expected validation error for typo field '%s', but validation passed", tt.typoField) - } - - errorMsg := err.Error() - - // The error should mention the typo field - if !strings.Contains(errorMsg, tt.typoField) { - t.Errorf("Error message should mention the typo field '%s', got: %s", tt.typoField, errorMsg) - } - }) - } -} - -// TestAdditionalPropertiesFalse_MCPConfigSchema tests that the MCP config schema -// also rejects unknown properties -func TestAdditionalPropertiesFalse_MCPConfigSchema(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - 
typoField string - }{ - { - name: "typo in MCP config: comand instead of command", - frontmatter: map[string]any{ - "comand": "npx", // typo: should be "command" - }, - typoField: "comand", - }, - { - name: "typo in MCP config: typ instead of type", - frontmatter: map[string]any{ - "typ": "stdio", // typo: should be "type" - "command": "test", - }, - typoField: "typ", - }, - { - name: "typo in MCP config: environement instead of env", - frontmatter: map[string]any{ - "command": "test", - "environement": map[string]any{ // typo: should be "env" - "TEST": "value", - }, - }, - typoField: "environement", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMCPConfigWithSchema(tt.frontmatter, "test-tool") - - if err == nil { - t.Fatalf("Expected validation error for typo field '%s', but validation passed", tt.typoField) - } - - errorMsg := err.Error() - - // The error should mention the typo field - if !strings.Contains(errorMsg, tt.typoField) { - t.Errorf("Error message should mention the typo field '%s', got: %s", tt.typoField, errorMsg) - } - }) - } -} - -// TestValidProperties_NotRejected ensures that valid properties are still accepted -func TestValidProperties_NotRejected(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - }{ - { - name: "valid main workflow with all common fields", - frontmatter: map[string]any{ - "on": "push", - "permissions": "read-all", - "engine": "claude", - "tools": map[string]any{ - "github": nil, - }, - "timeout-minutes": 10, - "runs-on": "ubuntu-latest", - "safe-outputs": map[string]any{ - "create-issue": nil, - }, - "mcp-servers": map[string]any{ - "test": map[string]any{ - "command": "test", - }, - }, - }, - }, - { - name: "valid minimal workflow", - frontmatter: map[string]any{ - "on": "push", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if err != nil { - 
t.Fatalf("Expected no validation error for valid frontmatter, got: %v", err) - } - }) - } -} diff --git a/pkg/parser/schema_oneof_test.go b/pkg/parser/schema_oneof_test.go deleted file mode 100644 index 0a6ffb1b15..0000000000 --- a/pkg/parser/schema_oneof_test.go +++ /dev/null @@ -1,334 +0,0 @@ -//go:build !integration - -package parser - -import ( - "strings" - "testing" -) - -// TestValidateOneOfConstraints tests the oneOf constraints added to the schema -// to prevent mutually exclusive fields from being specified together. -func TestValidateOneOfConstraints(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - // branches vs branches-ignore in push event - { - name: "invalid: both branches and branches-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches": []string{"main"}, - "branches-ignore": []string{"dev"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only branches in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches": []string{"main"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: only branches-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches-ignore": []string{"dev"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: neither branches nor branches-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "tags": []string{"v*"}, - }, - }, - }, - wantErr: false, - }, - - // paths vs paths-ignore in push event - { - name: "invalid: both paths and paths-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "paths": []string{"src/**"}, - "paths-ignore": []string{"docs/**"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only paths in push", - 
frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "paths": []string{"src/**"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: only paths-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "paths-ignore": []string{"docs/**"}, - }, - }, - }, - wantErr: false, - }, - - // branches vs branches-ignore in pull_request event - { - name: "invalid: both branches and branches-ignore in pull_request", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "branches": []string{"main"}, - "branches-ignore": []string{"dev"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only branches in pull_request", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "branches": []string{"main"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: only branches-ignore in pull_request", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "branches-ignore": []string{"dev"}, - }, - }, - }, - wantErr: false, - }, - - // paths vs paths-ignore in pull_request event - { - name: "invalid: both paths and paths-ignore in pull_request", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "paths": []string{"src/**"}, - "paths-ignore": []string{"docs/**"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only paths in pull_request", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "paths": []string{"src/**"}, - }, - }, - }, - wantErr: false, - }, - - // branches vs branches-ignore in pull_request_target event - { - name: "invalid: both branches and branches-ignore in pull_request_target", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request_target": map[string]any{ - "branches": []string{"main"}, - "branches-ignore": []string{"dev"}, - 
}, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only branches in pull_request_target", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request_target": map[string]any{ - "branches": []string{"main"}, - }, - }, - }, - wantErr: false, - }, - - // paths vs paths-ignore in pull_request_target event - { - name: "invalid: both paths and paths-ignore in pull_request_target", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request_target": map[string]any{ - "paths": []string{"src/**"}, - "paths-ignore": []string{"docs/**"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - - // branches vs branches-ignore in workflow_run event - { - name: "invalid: both branches and branches-ignore in workflow_run", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_run": map[string]any{ - "workflows": []string{"CI"}, - "branches": []string{"main"}, - "branches-ignore": []string{"dev"}, - }, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "valid: only branches in workflow_run", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_run": map[string]any{ - "workflows": []string{"CI"}, - "branches": []string{"main"}, - }, - }, - }, - wantErr: false, - }, - - // slash_command vs label events - { - name: "invalid: slash_command with label event", - frontmatter: map[string]any{ - "on": map[string]any{ - "slash_command": "mybot", - "label": map[string]any{ - "types": []string{"created"}, - }, - }, - }, - wantErr: true, - errContains: "not", - }, - { - name: "valid: slash_command without label event", - frontmatter: map[string]any{ - "on": map[string]any{ - "slash_command": "mybot", - }, - }, - wantErr: false, - }, - { - name: "valid: label event without slash_command", - frontmatter: map[string]any{ - "on": map[string]any{ - "label": map[string]any{ - "types": []string{"created"}, - }, - }, - }, - wantErr: false, - }, - - // command vs label events (deprecated command field) 
- { - name: "invalid: command with label event", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "mybot", - "label": map[string]any{ - "types": []string{"created"}, - }, - }, - }, - wantErr: true, - errContains: "not", - }, - - // Valid combinations of branches and paths - { - name: "valid: branches and paths in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches": []string{"main"}, - "paths": []string{"src/**"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: branches-ignore and paths-ignore in push", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches-ignore": []string{"dev"}, - "paths-ignore": []string{"docs/**"}, - }, - }, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if tt.wantErr && err == nil { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() expected error, got nil") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() error = %v", err) - return - } - - if err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() error = %v, expected to contain %q", err.Error(), tt.errContains) - } - } - }) - } -} diff --git a/pkg/parser/schema_passthrough_validation_test.go b/pkg/parser/schema_passthrough_validation_test.go deleted file mode 100644 index 8c2c0fb889..0000000000 --- a/pkg/parser/schema_passthrough_validation_test.go +++ /dev/null @@ -1,572 +0,0 @@ -//go:build !integration - -package parser - -import ( - "strings" - "testing" -) - -// TestPassThroughFieldValidation tests that pass-through YAML fields -// (concurrency, container, environment, env, secrets, runs-on, services) are -// properly validated by the schema during frontmatter parsing. 
-// -// These fields are "pass-through" in that they are extracted from -// frontmatter and passed directly to GitHub Actions without modification, -// but they still need basic structure validation to catch obvious errors -// at compile time rather than at GitHub Actions runtime. -func TestPassThroughFieldValidation(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - // Concurrency field tests - { - name: "valid concurrency - simple string", - frontmatter: map[string]any{ - "on": "push", - "concurrency": "my-group", - }, - wantErr: false, - }, - { - name: "valid concurrency - object with group", - frontmatter: map[string]any{ - "on": "push", - "concurrency": map[string]any{ - "group": "my-group", - }, - }, - wantErr: false, - }, - { - name: "valid concurrency - object with group and cancel-in-progress", - frontmatter: map[string]any{ - "on": "push", - "concurrency": map[string]any{ - "group": "my-group", - "cancel-in-progress": true, - }, - }, - wantErr: false, - }, - { - name: "invalid concurrency - array", - frontmatter: map[string]any{ - "on": "push", - "concurrency": []string{"invalid"}, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "invalid concurrency - object missing required group", - frontmatter: map[string]any{ - "on": "push", - "concurrency": map[string]any{ - "cancel-in-progress": true, - }, - }, - wantErr: true, - errContains: "missing property 'group'", - }, - { - name: "invalid concurrency - object with invalid field", - frontmatter: map[string]any{ - "on": "push", - "concurrency": map[string]any{ - "group": "my-group", - "invalid": "field", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - - // Container field tests - { - name: "valid container - simple string", - frontmatter: map[string]any{ - "on": "push", - "container": "ubuntu:latest", - }, - wantErr: false, - }, - { - name: "valid container - object with 
image", - frontmatter: map[string]any{ - "on": "push", - "container": map[string]any{ - "image": "ubuntu:latest", - }, - }, - wantErr: false, - }, - { - name: "valid container - object with image and credentials", - frontmatter: map[string]any{ - "on": "push", - "container": map[string]any{ - "image": "ubuntu:latest", - "credentials": map[string]any{ - "username": "user", - "password": "${{ secrets.PASSWORD }}", - }, - }, - }, - wantErr: false, - }, - { - name: "invalid container - array", - frontmatter: map[string]any{ - "on": "push", - "container": []string{"invalid"}, - }, - wantErr: true, - errContains: "got array", - }, - { - name: "invalid container - object missing required image", - frontmatter: map[string]any{ - "on": "push", - "container": map[string]any{ - "env": map[string]string{"TEST": "value"}, - }, - }, - wantErr: true, - errContains: "missing property 'image'", - }, - { - name: "invalid container - object with invalid field", - frontmatter: map[string]any{ - "on": "push", - "container": map[string]any{ - "image": "ubuntu:latest", - "invalid": "field", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - - // Environment field tests - { - name: "valid environment - simple string", - frontmatter: map[string]any{ - "on": "push", - "environment": "production", - }, - wantErr: false, - }, - { - name: "valid environment - object with name", - frontmatter: map[string]any{ - "on": "push", - "environment": map[string]any{ - "name": "production", - }, - }, - wantErr: false, - }, - { - name: "valid environment - object with name and url", - frontmatter: map[string]any{ - "on": "push", - "environment": map[string]any{ - "name": "production", - "url": "https://prod.example.com", - }, - }, - wantErr: false, - }, - { - name: "invalid environment - array", - frontmatter: map[string]any{ - "on": "push", - "environment": []string{"invalid"}, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "invalid environment 
- object missing required name", - frontmatter: map[string]any{ - "on": "push", - "environment": map[string]any{ - "url": "https://prod.example.com", - }, - }, - wantErr: true, - errContains: "missing property 'name'", - }, - { - name: "invalid environment - object with invalid field", - frontmatter: map[string]any{ - "on": "push", - "environment": map[string]any{ - "name": "production", - "invalid": "field", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - - // Env field tests - { - name: "valid env - object with string values", - frontmatter: map[string]any{ - "on": "push", - "env": map[string]any{ - "NODE_ENV": "production", - "API_KEY": "${{ secrets.API_KEY }}", - }, - }, - wantErr: false, - }, - { - name: "valid env - string (pass-through)", - frontmatter: map[string]any{ - "on": "push", - "env": "some-string", - }, - wantErr: false, - }, - { - name: "invalid env - array", - frontmatter: map[string]any{ - "on": "push", - "env": []string{"invalid"}, - }, - wantErr: true, - errContains: "oneOf", - }, - - // Secrets field tests - { - name: "valid secrets - simple string values", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": "${{ secrets.API_TOKEN }}", - "DATABASE_URL": "${{ secrets.DB_URL }}", - }, - }, - wantErr: false, - }, - { - name: "valid secrets - object with value and description", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": map[string]any{ - "value": "${{ secrets.API_TOKEN }}", - "description": "API token for external service", - }, - }, - }, - wantErr: false, - }, - { - name: "valid secrets - mixed simple and object values", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": "${{ secrets.API_TOKEN }}", - "DB_URL": map[string]any{ - "value": "${{ secrets.DB_URL }}", - "description": "Database connection string", - }, - }, - }, - wantErr: false, - }, - { - name: "invalid secrets 
- object missing required value field", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": map[string]any{ - "description": "Missing value field", - }, - }, - }, - wantErr: true, - errContains: "missing property 'value'", - }, - { - name: "invalid secrets - object with additional properties", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": map[string]any{ - "value": "${{ secrets.API_TOKEN }}", - "invalid": "field", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - { - name: "invalid secrets - array", - frontmatter: map[string]any{ - "on": "push", - "secrets": []string{"invalid"}, - }, - wantErr: true, - errContains: "got array, want object", - }, - { - name: "invalid secrets - non-string, non-object value", - frontmatter: map[string]any{ - "on": "push", - "secrets": map[string]any{ - "API_TOKEN": 123, - }, - }, - wantErr: true, - errContains: "oneOf", - }, - - // Runs-on field tests - { - name: "valid runs-on - simple string", - frontmatter: map[string]any{ - "on": "push", - "runs-on": "ubuntu-latest", - }, - wantErr: false, - }, - { - name: "valid runs-on - array of strings", - frontmatter: map[string]any{ - "on": "push", - "runs-on": []string{"ubuntu-latest", "self-hosted"}, - }, - wantErr: false, - }, - { - name: "valid runs-on - object with labels", - frontmatter: map[string]any{ - "on": "push", - "runs-on": map[string]any{ - "labels": []string{"ubuntu-latest"}, - }, - }, - wantErr: false, - }, - { - name: "valid runs-on - object with group and labels", - frontmatter: map[string]any{ - "on": "push", - "runs-on": map[string]any{ - "group": "larger-runners", - "labels": []string{"ubuntu-latest-8-cores"}, - }, - }, - wantErr: false, - }, - { - name: "invalid runs-on - number", - frontmatter: map[string]any{ - "on": "push", - "runs-on": 123, - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "invalid runs-on - object with 
invalid field", - frontmatter: map[string]any{ - "on": "push", - "runs-on": map[string]any{ - "labels": []string{"ubuntu-latest"}, - "invalid": "field", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - - // Services field tests - { - name: "valid services - object with service names", - frontmatter: map[string]any{ - "on": "push", - "services": map[string]any{ - "postgres": "postgres:14", - }, - }, - wantErr: false, - }, - { - name: "valid services - object with service configuration", - frontmatter: map[string]any{ - "on": "push", - "services": map[string]any{ - "postgres": map[string]any{ - "image": "postgres:14", - "env": map[string]any{ - "POSTGRES_PASSWORD": "${{ secrets.DB_PASSWORD }}", - }, - }, - }, - }, - wantErr: false, - }, - { - name: "invalid services - string", - frontmatter: map[string]any{ - "on": "push", - "services": "invalid", - }, - wantErr: true, - errContains: "got string, want object", - }, - { - name: "invalid services - array", - frontmatter: map[string]any{ - "on": "push", - "services": []string{"invalid"}, - }, - wantErr: true, - errContains: "got array, want object", - }, - { - name: "invalid services - service object missing required image", - frontmatter: map[string]any{ - "on": "push", - "services": map[string]any{ - "postgres": map[string]any{ - "env": map[string]any{ - "POSTGRES_PASSWORD": "secret", - }, - }, - }, - }, - wantErr: true, - errContains: "missing property 'image'", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if tt.wantErr && err == nil { - t.Errorf("Expected error but got none") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("Unexpected error: %v", err) - return - } - - if tt.wantErr && err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("Error = %v, expected to contain %q", err, tt.errContains) - } - } - }) - 
} -} - -// TestPassThroughFieldEdgeCases tests additional edge cases for pass-through fields -func TestPassThroughFieldEdgeCases(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - { - name: "concurrency with expression in group", - frontmatter: map[string]any{ - "on": "push", - "concurrency": map[string]any{ - "group": "workflow-${{ github.ref }}", - }, - }, - wantErr: false, - }, - { - name: "runs-on with empty labels array is valid", - frontmatter: map[string]any{ - "on": "push", - "runs-on": map[string]any{ - "labels": []string{}, - }, - }, - wantErr: false, - }, - { - name: "container with all optional fields", - frontmatter: map[string]any{ - "on": "push", - "container": map[string]any{ - "image": "ubuntu:latest", - "env": map[string]any{ - "TEST": "value", - }, - "ports": []any{8080, "9090"}, - "volumes": []string{"/tmp:/tmp"}, - "options": "--cpus 1", - }, - }, - wantErr: false, - }, - { - name: "environment with expression in url", - frontmatter: map[string]any{ - "on": "push", - "environment": map[string]any{ - "name": "staging", - "url": "${{ steps.deploy.outputs.url }}", - }, - }, - wantErr: false, - }, - { - name: "services with credentials", - frontmatter: map[string]any{ - "on": "push", - "services": map[string]any{ - "redis": map[string]any{ - "image": "redis:alpine", - "credentials": map[string]any{ - "username": "user", - "password": "${{ secrets.PASSWORD }}", - }, - }, - }, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if tt.wantErr && err == nil { - t.Errorf("Expected error but got none") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("Unexpected error: %v", err) - return - } - - if tt.wantErr && err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("Error = %v, expected to contain %q", err, 
tt.errContains) - } - } - }) - } -} diff --git a/pkg/parser/schema_test.go b/pkg/parser/schema_test.go index d4d35f4792..a9aaa1f669 100644 --- a/pkg/parser/schema_test.go +++ b/pkg/parser/schema_test.go @@ -8,1734 +8,6 @@ import ( "testing" ) -func TestValidateMainWorkflowFrontmatterWithSchema(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - { - name: "valid frontmatter with all allowed keys", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches": []string{"main"}, - }, - "stop-after": "2024-12-31", - }, - "permissions": "read-all", - "run-name": "Test Run", - "runs-on": "ubuntu-latest", - "timeout-minutes": 30, - "concurrency": "test", - "env": map[string]string{"TEST": "value"}, - "if": "true", - "steps": []string{"step1"}, - "engine": "claude", - "tools": map[string]any{"github": "test"}, - "command": "test-workflow", - }, - wantErr: false, - }, - { - name: "valid frontmatter with subset of keys", - frontmatter: map[string]any{ - "on": "push", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "empty frontmatter - missing required 'on' field", - frontmatter: map[string]any{}, - wantErr: true, - errContains: "missing property 'on'", - }, - { - name: "valid engine string format - claude", - frontmatter: map[string]any{ - "on": "push", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid engine string format - codex", - frontmatter: map[string]any{ - "on": "push", - "engine": "codex", - }, - wantErr: false, - }, - { - name: "valid engine object format - minimal", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "claude", - }, - }, - wantErr: false, - }, - { - name: "valid engine object format - with version", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "claude", - "version": "beta", - }, - }, - wantErr: false, - }, - { - name: "valid engine object format 
- with model", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "codex", - "model": "gpt-4o", - }, - }, - wantErr: false, - }, - { - name: "valid engine object format - complete", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "claude", - "version": "beta", - "model": "claude-3-5-sonnet-20241022", - }, - }, - wantErr: false, - }, - { - name: "invalid engine string format", - frontmatter: map[string]any{ - "on": "push", - "engine": "invalid-engine", - }, - wantErr: true, - errContains: "value must be one of 'claude', 'codex'", - }, - { - name: "invalid engine object format - invalid id", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "invalid-engine", - }, - }, - wantErr: true, - errContains: "value must be one of 'claude', 'codex'", - }, - { - name: "invalid engine object format - missing id", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "version": "beta", - "model": "gpt-4o", - }, - }, - wantErr: true, - errContains: "missing property 'id'", - }, - { - name: "invalid engine object format - additional properties", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "claude", - "invalid": "property", - }, - }, - wantErr: true, - errContains: "additional properties", - }, - { - name: "invalid frontmatter with unexpected key", - frontmatter: map[string]any{ - "on": "push", - "invalid_key": "value", - }, - wantErr: true, - errContains: "additional properties 'invalid_key' not allowed", - }, - { - name: "invalid frontmatter with multiple unexpected keys", - frontmatter: map[string]any{ - "on": "push", - "invalid_key": "value", - "another_invalid": "value2", - }, - wantErr: true, - errContains: "additional properties", - }, - { - name: "valid frontmatter with complex on object", - frontmatter: map[string]any{ - "on": map[string]any{ - "schedule": []map[string]any{ - {"cron": "0 9 * * *"}, - }, - 
"workflow_dispatch": map[string]any{}, - }, - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with command trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-command", - }, - }, - "permissions": map[string]any{ - "issues": "write", - "contents": "read", - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with discussion trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "discussion": map[string]any{ - "types": []string{"created", "edited", "answered"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with discussion_comment trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "discussion_comment": map[string]any{ - "types": []string{"created", "edited"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple discussion trigger", - frontmatter: map[string]any{ - "on": "discussion", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with branch_protection_rule trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "branch_protection_rule": map[string]any{ - "types": []string{"created", "deleted"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with check_run trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "check_run": map[string]any{ - "types": []string{"completed", "rerequested"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with check_suite trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "check_suite": map[string]any{ - "types": []string{"completed"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple create trigger", - frontmatter: map[string]any{ - "on": "create", - "engine": "claude", - }, - 
wantErr: false, - }, - { - name: "valid frontmatter with simple delete trigger", - frontmatter: map[string]any{ - "on": "delete", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple fork trigger", - frontmatter: map[string]any{ - "on": "fork", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple gollum trigger", - frontmatter: map[string]any{ - "on": "gollum", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with label trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "label": map[string]any{ - "types": []string{"created", "deleted"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with merge_group trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "merge_group": map[string]any{ - "types": []string{"checks_requested"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with milestone trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "milestone": map[string]any{ - "types": []string{"opened", "closed"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple page_build trigger", - frontmatter: map[string]any{ - "on": "page_build", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple public trigger", - frontmatter: map[string]any{ - "on": "public", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with pull_request_target trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request_target": map[string]any{ - "types": []string{"opened", "synchronize"}, - "branches": []string{"main"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with pull_request_review trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - 
"pull_request_review": map[string]any{ - "types": []string{"submitted", "edited"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with registry_package trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "registry_package": map[string]any{ - "types": []string{"published", "updated"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with repository_dispatch trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "repository_dispatch": map[string]any{ - "types": []string{"custom-event", "deploy"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple status trigger", - frontmatter: map[string]any{ - "on": "status", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with watch trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "watch": map[string]any{ - "types": []string{"started"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with simple workflow_call trigger", - frontmatter: map[string]any{ - "on": "workflow_call", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with updated issues trigger types", - frontmatter: map[string]any{ - "on": map[string]any{ - "issues": map[string]any{ - "types": []string{"opened", "typed", "untyped"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with issues trigger lock-for-agent field", - frontmatter: map[string]any{ - "on": map[string]any{ - "issues": map[string]any{ - "types": []string{"opened"}, - "lock-for-agent": true, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with issues trigger lock-for-agent false", - frontmatter: map[string]any{ - "on": map[string]any{ - "issues": map[string]any{ - "types": []string{"opened"}, - 
"lock-for-agent": false, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with issue_comment trigger lock-for-agent field", - frontmatter: map[string]any{ - "on": map[string]any{ - "issue_comment": map[string]any{ - "types": []string{"created"}, - "lock-for-agent": true, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with issue_comment trigger lock-for-agent false", - frontmatter: map[string]any{ - "on": map[string]any{ - "issue_comment": map[string]any{ - "types": []string{"created"}, - "lock-for-agent": false, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with updated pull_request trigger types", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "types": []string{"opened", "milestoned", "demilestoned", "ready_for_review", "auto_merge_enabled"}, - }, - }, - "permissions": "read-all", - }, - wantErr: false, - }, - { - name: "valid frontmatter with detailed permissions", - frontmatter: map[string]any{ - "on": "push", - "permissions": map[string]any{ - "contents": "read", - "issues": "write", - "pull-requests": "read", - "models": "read", - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with single cache configuration", - frontmatter: map[string]any{ - "on": "push", - "cache": map[string]any{ - "key": "node-modules-${{ hashFiles('package-lock.json') }}", - "path": "node_modules", - "restore-keys": []string{"node-modules-"}, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with multiple cache configurations", - frontmatter: map[string]any{ - "on": "push", - "cache": []any{ - map[string]any{ - "key": "cache1", - "path": "path1", - }, - map[string]any{ - "key": "cache2", - "path": []string{"path2", "path3"}, - "restore-keys": "restore-key", - "fail-on-cache-miss": true, - }, - }, - }, - wantErr: false, - }, - { - name: "invalid cache configuration 
missing required key", - frontmatter: map[string]any{ - "cache": map[string]any{ - "path": "node_modules", - }, - }, - wantErr: true, - errContains: "missing property 'key'", - }, - // Test cases for additional properties validation - { - name: "invalid permissions with additional property", - frontmatter: map[string]any{ - "on": "push", - "permissions": map[string]any{ - "contents": "read", - "invalid_perm": "write", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_perm' not allowed", - }, - { - name: "invalid on trigger with additional properties", - frontmatter: map[string]any{ - "on": map[string]any{ - "push": map[string]any{ - "branches": []string{"main"}, - "invalid_prop": "value", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid schedule with additional properties", - frontmatter: map[string]any{ - "on": map[string]any{ - "schedule": []map[string]any{ - { - "cron": "0 9 * * *", - "invalid_prop": "value", - }, - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid empty schedule array", - frontmatter: map[string]any{ - "on": map[string]any{ - "schedule": []map[string]any{}, - }, - }, - wantErr: true, - errContains: "minItems: got 0, want 1", - }, - { - name: "invalid empty toolsets array", - frontmatter: map[string]any{ - "on": "push", - "tools": map[string]any{ - "github": map[string]any{ - "toolsets": []string{}, - }, - }, - }, - wantErr: true, - errContains: "minItems", - }, - { - name: "invalid empty issue names array", - frontmatter: map[string]any{ - "on": map[string]any{ - "issues": map[string]any{ - "types": []string{"labeled"}, - "names": []string{}, - }, - }, - }, - wantErr: true, - errContains: "minItems", - }, - { - name: "invalid empty pull_request names array", - frontmatter: map[string]any{ - "on": map[string]any{ - "pull_request": map[string]any{ - "types": 
[]string{"labeled"}, - "names": []string{}, - }, - }, - }, - wantErr: true, - errContains: "minItems", - }, - { - name: "valid schedule with multiple cron entries", - frontmatter: map[string]any{ - "on": map[string]any{ - "schedule": []map[string]any{ - {"cron": "0 9 * * *"}, - {"cron": "0 17 * * *"}, - }, - }, - "engine": "claude", - }, - wantErr: false, - }, - { - name: "invalid workflow_dispatch with additional properties", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "string", - }, - }, - "invalid_prop": "value", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid concurrency with additional properties", - frontmatter: map[string]any{ - "concurrency": map[string]any{ - "group": "test-group", - "cancel-in-progress": true, - "invalid_prop": "value", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid runs-on object with additional properties", - frontmatter: map[string]any{ - "runs-on": map[string]any{ - "group": "test-group", - "labels": []string{"ubuntu-latest"}, - "invalid_prop": "value", - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid github tools with additional properties", - frontmatter: map[string]any{ - "tools": map[string]any{ - "github": map[string]any{ - "allowed": []string{"create_issue"}, - "invalid_prop": "value", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "invalid claude top-level field (deprecated)", - frontmatter: map[string]any{ - "claude": map[string]any{ - "model": "claude-3", - }, - }, - wantErr: true, - errContains: "additional properties 'claude' not allowed", - }, - { - name: "invalid safe-outputs configuration 
with additional properties", - frontmatter: map[string]any{ - "safe-outputs": map[string]any{ - "create-issue": map[string]any{ - "title-prefix": "[ai] ", - "invalid_prop": "value", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_prop' not allowed", - }, - { - name: "valid permissions with repository-projects property", - frontmatter: map[string]any{ - "on": "push", - "permissions": map[string]any{ - "contents": "read", - "attestations": "write", - "id-token": "write", - "packages": "read", - "pages": "write", - "repository-projects": "none", - }, - }, - wantErr: false, - }, - { - name: "valid permissions with organization-projects property", - frontmatter: map[string]any{ - "on": "push", - "permissions": map[string]any{ - "contents": "read", - "organization-projects": "write", - }, - }, - wantErr: false, - }, - { - name: "valid claude engine with network permissions", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "claude", - }, - }, - wantErr: false, - }, - { - name: "valid codex engine without permissions", - frontmatter: map[string]any{ - "on": "push", - "engine": map[string]any{ - "id": "codex", - "model": "gpt-4o", - }, - }, - wantErr: false, - }, - { - name: "valid codex string engine (no permissions possible)", - frontmatter: map[string]any{ - "on": "push", - "engine": "codex", - }, - wantErr: false, - }, - { - name: "valid network defaults", - frontmatter: map[string]any{ - "on": "push", - "network": "defaults", - }, - wantErr: false, - }, - { - name: "valid network empty object", - frontmatter: map[string]any{ - "on": "push", - "network": map[string]any{}, - }, - wantErr: false, - }, - { - name: "valid network with allowed domains", - frontmatter: map[string]any{ - "on": "push", - "network": map[string]any{ - "allowed": []string{"example.com", "*.trusted.com"}, - }, - }, - wantErr: false, - }, - { - name: "invalid network string (not defaults)", - frontmatter: map[string]any{ - "on": 
"push", - "network": "invalid", - }, - wantErr: true, - errContains: "oneOf", - }, - { - name: "invalid network object with unknown property", - frontmatter: map[string]any{ - "on": "push", - "network": map[string]any{ - "invalid": []string{"example.com"}, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid' not allowed", - }, - { - name: "missing required on field", - frontmatter: map[string]any{ - "engine": "claude", - "permissions": map[string]any{ - "contents": "read", - }, - }, - wantErr: true, - errContains: "missing property 'on'", - }, - { - name: "missing required on field with other valid fields", - frontmatter: map[string]any{ - "engine": "copilot", - "timeout-minutes": 30, - "permissions": map[string]any{ - "issues": "write", - }, - }, - wantErr: true, - errContains: "missing property 'on'", - }, - { - name: "invalid: command trigger with issues event", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-bot", - }, - "issues": map[string]any{ - "types": []string{"opened"}, - }, - }, - }, - wantErr: true, - errContains: "command trigger cannot be used with 'issues' event", - }, - { - name: "invalid: command trigger with issue_comment event", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "test-bot", - "issue_comment": map[string]any{ - "types": []string{"created"}, - }, - }, - }, - wantErr: true, - errContains: "command trigger cannot be used with 'issue_comment' event", - }, - { - name: "invalid: command trigger with pull_request event", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-bot", - }, - "pull_request": map[string]any{ - "types": []string{"opened"}, - }, - }, - }, - wantErr: true, - errContains: "command trigger cannot be used with 'pull_request' event", - }, - { - name: "invalid: command trigger with pull_request_review_comment event", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": 
"test-bot", - "pull_request_review_comment": map[string]any{ - "types": []string{"created"}, - }, - }, - }, - wantErr: true, - errContains: "command trigger cannot be used with 'pull_request_review_comment' event", - }, - { - name: "invalid: command trigger with multiple conflicting events", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-bot", - }, - "issues": map[string]any{ - "types": []string{"opened"}, - }, - "pull_request": map[string]any{ - "types": []string{"opened"}, - }, - }, - }, - wantErr: true, - errContains: "command trigger cannot be used with these events", - }, - { - name: "valid: command trigger with non-conflicting events", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-bot", - }, - "workflow_dispatch": nil, - "schedule": []map[string]any{ - {"cron": "0 0 * * *"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid: command trigger alone", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "test-bot", - }, - }, - wantErr: false, - }, - { - name: "valid: command trigger as null (default workflow name)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": nil, - }, - }, - wantErr: false, - }, - { - name: "valid: issues event without command", - frontmatter: map[string]any{ - "on": map[string]any{ - "issues": map[string]any{ - "types": []string{"opened"}, - }, - }, - }, - wantErr: false, - }, - { - name: "invalid: empty string for name field", - frontmatter: map[string]any{ - "on": "push", - "name": "", - }, - wantErr: true, - errContains: "minLength", - }, - { - name: "invalid: empty string for on field (string format)", - frontmatter: map[string]any{ - "on": "", - }, - wantErr: true, - errContains: "minLength", - }, - { - name: "invalid: empty string for command trigger (string format)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "", - }, - }, - wantErr: true, - errContains: 
"minLength", - }, - { - name: "invalid: empty string for command.name field", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "", - }, - }, - }, - wantErr: true, - errContains: "minLength", - }, - { - name: "invalid: command name starting with slash (string format)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "/mybot", - }, - }, - wantErr: true, - errContains: "pattern", - }, - { - name: "invalid: command.name starting with slash (object format)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "/mybot", - }, - }, - }, - wantErr: true, - errContains: "pattern", - }, - { - name: "valid: command name without slash (string format)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": "mybot", - }, - }, - wantErr: false, - }, - { - name: "valid: command.name without slash (object format)", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "mybot", - }, - }, - }, - wantErr: false, - }, - { - name: "invalid: empty events array for command trigger", - frontmatter: map[string]any{ - "on": map[string]any{ - "command": map[string]any{ - "name": "test-bot", - "events": []any{}, - }, - }, - }, - wantErr: true, - errContains: "minItems", - }, - { - name: "valid: workflow_dispatch with 25 inputs (max allowed)", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "input1": map[string]any{"description": "Input 1", "type": "string"}, - "input2": map[string]any{"description": "Input 2", "type": "string"}, - "input3": map[string]any{"description": "Input 3", "type": "string"}, - "input4": map[string]any{"description": "Input 4", "type": "string"}, - "input5": map[string]any{"description": "Input 5", "type": "string"}, - "input6": map[string]any{"description": "Input 6", "type": "string"}, - "input7": map[string]any{"description": "Input 
7", "type": "string"}, - "input8": map[string]any{"description": "Input 8", "type": "string"}, - "input9": map[string]any{"description": "Input 9", "type": "string"}, - "input10": map[string]any{"description": "Input 10", "type": "string"}, - "input11": map[string]any{"description": "Input 11", "type": "string"}, - "input12": map[string]any{"description": "Input 12", "type": "string"}, - "input13": map[string]any{"description": "Input 13", "type": "string"}, - "input14": map[string]any{"description": "Input 14", "type": "string"}, - "input15": map[string]any{"description": "Input 15", "type": "string"}, - "input16": map[string]any{"description": "Input 16", "type": "string"}, - "input17": map[string]any{"description": "Input 17", "type": "string"}, - "input18": map[string]any{"description": "Input 18", "type": "string"}, - "input19": map[string]any{"description": "Input 19", "type": "string"}, - "input20": map[string]any{"description": "Input 20", "type": "string"}, - "input21": map[string]any{"description": "Input 21", "type": "string"}, - "input22": map[string]any{"description": "Input 22", "type": "string"}, - "input23": map[string]any{"description": "Input 23", "type": "string"}, - "input24": map[string]any{"description": "Input 24", "type": "string"}, - "input25": map[string]any{"description": "Input 25", "type": "string"}, - }, - }, - }, - }, - wantErr: false, - }, - { - name: "invalid: workflow_dispatch with 26 inputs (exceeds max)", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "input1": map[string]any{"description": "Input 1", "type": "string"}, - "input2": map[string]any{"description": "Input 2", "type": "string"}, - "input3": map[string]any{"description": "Input 3", "type": "string"}, - "input4": map[string]any{"description": "Input 4", "type": "string"}, - "input5": map[string]any{"description": "Input 5", "type": "string"}, - "input6": map[string]any{"description": "Input 6", 
"type": "string"}, - "input7": map[string]any{"description": "Input 7", "type": "string"}, - "input8": map[string]any{"description": "Input 8", "type": "string"}, - "input9": map[string]any{"description": "Input 9", "type": "string"}, - "input10": map[string]any{"description": "Input 10", "type": "string"}, - "input11": map[string]any{"description": "Input 11", "type": "string"}, - "input12": map[string]any{"description": "Input 12", "type": "string"}, - "input13": map[string]any{"description": "Input 13", "type": "string"}, - "input14": map[string]any{"description": "Input 14", "type": "string"}, - "input15": map[string]any{"description": "Input 15", "type": "string"}, - "input16": map[string]any{"description": "Input 16", "type": "string"}, - "input17": map[string]any{"description": "Input 17", "type": "string"}, - "input18": map[string]any{"description": "Input 18", "type": "string"}, - "input19": map[string]any{"description": "Input 19", "type": "string"}, - "input20": map[string]any{"description": "Input 20", "type": "string"}, - "input21": map[string]any{"description": "Input 21", "type": "string"}, - "input22": map[string]any{"description": "Input 22", "type": "string"}, - "input23": map[string]any{"description": "Input 23", "type": "string"}, - "input24": map[string]any{"description": "Input 24", "type": "string"}, - "input25": map[string]any{"description": "Input 25", "type": "string"}, - "input26": map[string]any{"description": "Input 26", "type": "string"}, - }, - }, - }, - }, - wantErr: true, - errContains: "maxProperties", - }, - { - name: "valid: workflow_dispatch with all valid input types", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "string_input": map[string]any{ - "description": "String input", - "type": "string", - "default": "default value", - }, - "choice_input": map[string]any{ - "description": "Choice input", - "type": "choice", - "options": []string{"option1", 
"option2", "option3"}, - "default": "option1", - }, - "boolean_input": map[string]any{ - "description": "Boolean input", - "type": "boolean", - "default": true, - }, - "number_input": map[string]any{ - "description": "Number input", - "type": "number", - "default": 42, - }, - "environment_input": map[string]any{ - "description": "Environment input", - "type": "environment", - "default": "production", - }, - }, - }, - }, - }, - wantErr: false, - }, - { - name: "invalid: workflow_dispatch with invalid input type 'text'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "text", - }, - }, - }, - }, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "invalid: workflow_dispatch with invalid input type 'int'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "int", - }, - }, - }, - }, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "invalid: workflow_dispatch with invalid input type 'bool'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "bool", - }, - }, - }, - }, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "invalid: workflow_dispatch with invalid input type 'select'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "select", - }, - }, - }, - 
}, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "invalid: workflow_dispatch with invalid input type 'dropdown'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "dropdown", - }, - }, - }, - }, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "invalid: workflow_dispatch with invalid input type 'checkbox'", - frontmatter: map[string]any{ - "on": map[string]any{ - "workflow_dispatch": map[string]any{ - "inputs": map[string]any{ - "test_input": map[string]any{ - "description": "Test input", - "type": "checkbox", - }, - }, - }, - }, - }, - wantErr: true, - errContains: "value must be one of 'string', 'choice', 'boolean', 'number', 'environment'", - }, - { - name: "valid metadata with various key-value pairs", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "author": "John Doe", - "version": "1.0.0", - "category": "automation", - "description": "A workflow that automates something", - }, - }, - wantErr: false, - }, - { - name: "valid metadata with max length key (64 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "a123456789b123456789c123456789d123456789e123456789f123456789abcd": "value", - }, - }, - wantErr: false, - }, - { - name: "valid metadata with max length value (1024 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "long-value": strings.Repeat("a", 1024), - }, - }, - wantErr: false, - }, - { - name: "invalid metadata with key too long (65 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - 
"a123456789b123456789c123456789d123456789e123456789f123456789abcde": "value", - }, - }, - wantErr: true, - errContains: "additional properties", - }, - { - name: "invalid metadata with value too long (1025 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "test": strings.Repeat("a", 1025), - }, - }, - wantErr: true, - errContains: "maxLength", - }, - { - name: "invalid metadata with non-string value", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "count": 123, - }, - }, - wantErr: true, - errContains: "want string", - }, - { - name: "invalid metadata with empty key", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "metadata": map[string]any{ - "": "value", - }, - }, - wantErr: true, - errContains: "additional properties", - }, - { - name: "invalid name too long (257 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "name": strings.Repeat("a", 257), - }, - wantErr: true, - errContains: "maxLength", - }, - { - name: "valid name at max length (256 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "name": strings.Repeat("a", 256), - }, - wantErr: false, - }, - { - name: "invalid description too long (10001 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "description": strings.Repeat("a", 10001), - }, - wantErr: true, - errContains: "maxLength", - }, - { - name: "valid description at max length (10000 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "description": strings.Repeat("a", 10000), - }, - wantErr: false, - }, - { - name: "invalid tracker-id too long (129 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "tracker-id": strings.Repeat("a", 129), - }, - wantErr: true, - errContains: "maxLength", - }, - { - name: "valid tracker-id at max length (128 chars)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "tracker-id": 
strings.Repeat("a", 128), - }, - wantErr: false, - }, - // id-token permission validation - id-token only supports "write" and "none", not "read" - // See: https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs#defining-access-for-the-github_token-scopes - { - name: "invalid: id-token: read is not allowed (only write and none)", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "permissions": map[string]any{ - "id-token": "read", - }, - }, - wantErr: true, - errContains: "id-token", - }, - { - name: "valid: id-token: write is allowed", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "permissions": map[string]any{ - "id-token": "write", - }, - }, - wantErr: false, - }, - { - name: "valid: id-token: none is allowed", - frontmatter: map[string]any{ - "on": "workflow_dispatch", - "permissions": map[string]any{ - "id-token": "none", - }, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if tt.wantErr && err == nil { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() expected error, got nil") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() error = %v", err) - return - } - - if tt.wantErr && err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() error = %v, expected to contain %v", err, tt.errContains) - } - } - }) - } -} - -func TestValidateIncludedFileFrontmatterWithSchema(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - { - name: "valid frontmatter with tools only", - frontmatter: map[string]any{ - "tools": map[string]any{"github": "test"}, - }, - wantErr: false, - }, - { - name: "empty frontmatter", - frontmatter: map[string]any{}, - wantErr: false, - }, - { - name: "invalid 
frontmatter with on trigger", - frontmatter: map[string]any{ - "on": "push", - "tools": map[string]any{"github": "test"}, - }, - wantErr: true, - errContains: "cannot be used in shared workflows", - }, - { - name: "invalid frontmatter with multiple unexpected keys", - frontmatter: map[string]any{ - "on": "push", - "permissions": "read-all", - "tools": map[string]any{"github": "test"}, - }, - wantErr: true, - errContains: "cannot be used in shared workflows", - }, - { - name: "invalid frontmatter with only unexpected keys", - frontmatter: map[string]any{ - "on": "push", - "permissions": "read-all", - }, - wantErr: true, - errContains: "cannot be used in shared workflows", - }, - { - name: "valid frontmatter with complex tools object", - frontmatter: map[string]any{ - "tools": map[string]any{ - "github": map[string]any{ - "allowed": []string{"list_issues", "issue_read"}, - }, - "bash": []string{"echo", "ls"}, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with bash as boolean true", - frontmatter: map[string]any{ - "tools": map[string]any{ - "bash": true, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with bash as boolean false", - frontmatter: map[string]any{ - "tools": map[string]any{ - "bash": false, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with bash as null", - frontmatter: map[string]any{ - "tools": map[string]any{ - "bash": nil, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with custom MCP tool", - frontmatter: map[string]any{ - "tools": map[string]any{ - "myTool": map[string]any{ - "mcp": map[string]any{ - "type": "http", - "url": "https://api.contoso.com", - "headers": map[string]any{"Authorization": "Bearer token"}, - }, - "allowed": []string{"api_call1", "api_call2"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with HTTP MCP tool with underscored headers", - frontmatter: map[string]any{ - "tools": map[string]any{ - "datadog": map[string]any{ - "type": "http", 
- "url": "https://mcp.datadoghq.com/api/unstable/mcp-server/mcp", - "headers": map[string]any{ - "DD_API_KEY": "test-key", - "DD_APPLICATION_KEY": "test-app", - "DD_SITE": "datadoghq.com", - }, - "allowed": []string{"get-monitors", "get-monitor"}, - }, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with cache-memory as boolean true", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": true, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with cache-memory as boolean false", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": false, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with cache-memory as nil", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": nil, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with cache-memory as object with key", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": map[string]any{ - "key": "custom-memory-${{ github.workflow }}", - }, - }, - }, - wantErr: false, - }, - { - name: "valid frontmatter with cache-memory with all valid options", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": map[string]any{ - "key": "custom-key", - "retention-days": 30, - "description": "Test cache description", - }, - }, - }, - wantErr: false, - }, - { - name: "invalid cache-memory with invalid retention-days (too low)", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": map[string]any{ - "retention-days": 0, - }, - }, - }, - wantErr: true, - errContains: "got 0, want 1", - }, - { - name: "invalid cache-memory with invalid retention-days (too high)", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": map[string]any{ - "retention-days": 91, - }, - }, - }, - wantErr: true, - errContains: "got 91, want 90", - }, - { - name: "invalid cache-memory with unsupported docker-image field", - frontmatter: map[string]any{ - 
"tools": map[string]any{ - "cache-memory": map[string]any{ - "docker-image": "custom/memory:latest", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'docker-image' not allowed", - }, - { - name: "invalid cache-memory with additional property", - frontmatter: map[string]any{ - "tools": map[string]any{ - "cache-memory": map[string]any{ - "key": "custom-key", - "invalid_option": "value", - }, - }, - }, - wantErr: true, - errContains: "additional properties 'invalid_option' not allowed", - }, - { - name: "invalid: included file cannot have inputs at root level", - frontmatter: map[string]any{ - "inputs": map[string]any{ - "input1": map[string]any{"description": "Input 1", "type": "string"}, - }, - }, - wantErr: true, - errContains: "additional properties 'inputs' not allowed", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateIncludedFileFrontmatterWithSchema(tt.frontmatter) - - if tt.wantErr && err == nil { - t.Errorf("ValidateIncludedFileFrontmatterWithSchema() expected error, got nil") - return - } - - if !tt.wantErr && err != nil { - t.Errorf("ValidateIncludedFileFrontmatterWithSchema() error = %v", err) - return - } - - if tt.wantErr && err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("ValidateIncludedFileFrontmatterWithSchema() error = %v, expected to contain %v", err, tt.errContains) - } - } - }) - } -} - func TestValidateWithSchema(t *testing.T) { tests := []struct { name string @@ -1864,82 +136,6 @@ timeout_minu tes: 10 } } -func TestValidateMCPConfigWithSchema(t *testing.T) { - tests := []struct { - name string - mcpConfig map[string]any - toolName string - wantErr bool - errContains string - }{ - { - name: "valid stdio MCP config with command", - mcpConfig: map[string]any{ - "type": "stdio", - "command": "npx", - "args": []string{"-y", "@modelcontextprotocol/server-memory"}, - }, - toolName: "memory", - wantErr: false, - }, - { - name: 
"valid http MCP config with url", - mcpConfig: map[string]any{ - "type": "http", - "url": "https://api.example.com/mcp", - }, - toolName: "api-server", - wantErr: false, - }, - { - name: "invalid: empty string for command field", - mcpConfig: map[string]any{ - "type": "stdio", - "command": "", - }, - toolName: "test-tool", - wantErr: true, - errContains: "minLength", - }, - { - name: "invalid: empty string for url field", - mcpConfig: map[string]any{ - "type": "http", - "url": "", - }, - toolName: "test-tool", - wantErr: true, - errContains: "minLength", - }, - { - name: "valid stdio MCP config with container", - mcpConfig: map[string]any{ - "type": "stdio", - "container": "ghcr.io/modelcontextprotocol/server-memory", - }, - toolName: "memory", - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMCPConfigWithSchema(tt.mcpConfig, tt.toolName) - - if (err != nil) != tt.wantErr { - t.Errorf("ValidateMCPConfigWithSchema() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("Error message should contain %q, got: %v", tt.errContains, err) - } - } - }) - } -} - // TestGetSafeOutputTypeKeys tests extracting safe output type keys from the embedded schema func TestGetSafeOutputTypeKeys(t *testing.T) { keys, err := GetSafeOutputTypeKeys() diff --git a/pkg/parser/schema_utilities_test.go b/pkg/parser/schema_utilities_test.go index 48bb52f7fd..1fc5d3003a 100644 --- a/pkg/parser/schema_utilities_test.go +++ b/pkg/parser/schema_utilities_test.go @@ -3,7 +3,6 @@ package parser import ( - "strings" "testing" "github.com/github/gh-aw/pkg/constants" @@ -132,140 +131,3 @@ func TestFilterIgnoredFields(t *testing.T) { }) } } - -func TestValidateMainWorkflowWithIgnoredFields(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - { - name: "valid 
frontmatter with description field - now properly validated", - frontmatter: map[string]any{ - "on": "push", - "description": "This is a test workflow description", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "invalid frontmatter with applyTo field - not allowed in main workflow", - frontmatter: map[string]any{ - "on": "push", - "applyTo": "some-target", - "engine": "claude", - }, - wantErr: true, - errContains: "applyTo", - }, - { - name: "valid frontmatter with description - now properly validated", - frontmatter: map[string]any{ - "on": "push", - "description": "Test workflow", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "valid frontmatter with user-invokable field - should be ignored", - frontmatter: map[string]any{ - "on": "push", - "user-invokable": true, - "engine": "claude", - }, - wantErr: false, - }, - { - name: "invalid frontmatter with ignored fields - other validation should still work", - frontmatter: map[string]any{ - "on": "push", - "description": "Test workflow", - "applyTo": "some-target", - "invalid_field": "should-fail", - }, - wantErr: true, - errContains: "invalid_field", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateMainWorkflowFrontmatterWithSchema(tt.frontmatter) - - if (err != nil) != tt.wantErr { - t.Errorf("ValidateMainWorkflowFrontmatterWithSchema() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("Error message should contain %q, got: %v", tt.errContains, err) - } - } - }) - } -} - -func TestValidateIncludedFileWithIgnoredFields(t *testing.T) { - tests := []struct { - name string - frontmatter map[string]any - wantErr bool - errContains string - }{ - { - name: "valid included file with applyTo - should fail (not in schema yet)", - frontmatter: map[string]any{ - "applyTo": "some-target", - "engine": "claude", - }, - wantErr: true, - 
errContains: "applyTo", - }, - { - name: "valid included file with description - should pass", - frontmatter: map[string]any{ - "description": "This is a test description", - "engine": "claude", - }, - wantErr: false, - }, - { - name: "invalid included file with 'on' field - should fail", - frontmatter: map[string]any{ - "on": "push", - "engine": "claude", - }, - wantErr: true, - errContains: "on", - }, - { - name: "invalid included file with invalid field - should fail", - frontmatter: map[string]any{ - "invalid_field": "should-fail", - "engine": "claude", - }, - wantErr: true, - errContains: "invalid_field", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := ValidateIncludedFileFrontmatterWithSchema(tt.frontmatter) - - if (err != nil) != tt.wantErr { - t.Errorf("ValidateIncludedFileFrontmatterWithSchema() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("Error message should contain %q, got: %v", tt.errContains, err) - } - } - }) - } -} diff --git a/pkg/parser/schema_validation.go b/pkg/parser/schema_validation.go index 41432f5e7a..3e4ead0fb0 100644 --- a/pkg/parser/schema_validation.go +++ b/pkg/parser/schema_validation.go @@ -52,28 +52,6 @@ func validateSharedWorkflowFields(frontmatter map[string]any) error { // - Invalid additional properties (e.g., unknown fields) // // See pkg/parser/schema_passthrough_validation_test.go for comprehensive test coverage. 
-func ValidateMainWorkflowFrontmatterWithSchema(frontmatter map[string]any) error { - schemaValidationLog.Print("Validating main workflow frontmatter with schema") - - // Filter out ignored fields before validation - filtered := filterIgnoredFields(frontmatter) - - // First run custom validation for command trigger conflicts (provides better error messages) - if err := validateCommandTriggerConflicts(filtered); err != nil { - schemaValidationLog.Printf("Command trigger validation failed: %v", err) - return err - } - - // Then run the standard schema validation - // This validates all fields including pass-through fields (concurrency, container, etc.) - if err := validateWithSchema(filtered, mainWorkflowSchema, "main workflow file"); err != nil { - schemaValidationLog.Printf("Schema validation failed for main workflow: %v", err) - return err - } - - // Finally run other custom validation rules - return validateEngineSpecificRules(filtered) -} // ValidateMainWorkflowFrontmatterWithSchemaAndLocation validates main workflow frontmatter with file location info func ValidateMainWorkflowFrontmatterWithSchemaAndLocation(frontmatter map[string]any, filePath string) error { @@ -95,34 +73,6 @@ func ValidateMainWorkflowFrontmatterWithSchemaAndLocation(frontmatter map[string } // ValidateIncludedFileFrontmatterWithSchema validates included file frontmatter using JSON schema -func ValidateIncludedFileFrontmatterWithSchema(frontmatter map[string]any) error { - schemaValidationLog.Print("Validating included file frontmatter with schema") - - // Filter out ignored fields before validation - filtered := filterIgnoredFields(frontmatter) - - // First check for forbidden fields in shared workflows - if err := validateSharedWorkflowFields(filtered); err != nil { - schemaValidationLog.Printf("Shared workflow field validation failed: %v", err) - return err - } - - // To validate shared workflows against the main schema, we temporarily add an 'on' field - // This allows us to use the full 
schema validation while still enforcing the forbidden field check above - tempFrontmatter := make(map[string]any) - maps.Copy(tempFrontmatter, filtered) - // Add a temporary 'on' field to satisfy the schema's required field - tempFrontmatter["on"] = "push" - - // Validate with the main schema (which will catch unknown fields) - if err := validateWithSchema(tempFrontmatter, mainWorkflowSchema, "included file"); err != nil { - schemaValidationLog.Printf("Schema validation failed for included file: %v", err) - return err - } - - // Run custom validation for engine-specific rules - return validateEngineSpecificRules(filtered) -} // ValidateIncludedFileFrontmatterWithSchemaAndLocation validates included file frontmatter with file location info func ValidateIncludedFileFrontmatterWithSchemaAndLocation(frontmatter map[string]any, filePath string) error { @@ -150,7 +100,3 @@ func ValidateIncludedFileFrontmatterWithSchemaAndLocation(frontmatter map[string } // ValidateMCPConfigWithSchema validates MCP configuration using JSON schema -func ValidateMCPConfigWithSchema(mcpConfig map[string]any, toolName string) error { - schemaValidationLog.Printf("Validating MCP configuration for tool: %s", toolName) - return validateWithSchema(mcpConfig, mcpConfigSchema, fmt.Sprintf("MCP configuration for tool '%s'", toolName)) -} diff --git a/pkg/parser/schema_validation_test.go b/pkg/parser/schema_validation_test.go deleted file mode 100644 index 0618a07c4f..0000000000 --- a/pkg/parser/schema_validation_test.go +++ /dev/null @@ -1,74 +0,0 @@ -//go:build !integration - -package parser - -import ( - "strings" - "testing" - - "github.com/github/gh-aw/pkg/constants" -) - -// TestForbiddenFieldsInSharedWorkflows verifies each forbidden field is properly rejected -func TestForbiddenFieldsInSharedWorkflows(t *testing.T) { - // Use the SharedWorkflowForbiddenFields constant from constants package - forbiddenFields := constants.SharedWorkflowForbiddenFields - - for _, field := range forbiddenFields 
{ - t.Run("reject_"+field, func(t *testing.T) { - frontmatter := map[string]any{ - field: "test-value", - "tools": map[string]any{"bash": true}, - } - - err := ValidateIncludedFileFrontmatterWithSchema(frontmatter) - if err == nil { - t.Errorf("Expected error for forbidden field '%s', got nil", field) - } - - if err != nil && !strings.Contains(err.Error(), "cannot be used in shared workflows") { - t.Errorf("Error message should mention shared workflows, got: %v", err) - } - }) - } -} - -// TestAllowedFieldsInSharedWorkflows verifies allowed fields work correctly -func TestAllowedFieldsInSharedWorkflows(t *testing.T) { - allowedFields := map[string]any{ - "tools": map[string]any{"bash": true}, - "engine": "copilot", - "network": map[string]any{"allowed": []string{"defaults"}}, - "mcp-servers": map[string]any{}, - "permissions": "read-all", - "runtimes": map[string]any{"node": map[string]any{"version": "20"}}, - "safe-outputs": map[string]any{}, - "safe-inputs": map[string]any{}, - "services": map[string]any{}, - "steps": []any{}, - "secret-masking": true, - "jobs": map[string]any{"test": map[string]any{"runs-on": "ubuntu-latest", "steps": []any{map[string]any{"run": "echo test"}}}}, - "description": "test", - "metadata": map[string]any{}, - "inputs": map[string]any{}, - "bots": []string{"copilot"}, - "post-steps": []any{map[string]any{"run": "echo cleanup"}}, - "labels": []string{"automation", "testing"}, - "imports": []string{"./shared.md"}, - "cache": map[string]any{"key": "test-key", "path": "node_modules"}, - "source": "githubnext/agentics/workflows/ci-doctor.md@v1.0.0", - } - - for field, value := range allowedFields { - t.Run("allow_"+field, func(t *testing.T) { - frontmatter := map[string]any{ - field: value, - } - - err := ValidateIncludedFileFrontmatterWithSchema(frontmatter) - if err != nil && strings.Contains(err.Error(), "cannot be used in shared workflows") { - t.Errorf("Field '%s' should be allowed in shared workflows, got error: %v", field, err) - } - 
}) - } -} diff --git a/pkg/parser/yaml_error.go b/pkg/parser/yaml_error.go index c5d3981998..b46ee4a838 100644 --- a/pkg/parser/yaml_error.go +++ b/pkg/parser/yaml_error.go @@ -102,153 +102,7 @@ func adjustLineNumbersInFormattedError(formatted string, offset int) string { // // NOTE: This function is kept for backward compatibility. New code should use FormatYAMLError() // which leverages yaml.FormatError() for better error messages with source context. -func ExtractYAMLError(err error, frontmatterLineOffset int) (line int, column int, message string) { - yamlErrorLog.Printf("Extracting YAML error information: offset=%d", frontmatterLineOffset) - errStr := err.Error() - - // First try to extract from goccy/go-yaml's [line:column] format - line, column, message = extractFromGoccyFormat(errStr, frontmatterLineOffset) - if line > 0 || column > 0 { - yamlErrorLog.Printf("Extracted error location from goccy format: line=%d, column=%d", line, column) - return line, column, message - } - - // Fallback to standard YAML error string parsing for other libraries - yamlErrorLog.Print("Falling back to string parsing for error location") - return extractFromStringParsing(errStr, frontmatterLineOffset) -} // extractFromGoccyFormat extracts line/column from goccy/go-yaml's [line:column] message format -func extractFromGoccyFormat(errStr string, frontmatterLineOffset int) (line int, column int, message string) { - // Look for goccy format like "[5:10] mapping value is not allowed in this context" - if strings.Contains(errStr, "[") && strings.Contains(errStr, "]") { - start := strings.Index(errStr, "[") - end := strings.Index(errStr, "]") - if start >= 0 && end > start { - locationPart := errStr[start+1 : end] - messagePart := strings.TrimSpace(errStr[end+1:]) - - // Parse line:column format - if strings.Contains(locationPart, ":") { - parts := strings.Split(locationPart, ":") - if len(parts) == 2 { - lineStr := strings.TrimSpace(parts[0]) - columnStr := strings.TrimSpace(parts[1]) 
- - // Parse line and column numbers - if _, parseErr := fmt.Sscanf(lineStr, "%d", &line); parseErr == nil { - if _, parseErr := fmt.Sscanf(columnStr, "%d", &column); parseErr == nil { - // Adjust line number to account for frontmatter position in file - if line > 0 { - line += frontmatterLineOffset - 1 // -1 because line numbers in YAML errors are 1-based relative to YAML content - } - - // Only return valid positions - avoid returning 1,1 when location is unknown - if line <= frontmatterLineOffset && column <= 1 { - return 0, 0, messagePart - } - - return line, column, messagePart - } - } - } - } - } - } - - return 0, 0, "" -} // extractFromStringParsing provides fallback string parsing for other YAML libraries -func extractFromStringParsing(errStr string, frontmatterLineOffset int) (line int, column int, message string) { - // Parse "yaml: line X: column Y: message" format (enhanced parsers that provide column info) - if strings.Contains(errStr, "yaml: line ") && strings.Contains(errStr, "column ") { - parts := strings.SplitN(errStr, "yaml: line ", 2) - if len(parts) > 1 { - lineInfo := parts[1] - - // Look for column information - colonIndex := strings.Index(lineInfo, ":") - if colonIndex > 0 { - lineStr := lineInfo[:colonIndex] - - // Parse line number - if _, parseErr := fmt.Sscanf(lineStr, "%d", &line); parseErr == nil { - // Look for column part - remaining := lineInfo[colonIndex+1:] - if strings.Contains(remaining, "column ") { - columnParts := strings.SplitN(remaining, "column ", 2) - if len(columnParts) > 1 { - columnInfo := columnParts[1] - colonIndex2 := strings.Index(columnInfo, ":") - if colonIndex2 > 0 { - columnStr := columnInfo[:colonIndex2] - message = strings.TrimSpace(columnInfo[colonIndex2+1:]) - - // Parse column number - if _, parseErr := fmt.Sscanf(columnStr, "%d", &column); parseErr == nil { - // Adjust line number to account for frontmatter position in file - line += frontmatterLineOffset - 1 // -1 because line numbers in YAML errors are 
1-based relative to YAML content - return - } - } - } - } - } - } - } - } - - // Parse "yaml: line X: message" format (standard format without column info) - if strings.Contains(errStr, "yaml: line ") { - parts := strings.SplitN(errStr, "yaml: line ", 2) - if len(parts) > 1 { - lineInfo := parts[1] - colonIndex := strings.Index(lineInfo, ":") - if colonIndex > 0 { - lineStr := lineInfo[:colonIndex] - message = strings.TrimSpace(lineInfo[colonIndex+1:]) - - // Parse line number - if _, parseErr := fmt.Sscanf(lineStr, "%d", &line); parseErr == nil { - // Adjust line number to account for frontmatter position in file - line += frontmatterLineOffset - 1 // -1 because line numbers in YAML errors are 1-based relative to YAML content - // Don't default to column 1 when not provided - return 0 instead - column = 0 - return - } - } - } - } - - // Parse "yaml: unmarshal errors: line X: message" format (multiline errors) - if strings.Contains(errStr, "yaml: unmarshal errors:") && strings.Contains(errStr, "line ") { - lines := strings.SplitSeq(errStr, "\n") - for errorLine := range lines { - errorLine = strings.TrimSpace(errorLine) - if strings.Contains(errorLine, "line ") && strings.Contains(errorLine, ":") { - // Extract the first line number found in the error - parts := strings.SplitN(errorLine, "line ", 2) - if len(parts) > 1 { - colonIndex := strings.Index(parts[1], ":") - if colonIndex > 0 { - lineStr := parts[1][:colonIndex] - restOfMessage := strings.TrimSpace(parts[1][colonIndex+1:]) - - // Parse line number - if _, parseErr := fmt.Sscanf(lineStr, "%d", &line); parseErr == nil { - // Adjust line number to account for frontmatter position in file - line += frontmatterLineOffset - 1 // -1 because line numbers in YAML errors are 1-based relative to YAML content - column = 0 // Don't default to column 1 - message = restOfMessage - return - } - } - } - } - } - } - - // Fallback: return original error message with no location - return 0, 0, errStr -} diff --git 
a/pkg/parser/yaml_error_test.go b/pkg/parser/yaml_error_test.go index 59d69266bc..1847307bb0 100644 --- a/pkg/parser/yaml_error_test.go +++ b/pkg/parser/yaml_error_test.go @@ -3,7 +3,6 @@ package parser import ( - "errors" "fmt" "strings" "testing" @@ -11,277 +10,6 @@ import ( "github.com/goccy/go-yaml" ) -func TestExtractYAMLError(t *testing.T) { - tests := []struct { - name string - err error - frontmatterLineOffset int - expectedLine int - expectedColumn int - expectedMessage string - }{ - { - name: "yaml line error", - err: errors.New("yaml: line 7: mapping values are not allowed in this context"), - frontmatterLineOffset: 1, - expectedLine: 7, // 7 + 1 - 1 = 7 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "mapping values are not allowed in this context", - }, - { - name: "yaml line error with frontmatter offset", - err: errors.New("yaml: line 3: found character that cannot start any token"), - frontmatterLineOffset: 5, - expectedLine: 7, // 3 + 5 - 1 = 7 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "found character that cannot start any token", - }, - { - name: "non-yaml error", - err: errors.New("some other error"), - frontmatterLineOffset: 1, - expectedLine: 0, - expectedColumn: 0, - expectedMessage: "some other error", - }, - { - name: "yaml error with different message format", - err: errors.New("yaml: line 15: found unexpected end of stream"), - frontmatterLineOffset: 2, - expectedLine: 16, // 15 + 2 - 1 = 16 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "found unexpected end of stream", - }, - { - name: "yaml error with indentation issue", - err: errors.New("yaml: line 4: bad indentation of a mapping entry"), - frontmatterLineOffset: 1, - expectedLine: 4, // 4 + 1 - 1 = 4 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "bad indentation of a mapping entry", - }, - { - name: "yaml error with duplicate key", 
- err: errors.New("yaml: line 6: found duplicate key"), - frontmatterLineOffset: 3, - expectedLine: 8, // 6 + 3 - 1 = 8 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "found duplicate key", - }, - { - name: "yaml error with complex format", - err: errors.New("yaml: line 12: did not find expected ',' or ']'"), - frontmatterLineOffset: 0, - expectedLine: 11, // 12 + 0 - 1 = 11 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "did not find expected ',' or ']'", - }, - { - name: "yaml unmarshal error multiline", - err: errors.New("yaml: unmarshal errors:\n line 4: mapping key \"permissions\" already defined at line 2"), - frontmatterLineOffset: 1, - expectedLine: 4, // 4 + 1 - 1 = 4 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "mapping key \"permissions\" already defined at line 2", - }, - { - name: "yaml error with flow mapping", - err: errors.New("yaml: line 8: did not find expected ',' or '}'"), - frontmatterLineOffset: 1, - expectedLine: 8, // 8 + 1 - 1 = 8 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "did not find expected ',' or '}'", - }, - { - name: "yaml error with invalid character", - err: errors.New("yaml: line 5: found character that cannot start any token"), - frontmatterLineOffset: 0, - expectedLine: 4, // 5 + 0 - 1 = 4 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "found character that cannot start any token", - }, - { - name: "yaml error with unmarshal type issue", - err: errors.New("yaml: line 3: cannot unmarshal !!str `yes_please` into bool"), - frontmatterLineOffset: 2, - expectedLine: 4, // 3 + 2 - 1 = 4 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "cannot unmarshal !!str `yes_please` into bool", - }, - { - name: "yaml complex unmarshal error with nested line info", - err: errors.New("yaml: unmarshal errors:\n line 7: found 
unexpected end of stream\n line 9: mapping values are not allowed in this context"), - frontmatterLineOffset: 1, - expectedLine: 7, // First line 7 + 1 - 1 = 7 - expectedColumn: 0, // No column info provided in string format - expectedMessage: "found unexpected end of stream", - }, - { - name: "yaml error with column information greater than 1", - err: errors.New("yaml: line 5: column 12: invalid character at position"), - frontmatterLineOffset: 1, - expectedLine: 5, // 5 + 1 - 1 = 5 - expectedColumn: 12, - expectedMessage: "invalid character at position", - }, - { - name: "yaml error with high column number", - err: errors.New("yaml: line 3: column 45: unexpected token found"), - frontmatterLineOffset: 2, - expectedLine: 4, // 3 + 2 - 1 = 4 - expectedColumn: 45, - expectedMessage: "unexpected token found", - }, - { - name: "yaml error with column 1 explicitly specified", - err: errors.New("yaml: line 8: column 1: mapping values not allowed in this context"), - frontmatterLineOffset: 0, - expectedLine: 7, // 8 + 0 - 1 = 7 - expectedColumn: 1, - expectedMessage: "mapping values not allowed in this context", - }, - { - name: "yaml error with medium column position", - err: errors.New("yaml: line 2: column 23: found character that cannot start any token"), - frontmatterLineOffset: 3, - expectedLine: 4, // 2 + 3 - 1 = 4 - expectedColumn: 23, - expectedMessage: "found character that cannot start any token", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - line, column, message := ExtractYAMLError(tt.err, tt.frontmatterLineOffset) - - if line != tt.expectedLine { - t.Errorf("Expected line %d, got %d", tt.expectedLine, line) - } - if column != tt.expectedColumn { - t.Errorf("Expected column %d, got %d", tt.expectedColumn, column) - } - if message != tt.expectedMessage { - t.Errorf("Expected message '%s', got '%s'", tt.expectedMessage, message) - } - }) - } -} - -// TestExtractYAMLErrorWithGoccyErrors tests extraction from actual 
goccy/go-yaml errors -func TestExtractYAMLErrorWithGoccyErrors(t *testing.T) { - tests := []struct { - name string - yamlContent string - frontmatterLineOffset int - expectedMinLine int // Use min line since exact line may vary - expectedMinColumn int // Use min column since exact column may vary - expectValidLocation bool - }{ - { - name: "goccy invalid syntax", - yamlContent: "invalid: yaml: content", - frontmatterLineOffset: 1, - expectedMinLine: 1, // Should be >= frontmatterLineOffset - expectedMinColumn: 5, // Should have a valid column - expectValidLocation: true, - }, - { - name: "goccy indentation error", - yamlContent: "name: test\n invalid_indentation: here", - frontmatterLineOffset: 2, - expectedMinLine: 2, // Should be >= frontmatterLineOffset - expectedMinColumn: 1, // Should have a valid column - expectValidLocation: true, - }, - { - name: "goccy duplicate key", - yamlContent: "name: test\nname: duplicate", - frontmatterLineOffset: 0, - expectedMinLine: 0, // Should be >= frontmatterLineOffset (could be 0 for some cases) - expectedMinColumn: 1, // Should have a valid column - expectValidLocation: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Generate an actual goccy/go-yaml error - var result map[string]any - err := yaml.Unmarshal([]byte(tt.yamlContent), &result) - - if err == nil { - t.Errorf("Expected YAML parsing to fail for content: %q", tt.yamlContent) - return - } - - line, column, message := ExtractYAMLError(err, tt.frontmatterLineOffset) - - if tt.expectValidLocation { - if line < tt.expectedMinLine { - t.Errorf("Expected line >= %d, got %d", tt.expectedMinLine, line) - } - if column < tt.expectedMinColumn { - t.Errorf("Expected column >= %d, got %d", tt.expectedMinColumn, column) - } - if message == "" { - t.Errorf("Expected non-empty message") - } - } else { - if line != 0 || column != 0 { - t.Errorf("Expected no location (0,0) when location unknown, got (%d,%d)", line, column) - } - } - - 
t.Logf("YAML: %q -> Line: %d, Column: %d, Message: %s", tt.yamlContent, line, column, message) - }) - } -} - -// TestExtractYAMLErrorUnknownLocation tests that 0,0 is returned when location is unknown -func TestExtractYAMLErrorUnknownLocation(t *testing.T) { - tests := []struct { - name string - err error - frontmatterLineOffset int - expectedLine int - expectedColumn int - expectedMessage string - }{ - { - name: "non-yaml error without location", - err: errors.New("generic error without location info"), - frontmatterLineOffset: 1, - expectedLine: 0, - expectedColumn: 0, - expectedMessage: "generic error without location info", - }, - { - name: "malformed yaml error string", - err: errors.New("yaml: some error without line info"), - frontmatterLineOffset: 1, - expectedLine: 0, - expectedColumn: 0, - expectedMessage: "yaml: some error without line info", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - line, column, message := ExtractYAMLError(tt.err, tt.frontmatterLineOffset) - - if line != tt.expectedLine { - t.Errorf("Expected line %d, got %d", tt.expectedLine, line) - } - if column != tt.expectedColumn { - t.Errorf("Expected column %d, got %d", tt.expectedColumn, column) - } - if message != tt.expectedMessage { - t.Errorf("Expected message '%s', got '%s'", tt.expectedMessage, message) - } - }) - } -} - // TestFormatYAMLError tests the new FormatYAMLError function that uses yaml.FormatError() func TestFormatYAMLError(t *testing.T) { tests := []struct { diff --git a/pkg/repoutil/repoutil.go b/pkg/repoutil/repoutil.go index 8d793e9dfc..304743f626 100644 --- a/pkg/repoutil/repoutil.go +++ b/pkg/repoutil/repoutil.go @@ -23,35 +23,6 @@ func SplitRepoSlug(slug string) (owner, repo string, err error) { return parts[0], parts[1], nil } -// ParseGitHubURL extracts the owner and repo from a GitHub URL. -// Handles both SSH (git@github.com:owner/repo.git) and HTTPS (https://github.com/owner/repo.git) formats. 
-func ParseGitHubURL(url string) (owner, repo string, err error) { - log.Printf("Parsing GitHub URL: %s", url) - var repoPath string - - // SSH format: git@github.com:owner/repo.git - if after, ok := strings.CutPrefix(url, "git@github.com:"); ok { - repoPath = after - log.Printf("Detected SSH format, extracted path: %s", repoPath) - } else if strings.Contains(url, "github.com/") { - // HTTPS format: https://github.com/owner/repo.git - parts := strings.Split(url, "github.com/") - if len(parts) >= 2 { - repoPath = parts[1] - log.Printf("Detected HTTPS format, extracted path: %s", repoPath) - } - } else { - log.Printf("URL does not match known GitHub formats: %s", url) - return "", "", fmt.Errorf("URL does not appear to be a GitHub repository: %s", url) - } - - // Remove .git suffix if present - repoPath = strings.TrimSuffix(repoPath, ".git") - - // Split into owner/repo - return SplitRepoSlug(repoPath) -} - // SanitizeForFilename converts a repository slug (owner/repo) to a filename-safe string. // Replaces "/" with "-". Returns "clone-mode" if the slug is empty. 
func SanitizeForFilename(slug string) string { diff --git a/pkg/repoutil/repoutil_test.go b/pkg/repoutil/repoutil_test.go index 03619848c2..97b949d1f1 100644 --- a/pkg/repoutil/repoutil_test.go +++ b/pkg/repoutil/repoutil_test.go @@ -70,81 +70,6 @@ func TestSplitRepoSlug(t *testing.T) { } } -func TestParseGitHubURL(t *testing.T) { - tests := []struct { - name string - url string - expectedOwner string - expectedRepo string - expectError bool - }{ - { - name: "SSH format with .git", - url: "git@github.com:github/gh-aw.git", - expectedOwner: "github", - expectedRepo: "gh-aw", - expectError: false, - }, - { - name: "SSH format without .git", - url: "git@github.com:octocat/hello-world", - expectedOwner: "octocat", - expectedRepo: "hello-world", - expectError: false, - }, - { - name: "HTTPS format with .git", - url: "https://github.com/github/gh-aw.git", - expectedOwner: "github", - expectedRepo: "gh-aw", - expectError: false, - }, - { - name: "HTTPS format without .git", - url: "https://github.com/octocat/hello-world", - expectedOwner: "octocat", - expectedRepo: "hello-world", - expectError: false, - }, - { - name: "non-GitHub URL", - url: "https://gitlab.com/user/repo.git", - expectError: true, - }, - { - name: "invalid URL", - url: "not-a-url", - expectError: true, - }, - { - name: "empty URL", - url: "", - expectError: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - owner, repo, err := ParseGitHubURL(tt.url) - if tt.expectError { - if err == nil { - t.Errorf("ParseGitHubURL(%q) expected error, got nil", tt.url) - } - } else { - if err != nil { - t.Errorf("ParseGitHubURL(%q) unexpected error: %v", tt.url, err) - } - if owner != tt.expectedOwner { - t.Errorf("ParseGitHubURL(%q) owner = %q; want %q", tt.url, owner, tt.expectedOwner) - } - if repo != tt.expectedRepo { - t.Errorf("ParseGitHubURL(%q) repo = %q; want %q", tt.url, repo, tt.expectedRepo) - } - } - }) - } -} - func TestSanitizeForFilename(t *testing.T) { tests := []struct 
{ name string @@ -190,13 +115,6 @@ func BenchmarkSplitRepoSlug(b *testing.B) { } } -func BenchmarkParseGitHubURL(b *testing.B) { - url := "https://github.com/github/gh-aw.git" - for b.Loop() { - _, _, _ = ParseGitHubURL(url) - } -} - func BenchmarkSanitizeForFilename(b *testing.B) { slug := "github/gh-aw" for b.Loop() { @@ -312,73 +230,6 @@ func TestSplitRepoSlug_SpecialCharacters(t *testing.T) { } } -func TestParseGitHubURL_Variants(t *testing.T) { - tests := []struct { - name string - url string - expectedOwner string - expectedRepo string - expectError bool - }{ - { - name: "SSH with port (invalid format)", - url: "git@github.com:22:owner/repo.git", - expectedOwner: "", - expectedRepo: "", - expectError: false, // Will parse but give unexpected results - }, - { - name: "HTTPS with www", - url: "https://www.github.com/owner/repo.git", - expectedOwner: "owner", - expectedRepo: "repo", - expectError: false, - }, - { - name: "HTTP instead of HTTPS", - url: "http://github.com/owner/repo.git", - expectedOwner: "owner", - expectedRepo: "repo", - expectError: false, - }, - { - name: "URL with trailing slash (will fail)", - url: "https://github.com/owner/repo/", - expectedOwner: "", - expectedRepo: "", - expectError: true, // Will fail due to extra slash - }, - { - name: "SSH without git extension", - url: "git@github.com:owner/repo", - expectedOwner: "owner", - expectedRepo: "repo", - expectError: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - owner, repo, err := ParseGitHubURL(tt.url) - if tt.expectError { - if err == nil { - t.Errorf("Expected error for URL %q", tt.url) - } - } else { - if err != nil && tt.expectedOwner != "" { - t.Errorf("Unexpected error for URL %q: %v", tt.url, err) - } - if err == nil && tt.expectedOwner != "" { - if owner != tt.expectedOwner || repo != tt.expectedRepo { - t.Errorf("ParseGitHubURL(%q) = (%q, %q); want (%q, %q)", - tt.url, owner, repo, tt.expectedOwner, tt.expectedRepo) - } - } - } - }) - } 
-} - func TestSanitizeForFilename_SpecialCases(t *testing.T) { tests := []struct { name string @@ -458,17 +309,3 @@ func BenchmarkSplitRepoSlug_Invalid(b *testing.B) { _, _, _ = SplitRepoSlug(slug) } } - -func BenchmarkParseGitHubURL_SSH(b *testing.B) { - url := "git@github.com:github/gh-aw.git" - for b.Loop() { - _, _, _ = ParseGitHubURL(url) - } -} - -func BenchmarkParseGitHubURL_HTTPS(b *testing.B) { - url := "https://github.com/github/gh-aw.git" - for b.Loop() { - _, _, _ = ParseGitHubURL(url) - } -} diff --git a/pkg/sliceutil/sliceutil.go b/pkg/sliceutil/sliceutil.go index 265e265a07..ea6166e23d 100644 --- a/pkg/sliceutil/sliceutil.go +++ b/pkg/sliceutil/sliceutil.go @@ -3,7 +3,6 @@ package sliceutil import ( "slices" - "strings" ) // Contains checks if a string slice contains a specific string. @@ -11,21 +10,6 @@ func Contains(slice []string, item string) bool { return slices.Contains(slice, item) } -// ContainsAny checks if a string contains any of the given substrings. -func ContainsAny(s string, substrings ...string) bool { - for _, sub := range substrings { - if strings.Contains(s, sub) { - return true - } - } - return false -} - -// ContainsIgnoreCase checks if a string contains a substring, ignoring case. -func ContainsIgnoreCase(s, substr string) bool { - return strings.Contains(strings.ToLower(s), strings.ToLower(substr)) -} - // Filter returns a new slice containing only elements that match the predicate. // This is a pure function that does not modify the input slice. 
func Filter[T any](slice []T, predicate func(T) bool) []T { diff --git a/pkg/sliceutil/sliceutil_test.go b/pkg/sliceutil/sliceutil_test.go index ce63b5e2c7..1b34c00cb1 100644 --- a/pkg/sliceutil/sliceutil_test.go +++ b/pkg/sliceutil/sliceutil_test.go @@ -62,138 +62,6 @@ func TestContains(t *testing.T) { } } -func TestContainsAny(t *testing.T) { - tests := []struct { - name string - s string - substrings []string - expected bool - }{ - { - name: "contains first substring", - s: "hello world", - substrings: []string{"hello", "goodbye"}, - expected: true, - }, - { - name: "contains second substring", - s: "hello world", - substrings: []string{"goodbye", "world"}, - expected: true, - }, - { - name: "contains no substrings", - s: "hello world", - substrings: []string{"goodbye", "farewell"}, - expected: false, - }, - { - name: "empty substrings", - s: "hello world", - substrings: []string{}, - expected: false, - }, - { - name: "empty string", - s: "", - substrings: []string{"hello"}, - expected: false, - }, - { - name: "contains empty substring", - s: "hello world", - substrings: []string{""}, - expected: true, - }, - { - name: "multiple matches", - s: "Docker images are being downloaded", - substrings: []string{"downloading", "retry"}, - expected: false, - }, - { - name: "match found", - s: "downloading images", - substrings: []string{"downloading", "retry"}, - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := ContainsAny(tt.s, tt.substrings...) 
- assert.Equal(t, tt.expected, result, - "ContainsAny should return correct value for string %q and substrings %v", tt.s, tt.substrings) - }) - } -} - -func TestContainsIgnoreCase(t *testing.T) { - tests := []struct { - name string - s string - substr string - expected bool - }{ - { - name: "exact match", - s: "Hello World", - substr: "Hello", - expected: true, - }, - { - name: "case insensitive match", - s: "Hello World", - substr: "hello", - expected: true, - }, - { - name: "case insensitive match uppercase", - s: "hello world", - substr: "WORLD", - expected: true, - }, - { - name: "no match", - s: "Hello World", - substr: "goodbye", - expected: false, - }, - { - name: "empty substring", - s: "Hello World", - substr: "", - expected: true, - }, - { - name: "empty string", - s: "", - substr: "hello", - expected: false, - }, - { - name: "both empty", - s: "", - substr: "", - expected: true, - }, - { - name: "mixed case substring in mixed case string", - s: "GitHub Actions Workflow", - substr: "actions", - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := ContainsIgnoreCase(tt.s, tt.substr) - assert.Equal(t, tt.expected, result, - "ContainsIgnoreCase should return correct value for string %q and substring %q", tt.s, tt.substr) - }) - } -} - func BenchmarkContains(b *testing.B) { slice := []string{"apple", "banana", "cherry", "date", "elderberry"} for b.Loop() { @@ -201,22 +69,6 @@ func BenchmarkContains(b *testing.B) { } } -func BenchmarkContainsAny(b *testing.B) { - s := "hello world from the testing framework" - substrings := []string{"goodbye", "world", "farewell"} - for b.Loop() { - ContainsAny(s, substrings...) 
- } -} - -func BenchmarkContainsIgnoreCase(b *testing.B) { - s := "Hello World From The Testing Framework" - substr := "world" - for b.Loop() { - ContainsIgnoreCase(s, substr) - } -} - // Additional edge case tests for better coverage func TestContains_LargeSlice(t *testing.T) { @@ -243,77 +95,6 @@ func TestContains_SingleElement(t *testing.T) { assert.False(t, Contains(slice, "other"), "should not find different item in single-element slice") } -func TestContainsAny_MultipleMatches(t *testing.T) { - s := "The quick brown fox jumps over the lazy dog" - - // Multiple substrings that match - assert.True(t, ContainsAny(s, "quick", "lazy"), "should find at least one matching substring") - - // First one matches - assert.True(t, ContainsAny(s, "quick", "missing", "absent"), "should find first matching substring") - - // Last one matches - assert.True(t, ContainsAny(s, "missing", "absent", "dog"), "should find last matching substring") -} - -func TestContainsAny_NilSubstrings(t *testing.T) { - s := "test string" - - // Nil substrings should return false - assert.False(t, ContainsAny(s, nil...), "should return false for nil substrings") -} - -func TestContainsIgnoreCase_Unicode(t *testing.T) { - tests := []struct { - name string - s string - substr string - expected bool - }{ - { - name: "unicode characters", - s: "Café España", - substr: "café", - expected: true, - }, - { - name: "unicode uppercase", - s: "café españa", - substr: "CAFÉ", - expected: true, - }, - { - name: "emoji in string", - s: "Hello 👋 World", - substr: "👋", - expected: true, - }, - { - name: "special characters", - s: "test@example.com", - substr: "EXAMPLE", - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := ContainsIgnoreCase(tt.s, tt.substr) - assert.Equal(t, tt.expected, result, - "ContainsIgnoreCase should return correct value for string %q and substring %q", tt.s, tt.substr) - }) - } -} - -func TestContainsIgnoreCase_PartialMatch(t 
*testing.T) { - s := "GitHub Actions Workflow" - - // Should find partial matches - assert.True(t, ContainsIgnoreCase(s, "hub"), "should find partial match 'hub' in 'GitHub'") - assert.True(t, ContainsIgnoreCase(s, "WORK"), "should find partial match 'WORK' in 'Workflow'") - assert.True(t, ContainsIgnoreCase(s, "actions workflow"), "should find multi-word partial match") -} - func TestContains_Duplicates(t *testing.T) { // Slice with duplicate values slice := []string{"apple", "banana", "apple", "cherry", "apple"} @@ -329,16 +110,3 @@ func TestContains_Duplicates(t *testing.T) { } assert.Equal(t, 3, count, "should count all occurrences of duplicate item") } - -func TestContainsAny_OrderMatters(t *testing.T) { - s := "test string with multiple words" - - // Test that function returns on first match (short-circuit behavior) - // Both should find a match, order shouldn't affect result - result1 := ContainsAny(s, "string", "words") - result2 := ContainsAny(s, "words", "string") - - assert.Equal(t, result1, result2, "should return same result regardless of substring order") - assert.True(t, result1, "should find matches in first ordering") - assert.True(t, result2, "should find matches in second ordering") -} diff --git a/pkg/stringutil/identifiers.go b/pkg/stringutil/identifiers.go index 51d730036f..8ae8607533 100644 --- a/pkg/stringutil/identifiers.go +++ b/pkg/stringutil/identifiers.go @@ -99,29 +99,3 @@ func LockFileToMarkdown(lockPath string) string { cleaned := filepath.Clean(lockPath) return strings.TrimSuffix(cleaned, ".lock.yml") + ".md" } - -// IsAgenticWorkflow returns true if the file path is an agentic workflow file. -// Agentic workflows end with .md. 
-// -// Examples: -// -// IsAgenticWorkflow("test.md") // returns true -// IsAgenticWorkflow("weekly-research.md") // returns true -// IsAgenticWorkflow(".github/workflows/workflow.md") // returns true -// IsAgenticWorkflow("test.lock.yml") // returns false -func IsAgenticWorkflow(path string) bool { - // Must end with .md - return strings.HasSuffix(path, ".md") -} - -// IsLockFile returns true if the file path is a compiled lock file. -// Lock files end with .lock.yml and are compiled from agentic workflows. -// -// Examples: -// -// IsLockFile("test.lock.yml") // returns true -// IsLockFile(".github/workflows/workflow.lock.yml") // returns true -// IsLockFile("test.md") // returns false -func IsLockFile(path string) bool { - return strings.HasSuffix(path, ".lock.yml") -} diff --git a/pkg/stringutil/identifiers_test.go b/pkg/stringutil/identifiers_test.go index e282901e55..9ed7839986 100644 --- a/pkg/stringutil/identifiers_test.go +++ b/pkg/stringutil/identifiers_test.go @@ -3,7 +3,6 @@ package stringutil import ( - "strings" "testing" ) @@ -283,109 +282,3 @@ func TestRoundTripConversions(t *testing.T) { } }) } - -func TestIsAgenticWorkflow(t *testing.T) { - tests := []struct { - name string - path string - expected bool - }{ - { - name: "regular workflow", - path: "test.md", - expected: true, - }, - { - name: "workflow with path", - path: ".github/workflows/weekly-research.md", - expected: true, - }, - { - name: "workflow with dots in name", - path: "my.workflow.test.md", - expected: true, - }, - { - name: "lock file", - path: "test.lock.yml", - expected: false, - }, - { - name: "no extension", - path: "test", - expected: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := IsAgenticWorkflow(tt.path) - if result != tt.expected { - t.Errorf("IsAgenticWorkflow(%q) = %v, expected %v", tt.path, result, tt.expected) - } - }) - } -} - -func TestIsLockFile(t *testing.T) { - tests := []struct { - name string - path string - 
expected bool - }{ - { - name: "regular lock file", - path: "test.lock.yml", - expected: true, - }, - { - name: "lock file with path", - path: ".github/workflows/test.lock.yml", - expected: true, - }, - { - name: "workflow file", - path: "test.md", - expected: false, - }, - { - name: "yaml file", - path: "test.yml", - expected: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := IsLockFile(tt.path) - if result != tt.expected { - t.Errorf("IsLockFile(%q) = %v, expected %v", tt.path, result, tt.expected) - } - }) - } -} - -func TestFileTypeHelpers_Exclusivity(t *testing.T) { - // Test that file types are mutually exclusive (except lock files) - testPaths := []string{ - "test.md", - "test.lock.yml", - } - - for _, path := range testPaths { - t.Run(path, func(t *testing.T) { - isWorkflow := IsAgenticWorkflow(path) - isLock := IsLockFile(path) - - // All .md files should be workflows - if strings.HasSuffix(path, ".md") && !isWorkflow { - t.Errorf("Path %q should be a workflow but isn't", path) - } - - // All .lock.yml files should be lock files - if strings.HasSuffix(path, ".lock.yml") && !isLock { - t.Errorf("Path %q should be a lock file but isn't", path) - } - }) - } -} diff --git a/pkg/stringutil/pat_validation.go b/pkg/stringutil/pat_validation.go index a1c79bcabd..53c5e0e957 100644 --- a/pkg/stringutil/pat_validation.go +++ b/pkg/stringutil/pat_validation.go @@ -66,21 +66,6 @@ func ClassifyPAT(token string) PATType { return patType } -// IsFineGrainedPAT returns true if the token is a fine-grained personal access token -func IsFineGrainedPAT(token string) bool { - return strings.HasPrefix(token, "github_pat_") -} - -// IsClassicPAT returns true if the token is a classic personal access token -func IsClassicPAT(token string) bool { - return strings.HasPrefix(token, "ghp_") -} - -// IsOAuthToken returns true if the token is an OAuth token (not a PAT) -func IsOAuthToken(token string) bool { - return 
strings.HasPrefix(token, "gho_") -} - // ValidateCopilotPAT validates that a token is a valid fine-grained PAT for Copilot. // Returns an error if the token is not a fine-grained PAT with a descriptive error message. // diff --git a/pkg/stringutil/pat_validation_test.go b/pkg/stringutil/pat_validation_test.go index 2adaa03bea..4e49a7d2e8 100644 --- a/pkg/stringutil/pat_validation_test.go +++ b/pkg/stringutil/pat_validation_test.go @@ -74,27 +74,6 @@ func TestPATType_IsValid(t *testing.T) { assert.False(t, PATTypeUnknown.IsValid(), "unknown should not be valid") } -func TestIsFineGrainedPAT(t *testing.T) { - assert.True(t, IsFineGrainedPAT("github_pat_abc123"), "should identify fine-grained PAT") - assert.False(t, IsFineGrainedPAT("ghp_abc123"), "should not identify classic PAT as fine-grained") - assert.False(t, IsFineGrainedPAT("gho_abc123"), "should not identify OAuth token as fine-grained") - assert.False(t, IsFineGrainedPAT("random"), "should not identify unknown token as fine-grained") -} - -func TestIsClassicPAT(t *testing.T) { - assert.True(t, IsClassicPAT("ghp_abc123"), "should identify classic PAT") - assert.False(t, IsClassicPAT("github_pat_abc123"), "should not identify fine-grained PAT as classic") - assert.False(t, IsClassicPAT("gho_abc123"), "should not identify OAuth token as classic") - assert.False(t, IsClassicPAT("random"), "should not identify unknown token as classic") -} - -func TestIsOAuthToken(t *testing.T) { - assert.True(t, IsOAuthToken("gho_abc123"), "should identify OAuth token") - assert.False(t, IsOAuthToken("github_pat_abc123"), "should not identify fine-grained PAT as OAuth") - assert.False(t, IsOAuthToken("ghp_abc123"), "should not identify classic PAT as OAuth") - assert.False(t, IsOAuthToken("random"), "should not identify unknown token as OAuth") -} - func TestValidateCopilotPAT(t *testing.T) { tests := []struct { name string diff --git a/pkg/stringutil/stringutil.go b/pkg/stringutil/stringutil.go index 325c49e1d0..4dcfeb5228 
100644 --- a/pkg/stringutil/stringutil.go +++ b/pkg/stringutil/stringutil.go @@ -81,27 +81,3 @@ func IsPositiveInteger(s string) bool { num, err := strconv.ParseInt(s, 10, 64) return err == nil && num > 0 } - -// StripANSIEscapeCodes removes ANSI escape sequences from a string. -// This prevents terminal color codes and other control sequences from -// being accidentally included in generated files (e.g., YAML workflows). -// -// Common ANSI escape sequences that are removed: -// - Color codes: \x1b[31m (red), \x1b[0m (reset) -// - Text formatting: \x1b[1m (bold), \x1b[4m (underline) -// - Cursor control: \x1b[2J (clear screen) -// -// Example: -// -// input := "Hello \x1b[31mWorld\x1b[0m" // "Hello [red]World[reset]" -// output := StripANSIEscapeCodes(input) // "Hello World" -// -// This function is particularly important for: -// - Workflow descriptions copied from terminal output -// - Comments in generated YAML files -// - Any text that should be plain ASCII -// -// Deprecated: Use StripANSI instead, which handles a broader range of terminal sequences. 
-func StripANSIEscapeCodes(s string) string { - return StripANSI(s) -} diff --git a/pkg/stringutil/stringutil_test.go b/pkg/stringutil/stringutil_test.go index 2247f3c126..ee577db98a 100644 --- a/pkg/stringutil/stringutil_test.go +++ b/pkg/stringutil/stringutil_test.go @@ -414,163 +414,6 @@ func TestParseVersionValue(t *testing.T) { } } -func TestStripANSIEscapeCodes(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "no ANSI codes", - input: "Hello World", - expected: "Hello World", - }, - { - name: "simple color reset", - input: "Hello World[m", - expected: "Hello World[m", // [m without ESC is not an ANSI code - }, - { - name: "ANSI color reset", - input: "Hello World\x1b[m", - expected: "Hello World", - }, - { - name: "ANSI color code with reset", - input: "Hello \x1b[31mWorld\x1b[0m", - expected: "Hello World", - }, - { - name: "ANSI bold text", - input: "\x1b[1mBold text\x1b[0m", - expected: "Bold text", - }, - { - name: "multiple ANSI codes", - input: "\x1b[1m\x1b[31mRed Bold\x1b[0m", - expected: "Red Bold", - }, - { - name: "ANSI with parameters", - input: "Text \x1b[1;32mgreen bold\x1b[0m more text", - expected: "Text green bold more text", - }, - { - name: "ANSI clear screen", - input: "\x1b[2JCleared", - expected: "Cleared", - }, - { - name: "empty string", - input: "", - expected: "", - }, - { - name: "only ANSI codes", - input: "\x1b[0m\x1b[31m\x1b[1m", - expected: "", - }, - { - name: "real-world example from issue", - input: "2. **REQUIRED**: Run 'make recompile' to update workflows (MUST be run after any constant changes)\x1b[m", - expected: "2. 
**REQUIRED**: Run 'make recompile' to update workflows (MUST be run after any constant changes)", - }, - { - name: "another real-world example", - input: "- **SAVE TO CACHE**: Store help outputs (main and all subcommands) and version check results in cache-memory\x1b[m", - expected: "- **SAVE TO CACHE**: Store help outputs (main and all subcommands) and version check results in cache-memory", - }, - { - name: "ANSI underline", - input: "\x1b[4mUnderlined\x1b[0m text", - expected: "Underlined text", - }, - { - name: "ANSI 256 color", - input: "\x1b[38;5;214mOrange\x1b[0m", - expected: "Orange", - }, - { - name: "mixed content with newlines", - input: "Line 1\x1b[31m\nLine 2\x1b[0m\nLine 3", - expected: "Line 1\nLine 2\nLine 3", - }, - { - name: "ANSI cursor movement", - input: "\x1b[2AMove up\x1b[3BMove down", - expected: "Move upMove down", - }, - { - name: "ANSI erase in line", - input: "Start\x1b[KEnd", - expected: "StartEnd", - }, - { - name: "consecutive ANSI codes", - input: "\x1b[1m\x1b[31m\x1b[4mRed Bold Underline\x1b[0m\x1b[0m\x1b[0m", - expected: "Red Bold Underline", - }, - { - name: "ANSI with large parameter", - input: "\x1b[38;5;255mWhite\x1b[0m", - expected: "White", - }, - { - name: "ANSI RGB color (24-bit)", - input: "\x1b[38;2;255;128;0mOrange RGB\x1b[0m", - expected: "Orange RGB", - }, - { - name: "ANSI codes in the middle of words", - input: "hel\x1b[31mlo\x1b[0m wor\x1b[32mld\x1b[0m", - expected: "hello world", - }, - { - name: "ANSI save/restore cursor", - input: "Text\x1b[s more text\x1b[u end", - expected: "Text more text end", - }, - { - name: "ANSI cursor position", - input: "\x1b[H\x1b[2JClear and home", - expected: "Clear and home", - }, - { - name: "long string with multiple ANSI codes", - input: "\x1b[1mThis\x1b[0m \x1b[31mis\x1b[0m \x1b[32ma\x1b[0m \x1b[33mvery\x1b[0m \x1b[34mlong\x1b[0m \x1b[35mstring\x1b[0m \x1b[36mwith\x1b[0m \x1b[37mmany\x1b[0m \x1b[1mANSI\x1b[0m \x1b[4mcodes\x1b[0m", - expected: "This is a very long string with 
many ANSI codes", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := StripANSIEscapeCodes(tt.input) - if result != tt.expected { - t.Errorf("StripANSIEscapeCodes(%q) = %q, expected %q", tt.input, result, tt.expected) - } - - // Verify no ANSI escape sequences remain - if result != "" && strings.Contains(result, "\x1b[") { - t.Errorf("Result still contains ANSI escape sequences: %q", result) - } - }) - } -} - -func BenchmarkStripANSIEscapeCodes_Clean(b *testing.B) { - s := "This is a clean string without any ANSI codes" - for b.Loop() { - StripANSIEscapeCodes(s) - } -} - -func BenchmarkStripANSIEscapeCodes_WithCodes(b *testing.B) { - s := "This \x1b[31mhas\x1b[0m some \x1b[1mANSI\x1b[0m codes" - for b.Loop() { - StripANSIEscapeCodes(s) - } -} - func TestIsPositiveInteger(t *testing.T) { tests := []struct { name string diff --git a/pkg/workflow/agentic_engine.go b/pkg/workflow/agentic_engine.go index af89f2f1f1..d0fd1ceaf2 100644 --- a/pkg/workflow/agentic_engine.go +++ b/pkg/workflow/agentic_engine.go @@ -39,8 +39,7 @@ type GitHubActionStep []string // ├── SupportsToolsAllowlist() // ├── SupportsMaxTurns() // ├── SupportsWebFetch() -// ├── SupportsWebSearch() -// └── SupportsFirewall() +// └── SupportsWebSearch() // // WorkflowExecutor (compilation - required) // ├── GetDeclaredOutputFiles() @@ -118,10 +117,6 @@ type CapabilityProvider interface { // SupportsWebSearch returns true if this engine has built-in support for the web-search tool SupportsWebSearch() bool - // SupportsFirewall returns true if this engine supports network firewalling/sandboxing - // When true, the engine can enforce network restrictions defined in the workflow - SupportsFirewall() bool - // SupportsPlugins returns true if this engine supports plugin installation // When true, plugins can be installed using the engine's plugin install command SupportsPlugins() bool @@ -230,7 +225,6 @@ type BaseEngine struct { supportsMaxContinuations bool 
supportsWebFetch bool supportsWebSearch bool - supportsFirewall bool supportsPlugins bool supportsLLMGateway bool } @@ -267,10 +261,6 @@ func (e *BaseEngine) SupportsWebSearch() bool { return e.supportsWebSearch } -func (e *BaseEngine) SupportsFirewall() bool { - return e.supportsFirewall -} - func (e *BaseEngine) SupportsPlugins() bool { return e.supportsPlugins } diff --git a/pkg/workflow/agentic_engine_interfaces_test.go b/pkg/workflow/agentic_engine_interfaces_test.go index 79259d48cc..1336ef4f5e 100644 --- a/pkg/workflow/agentic_engine_interfaces_test.go +++ b/pkg/workflow/agentic_engine_interfaces_test.go @@ -55,7 +55,6 @@ func TestInterfaceSegregation(t *testing.T) { _ = engine.SupportsMaxTurns() _ = engine.SupportsWebFetch() _ = engine.SupportsWebSearch() - _ = engine.SupportsFirewall() } }) @@ -217,7 +216,6 @@ func TestSpecificInterfaceUsage(t *testing.T) { "max_turns": cp.SupportsMaxTurns(), "web_fetch": cp.SupportsWebFetch(), "web_search": cp.SupportsWebSearch(), - "firewall": cp.SupportsFirewall(), } } @@ -225,7 +223,7 @@ func TestSpecificInterfaceUsage(t *testing.T) { for _, engine := range registry.GetAllEngines() { caps := checkCapabilities(engine) assert.NotNil(t, caps, "Engine %s should have capabilities", engine.GetID()) - assert.Len(t, caps, 5, "Should have 5 capability flags") + assert.Len(t, caps, 4, "Should have 4 capability flags") } }) @@ -264,7 +262,6 @@ func TestBaseEngineImplementsAllInterfaces(t *testing.T) { supportsMaxTurns: true, supportsWebFetch: true, supportsWebSearch: true, - supportsFirewall: true, } // Verify Engine interface methods @@ -278,7 +275,6 @@ func TestBaseEngineImplementsAllInterfaces(t *testing.T) { assert.True(t, base.SupportsMaxTurns()) assert.True(t, base.SupportsWebFetch()) assert.True(t, base.SupportsWebSearch()) - assert.True(t, base.SupportsFirewall()) // Verify default implementations assert.Empty(t, base.GetDeclaredOutputFiles()) @@ -308,7 +304,6 @@ func TestEngineCapabilityVariety(t *testing.T) { 
assert.False(t, copilot.SupportsMaxTurns()) assert.True(t, copilot.SupportsWebFetch()) assert.False(t, copilot.SupportsWebSearch()) - assert.True(t, copilot.SupportsFirewall()) assert.False(t, copilot.IsExperimental()) }) @@ -317,7 +312,6 @@ func TestEngineCapabilityVariety(t *testing.T) { assert.True(t, claude.SupportsMaxTurns()) assert.True(t, claude.SupportsWebFetch()) assert.True(t, claude.SupportsWebSearch()) - assert.True(t, claude.SupportsFirewall()) assert.False(t, claude.IsExperimental()) }) @@ -326,7 +320,6 @@ func TestEngineCapabilityVariety(t *testing.T) { assert.False(t, codex.SupportsMaxTurns()) assert.False(t, codex.SupportsWebFetch()) assert.True(t, codex.SupportsWebSearch()) - assert.True(t, codex.SupportsFirewall()) assert.False(t, codex.IsExperimental()) }) } diff --git a/pkg/workflow/artifact_manager.go b/pkg/workflow/artifact_manager.go index f3a9036816..966c9fb97a 100644 --- a/pkg/workflow/artifact_manager.go +++ b/pkg/workflow/artifact_manager.go @@ -1,11 +1,6 @@ package workflow import ( - "errors" - "fmt" - "path/filepath" - "strings" - "github.com/github/gh-aw/pkg/logger" ) @@ -98,388 +93,10 @@ func NewArtifactManager() *ArtifactManager { } } -// SetCurrentJob sets the job currently being processed -func (am *ArtifactManager) SetCurrentJob(jobName string) { - artifactManagerLog.Printf("Setting current job: %s", jobName) - am.currentJob = jobName -} - -// GetCurrentJob returns the current job name -func (am *ArtifactManager) GetCurrentJob() string { - return am.currentJob -} - -// RecordUpload records an artifact upload operation -func (am *ArtifactManager) RecordUpload(upload *ArtifactUpload) error { - if upload.Name == "" { - return errors.New("artifact upload must have a name") - } - if len(upload.Paths) == 0 { - return errors.New("artifact upload must have at least one path") - } - - // Set the job name if not already set - if upload.JobName == "" { - upload.JobName = am.currentJob - } - - // Compute normalized paths with common parent 
removed - upload.NormalizedPaths = computeNormalizedPaths(upload.Paths) - - artifactManagerLog.Printf("Recording upload: artifact=%s, job=%s, paths=%v, normalized=%v", - upload.Name, upload.JobName, upload.Paths, upload.NormalizedPaths) - - am.uploads[upload.JobName] = append(am.uploads[upload.JobName], upload) - return nil -} - -// RecordDownload records an artifact download operation -func (am *ArtifactManager) RecordDownload(download *ArtifactDownload) error { - if download.Name == "" && download.Pattern == "" { - return errors.New("artifact download must have either name or pattern") - } - if download.Path == "" { - return errors.New("artifact download must have a path") - } - - // Set the job name if not already set - if download.JobName == "" { - download.JobName = am.currentJob - } - - artifactManagerLog.Printf("Recording download: name=%s, pattern=%s, job=%s, path=%s", - download.Name, download.Pattern, download.JobName, download.Path) - - am.downloads[download.JobName] = append(am.downloads[download.JobName], download) - return nil -} - -// computeNormalizedPaths computes normalized paths with common parent directory removed. 
-// This simulates GitHub Actions behavior where files uploaded with paths like: -// // /tmp/gh-aw/aw-prompts/prompt.txt // /tmp/gh-aw/aw.patch -// -// are stored in the artifact as: -// // aw-prompts/prompt.txt // aw.patch -// -// (with common parent /tmp/gh-aw/ removed) -func computeNormalizedPaths(paths []string) map[string]string { - if len(paths) == 0 { - return nil - } - - // If only one path, normalize it relative to its parent - if len(paths) == 1 { - path := filepath.Clean(paths[0]) - // Get the base name (file/dir name without parent) - base := filepath.Base(path) - result := make(map[string]string) - result[path] = base - artifactManagerLog.Printf("Single path normalization: %s -> %s", path, base) - return result - } - - // Find common parent directory for multiple paths - commonParent := findCommonParent(paths) - artifactManagerLog.Printf("Common parent for %d paths: %s", len(paths), commonParent) - - // Create mapping of original path to normalized path - normalized := make(map[string]string) - for _, path := range paths { - cleanPath := filepath.Clean(path) - var relativePath string - - if commonParent != "" && commonParent != "." 
{ - // Remove common parent - rel, err := filepath.Rel(commonParent, cleanPath) - if err != nil { - // If we can't compute relative path, use the base name - relativePath = filepath.Base(cleanPath) - } else { - relativePath = rel - } - } else { - // No common parent, use base name - relativePath = filepath.Base(cleanPath) - } - - normalized[cleanPath] = relativePath - artifactManagerLog.Printf("Path normalization: %s -> %s (parent: %s)", cleanPath, relativePath, commonParent) - } - - return normalized -} - -// findCommonParent finds the common parent directory of multiple paths -func findCommonParent(paths []string) string { - if len(paths) == 0 { - return "" - } - if len(paths) == 1 { - return filepath.Dir(filepath.Clean(paths[0])) - } - - // Clean all paths and split into components - splitPaths := make([][]string, len(paths)) - for i, p := range paths { - cleanPath := filepath.Clean(p) - // Split the full path (not just directory) - // Handle absolute paths starting with / - if strings.HasPrefix(cleanPath, string(filepath.Separator)) { - cleanPath = cleanPath[1:] // Remove leading separator for splitting - } - splitPaths[i] = strings.Split(cleanPath, string(filepath.Separator)) - } - - // Find the minimum length among all paths - minLen := len(splitPaths[0]) - for _, sp := range splitPaths[1:] { - if len(sp) < minLen { - minLen = len(sp) - } - } - - // Find common prefix by comparing each component - var commonParts []string - for i := range minLen - 1 { // minLen-1 to exclude the filename - part := splitPaths[0][i] - allMatch := true - for _, sp := range splitPaths[1:] { - if sp[i] != part { - allMatch = false - break - } - } - if allMatch { - commonParts = append(commonParts, part) - } else { - break - } - } - - if len(commonParts) == 0 { - return "" - } - - // Reconstruct the path with leading separator if original paths were absolute - result := filepath.Join(commonParts...) 
- if strings.HasPrefix(paths[0], string(filepath.Separator)) { - result = string(filepath.Separator) + result - } - - return result -} - -// ComputeDownloadPath computes the actual file path after download -// based on GitHub Actions v4 behavior. -// -// Rules: -// - Download by name: files extracted directly to path/ (e.g., path/file.txt) -// - Download by pattern without merge: files in path/artifact-name/ (e.g., path/artifact-1/file.txt) -// - Download by pattern with merge: files extracted directly to path/ (e.g., path/file.txt) -// - Common parent directories are stripped during upload (simulated via NormalizedPaths) -func (am *ArtifactManager) ComputeDownloadPath(download *ArtifactDownload, upload *ArtifactUpload, originalPath string) string { - // Get the normalized path (with common parent removed) from the upload - // This simulates how GitHub Actions strips common parent directories - cleanOriginal := filepath.Clean(originalPath) - normalizedPath := cleanOriginal - - // If upload has normalized paths, use them - if upload.NormalizedPaths != nil { - if normalized, ok := upload.NormalizedPaths[cleanOriginal]; ok { - normalizedPath = normalized - artifactManagerLog.Printf("Using normalized path from upload: %s -> %s", cleanOriginal, normalizedPath) - } - } else { - // Fallback: remove leading ./ - normalizedPath = strings.TrimPrefix(originalPath, "./") - } - - // If downloading by name (not pattern), files go directly to download path - if download.Name != "" && download.Pattern == "" { - result := filepath.Join(download.Path, normalizedPath) - artifactManagerLog.Printf("Download by name: %s -> %s", originalPath, result) - return result - } - - // If downloading by pattern with merge-multiple, files go directly to download path - if download.Pattern != "" && download.MergeMultiple { - result := filepath.Join(download.Path, normalizedPath) - artifactManagerLog.Printf("Download by pattern (merge): %s -> %s", originalPath, result) - return result - } - - // If 
downloading by pattern without merge, files go to path/artifact-name/ - if download.Pattern != "" && !download.MergeMultiple { - result := filepath.Join(download.Path, upload.Name, normalizedPath) - artifactManagerLog.Printf("Download by pattern (no merge): %s -> %s", originalPath, result) - return result - } - - // Default: direct to download path - result := filepath.Join(download.Path, normalizedPath) - artifactManagerLog.Printf("Download default: %s -> %s", originalPath, result) - return result -} - -// FindUploadedArtifact finds an uploaded artifact by name from jobs this job depends on -func (am *ArtifactManager) FindUploadedArtifact(artifactName string, dependsOn []string) *ArtifactUpload { - // Search in all dependent jobs - for _, jobName := range dependsOn { - uploads := am.uploads[jobName] - for _, upload := range uploads { - if upload.Name == artifactName { - artifactManagerLog.Printf("Found artifact %s uploaded by job %s", artifactName, jobName) - return upload - } - } - } - - // If not found in dependencies, search all jobs (for backwards compatibility) - // This handles cases where dependencies aren't explicitly tracked - for jobName, uploads := range am.uploads { - for _, upload := range uploads { - if upload.Name == artifactName { - artifactManagerLog.Printf("Found artifact %s uploaded by job %s (global search)", artifactName, jobName) - return upload - } - } - } - - artifactManagerLog.Printf("Artifact %s not found in any job", artifactName) - return nil -} - -// ValidateDownload validates that a download operation can find its artifact -func (am *ArtifactManager) ValidateDownload(download *ArtifactDownload) error { - if download.Name != "" { - // Download by name - must find exact artifact - upload := am.FindUploadedArtifact(download.Name, download.DependsOn) - if upload == nil { - return fmt.Errorf("artifact '%s' downloaded by job '%s' not found in any dependent job", - download.Name, download.JobName) - } - artifactManagerLog.Printf("Validated 
download: artifact=%s exists in job=%s", - download.Name, upload.JobName) - } - - if download.Pattern != "" { - // Download by pattern - must find at least one matching artifact - found := false - for _, jobName := range download.DependsOn { - uploads := am.uploads[jobName] - for _, upload := range uploads { - // Simple pattern matching for now (could be enhanced with glob) - if matchesPattern(upload.Name, download.Pattern) { - found = true - break - } - } - if found { - break - } - } - if !found { - // Try global search - for _, uploads := range am.uploads { - for _, upload := range uploads { - if matchesPattern(upload.Name, download.Pattern) { - found = true - break - } - } - if found { - break - } - } - } - if !found { - return fmt.Errorf("no artifacts matching pattern '%s' found for job '%s'", - download.Pattern, download.JobName) - } - artifactManagerLog.Printf("Validated download: pattern=%s matches at least one artifact", - download.Pattern) - } - - return nil -} - -// matchesPattern performs simple wildcard pattern matching -// Supports * as wildcard (e.g., "agent-*" matches "agent-artifacts") -func matchesPattern(name, pattern string) bool { - // If pattern has no wildcard, do exact match - if !strings.Contains(pattern, "*") { - return name == pattern - } - - // Handle leading wildcard: "*suffix" - if after, ok := strings.CutPrefix(pattern, "*"); ok { - suffix := after - return strings.HasSuffix(name, suffix) - } - - // Handle trailing wildcard: "prefix*" - if before, ok := strings.CutSuffix(pattern, "*"); ok { - prefix := before - return strings.HasPrefix(name, prefix) - } - - // Handle middle wildcard: "prefix*suffix" - parts := strings.Split(pattern, "*") - if len(parts) == 2 { - prefix, suffix := parts[0], parts[1] - // Check that name starts with prefix, ends with suffix, and has something in between - if strings.HasPrefix(name, prefix) && strings.HasSuffix(name, suffix) { - // Ensure there's content between prefix and suffix - // The middle part 
should be at least as long as the non-overlapping parts - minLength := len(prefix) + len(suffix) - return len(name) >= minLength - } - return false - } - - // For more complex patterns, just do exact match - return name == pattern -} - -// GetUploadsForJob returns all uploads for a specific job -func (am *ArtifactManager) GetUploadsForJob(jobName string) []*ArtifactUpload { - return am.uploads[jobName] -} - -// GetDownloadsForJob returns all downloads for a specific job -func (am *ArtifactManager) GetDownloadsForJob(jobName string) []*ArtifactDownload { - return am.downloads[jobName] -} - -// ValidateAllDownloads validates all download operations -func (am *ArtifactManager) ValidateAllDownloads() []error { - var errors []error - - for jobName, downloads := range am.downloads { - for _, download := range downloads { - if err := am.ValidateDownload(download); err != nil { - errors = append(errors, fmt.Errorf("job %s: %w", jobName, err)) - } - } - } - - if len(errors) > 0 { - artifactManagerLog.Printf("Validation found %d error(s)", len(errors)) - } else { - artifactManagerLog.Print("All downloads validated successfully") - } - - return errors -} - -// GetAllArtifacts returns all uploaded artifacts -func (am *ArtifactManager) GetAllArtifacts() map[string][]*ArtifactUpload { - return am.uploads -} // Reset clears all tracked uploads and downloads func (am *ArtifactManager) Reset() { diff --git a/pkg/workflow/artifact_manager_integration_test.go b/pkg/workflow/artifact_manager_integration_test.go deleted file mode 100644 index c17df1b6eb..0000000000 --- a/pkg/workflow/artifact_manager_integration_test.go +++ /dev/null @@ -1,280 +0,0 @@ -//go:build integration - -package workflow - -import ( - "os" - "path/filepath" - "testing" - - "github.com/github/gh-aw/pkg/stringutil" - - "github.com/github/gh-aw/pkg/testutil" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// TestArtifactManagerIntegrationWithCompiler tests that the artifact 
manager -// is properly integrated into the compiler and resets between compilations -func TestArtifactManagerIntegrationWithCompiler(t *testing.T) { - tmpDir := testutil.TempDir(t, "artifact-manager-integration-*") - - // Create a simple workflow - workflowContent := `--- -on: workflow_dispatch -permissions: - contents: read -engine: copilot ---- - -# Test Artifact Manager Integration - -This test verifies that the artifact manager is integrated into the compiler. -` - - workflowFile := filepath.Join(tmpDir, "test-workflow.md") - err := os.WriteFile(workflowFile, []byte(workflowContent), 0644) - require.NoError(t, err) - - // Create compiler - compiler := NewCompiler() - - // Verify artifact manager is initialized - artifactManager := compiler.GetArtifactManager() - require.NotNil(t, artifactManager, "Artifact manager should be initialized") - - // First compilation - err = compiler.CompileWorkflow(workflowFile) - require.NoError(t, err) - - // Artifact manager should be reset (empty) after first compilation - assert.Empty(t, artifactManager.GetAllArtifacts(), "Artifact manager should be reset between compilations") - - // Manually add some test data to artifact manager - artifactManager.SetCurrentJob("test-job") - err = artifactManager.RecordUpload(&ArtifactUpload{ - Name: "test-artifact", - Paths: []string{"/tmp/test.txt"}, - JobName: "test-job", - }) - require.NoError(t, err) - - // Second compilation should reset the artifact manager - err = compiler.CompileWorkflow(workflowFile) - require.NoError(t, err) - - // Verify artifact manager was reset - assert.Empty(t, artifactManager.GetAllArtifacts(), "Artifact manager should be reset after second compilation") -} - -// TestArtifactManagerAccessDuringCompilation demonstrates how the artifact -// manager can be accessed and used during workflow compilation -func TestArtifactManagerAccessDuringCompilation(t *testing.T) { - tmpDir := testutil.TempDir(t, "artifact-manager-access-*") - - workflowContent := `--- -on: 
workflow_dispatch -permissions: - contents: read -safe-outputs: - create-issue: - title-prefix: "[bot] " -engine: copilot ---- - -# Test Artifact Manager Access - -This workflow has safe outputs configured. -` - - workflowFile := filepath.Join(tmpDir, "test-workflow.md") - err := os.WriteFile(workflowFile, []byte(workflowContent), 0644) - require.NoError(t, err) - - compiler := NewCompiler() - - // Compile the workflow - err = compiler.CompileWorkflow(workflowFile) - require.NoError(t, err) - - // Access the artifact manager after compilation - artifactManager := compiler.GetArtifactManager() - require.NotNil(t, artifactManager) - - // The manager should be available but empty (since we didn't track anything yet) - // In future integration, the compiler would populate this during job generation - assert.NotNil(t, artifactManager, "Artifact manager should be accessible after compilation") -} - -// TestArtifactManagerWithMultipleWorkflows tests that the artifact manager -// properly resets between multiple workflow compilations -func TestArtifactManagerWithMultipleWorkflows(t *testing.T) { - tmpDir := testutil.TempDir(t, "artifact-manager-multi-*") - - // Create multiple workflow files - workflows := []struct { - name string - content string - }{ - { - name: "workflow1.md", - content: `--- -on: push -permissions: - contents: read -engine: copilot ---- - -# Workflow 1 -Test workflow 1. -`, - }, - { - name: "workflow2.md", - content: `--- -on: pull_request -permissions: - contents: read -engine: copilot ---- - -# Workflow 2 -Test workflow 2. -`, - }, - { - name: "workflow3.md", - content: `--- -on: workflow_dispatch -permissions: - contents: read -engine: copilot ---- - -# Workflow 3 -Test workflow 3. 
-`, - }, - } - - compiler := NewCompiler() - artifactManager := compiler.GetArtifactManager() - - for i, wf := range workflows { - workflowFile := filepath.Join(tmpDir, wf.name) - err := os.WriteFile(workflowFile, []byte(wf.content), 0644) - require.NoError(t, err) - - // Add some test data before compilation - artifactManager.SetCurrentJob("test-job") - err = artifactManager.RecordUpload(&ArtifactUpload{ - Name: "artifact-" + wf.name, - Paths: []string{"/tmp/file.txt"}, - JobName: "test-job", - }) - require.NoError(t, err) - - // Compile workflow - err = compiler.CompileWorkflow(workflowFile) - require.NoError(t, err, "Workflow %d should compile successfully", i+1) - - // Verify artifact manager was reset - assert.Empty(t, artifactManager.GetAllArtifacts(), - "Artifact manager should be reset after compiling workflow %d", i+1) - - // Verify lock file was created - lockFile := stringutil.MarkdownToLockFile(workflowFile) - _, err = os.Stat(lockFile) - assert.NoError(t, err, "Lock file should exist for workflow %d", i+1) - } -} - -// TestArtifactManagerLazyInitialization tests that the artifact manager -// is lazily initialized if not present -func TestArtifactManagerLazyInitialization(t *testing.T) { - tmpDir := testutil.TempDir(t, "artifact-manager-lazy-*") - - workflowContent := `--- -on: workflow_dispatch -permissions: - contents: read -engine: copilot ---- - -# Test Lazy Init - -Test lazy initialization. 
-` - - workflowFile := filepath.Join(tmpDir, "test-workflow.md") - err := os.WriteFile(workflowFile, []byte(workflowContent), 0644) - require.NoError(t, err) - - // Create compiler without initializing artifact manager - compiler := &Compiler{ - verbose: false, - version: "test", - skipValidation: true, - actionMode: ActionModeDev, - jobManager: NewJobManager(), - engineRegistry: GetGlobalEngineRegistry(), - stepOrderTracker: NewStepOrderTracker(), - // artifactManager intentionally not initialized - } - - // GetArtifactManager should lazy-initialize - artifactManager := compiler.GetArtifactManager() - assert.NotNil(t, artifactManager, "GetArtifactManager should lazy-initialize") - - // Second call should return same instance - artifactManager2 := compiler.GetArtifactManager() - assert.Same(t, artifactManager, artifactManager2, "Should return same instance") -} - -// TestArtifactManagerValidationExample demonstrates how validation could work -// This is a conceptual test showing how the artifact manager could validate -// artifact dependencies in a workflow -func TestArtifactManagerValidationExample(t *testing.T) { - // Create a compiler with artifact manager - compiler := NewCompiler() - artifactManager := compiler.GetArtifactManager() - - // Simulate job 1 uploading an artifact - artifactManager.SetCurrentJob("build") - err := artifactManager.RecordUpload(&ArtifactUpload{ - Name: "build-artifact", - Paths: []string{"/dist/app"}, - JobName: "build", - }) - require.NoError(t, err) - - // Simulate job 2 downloading the artifact - artifactManager.SetCurrentJob("test") - err = artifactManager.RecordDownload(&ArtifactDownload{ - Name: "build-artifact", - Path: "/tmp/build", - JobName: "test", - DependsOn: []string{"build"}, - }) - require.NoError(t, err) - - // Validate all downloads - errors := artifactManager.ValidateAllDownloads() - assert.Empty(t, errors, "All downloads should be valid") - - // Simulate a job trying to download a non-existent artifact - 
artifactManager.SetCurrentJob("deploy") - err = artifactManager.RecordDownload(&ArtifactDownload{ - Name: "nonexistent-artifact", - Path: "/tmp/deploy", - JobName: "deploy", - DependsOn: []string{"build"}, - }) - require.NoError(t, err) - - // Validation should catch the missing artifact - errors = artifactManager.ValidateAllDownloads() - assert.Len(t, errors, 1, "Should detect missing artifact") - assert.Contains(t, errors[0].Error(), "nonexistent-artifact") - assert.Contains(t, errors[0].Error(), "not found") -} diff --git a/pkg/workflow/artifact_manager_test.go b/pkg/workflow/artifact_manager_test.go deleted file mode 100644 index 9cff97b0d7..0000000000 --- a/pkg/workflow/artifact_manager_test.go +++ /dev/null @@ -1,866 +0,0 @@ -//go:build !integration - -package workflow - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewArtifactManager(t *testing.T) { - am := NewArtifactManager() - assert.NotNil(t, am) - assert.NotNil(t, am.uploads) - assert.NotNil(t, am.downloads) - assert.Empty(t, am.currentJob) -} - -func TestSetCurrentJob(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("test-job") - assert.Equal(t, "test-job", am.GetCurrentJob()) -} - -func TestRecordUpload(t *testing.T) { - tests := []struct { - name string - upload *ArtifactUpload - wantError bool - errorMsg string - }{ - { - name: "valid upload", - upload: &ArtifactUpload{ - Name: "test-artifact", - Paths: []string{"/tmp/test.txt"}, - JobName: "test-job", - }, - wantError: false, - }, - { - name: "upload without name", - upload: &ArtifactUpload{ - Paths: []string{"/tmp/test.txt"}, - JobName: "test-job", - }, - wantError: true, - errorMsg: "artifact upload must have a name", - }, - { - name: "upload without paths", - upload: &ArtifactUpload{ - Name: "test-artifact", - Paths: []string{}, - JobName: "test-job", - }, - wantError: true, - errorMsg: "artifact upload must have at least one path", - }, - { - name: "upload 
with multiple paths", - upload: &ArtifactUpload{ - Name: "multi-path-artifact", - Paths: []string{"/tmp/file1.txt", "/tmp/file2.txt"}, - JobName: "test-job", - }, - wantError: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - am := NewArtifactManager() - err := am.RecordUpload(tt.upload) - - if tt.wantError { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.errorMsg) - } else { - require.NoError(t, err) - uploads := am.GetUploadsForJob(tt.upload.JobName) - assert.Len(t, uploads, 1) - assert.Equal(t, tt.upload.Name, uploads[0].Name) - } - }) - } -} - -func TestRecordUploadUsesCurrentJob(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("current-job") - - upload := &ArtifactUpload{ - Name: "test-artifact", - Paths: []string{"/tmp/test.txt"}, - // JobName not set - should use current job - } - - err := am.RecordUpload(upload) - require.NoError(t, err) - assert.Equal(t, "current-job", upload.JobName) - - uploads := am.GetUploadsForJob("current-job") - assert.Len(t, uploads, 1) -} - -func TestRecordDownload(t *testing.T) { - tests := []struct { - name string - download *ArtifactDownload - wantError bool - errorMsg string - }{ - { - name: "valid download by name", - download: &ArtifactDownload{ - Name: "test-artifact", - Path: "/tmp/download", - JobName: "test-job", - }, - wantError: false, - }, - { - name: "valid download by pattern", - download: &ArtifactDownload{ - Pattern: "agent-*", - Path: "/tmp/download", - JobName: "test-job", - }, - wantError: false, - }, - { - name: "download without name or pattern", - download: &ArtifactDownload{ - Path: "/tmp/download", - JobName: "test-job", - }, - wantError: true, - errorMsg: "artifact download must have either name or pattern", - }, - { - name: "download without path", - download: &ArtifactDownload{ - Name: "test-artifact", - JobName: "test-job", - }, - wantError: true, - errorMsg: "artifact download must have a path", - }, - { - name: "download with 
merge-multiple", - download: &ArtifactDownload{ - Pattern: "build-*", - Path: "/tmp/builds", - MergeMultiple: true, - JobName: "test-job", - }, - wantError: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - am := NewArtifactManager() - err := am.RecordDownload(tt.download) - - if tt.wantError { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.errorMsg) - } else { - require.NoError(t, err) - downloads := am.GetDownloadsForJob(tt.download.JobName) - assert.Len(t, downloads, 1) - } - }) - } -} - -func TestRecordDownloadUsesCurrentJob(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("current-job") - - download := &ArtifactDownload{ - Name: "test-artifact", - Path: "/tmp/download", - // JobName not set - should use current job - } - - err := am.RecordDownload(download) - require.NoError(t, err) - assert.Equal(t, "current-job", download.JobName) - - downloads := am.GetDownloadsForJob("current-job") - assert.Len(t, downloads, 1) -} - -func TestComputeDownloadPath(t *testing.T) { - tests := []struct { - name string - download *ArtifactDownload - upload *ArtifactUpload - originalPath string - expectedPath string - }{ - { - name: "download by name - direct path", - download: &ArtifactDownload{ - Name: "agent-artifacts", - Path: "/tmp/download", - }, - upload: &ArtifactUpload{ - Name: "agent-artifacts", - }, - originalPath: "file.txt", - expectedPath: "/tmp/download/file.txt", - }, - { - name: "download by name - nested file", - download: &ArtifactDownload{ - Name: "agent-artifacts", - Path: "/tmp/download", - }, - upload: &ArtifactUpload{ - Name: "agent-artifacts", - }, - originalPath: "subdir/file.txt", - expectedPath: "/tmp/download/subdir/file.txt", - }, - { - name: "download by pattern with merge - direct path", - download: &ArtifactDownload{ - Pattern: "build-*", - Path: "/tmp/builds", - MergeMultiple: true, - }, - upload: &ArtifactUpload{ - Name: "build-linux", - }, - originalPath: "app.exe", - 
expectedPath: "/tmp/builds/app.exe", - }, - { - name: "download by pattern without merge - artifact subdirectory", - download: &ArtifactDownload{ - Pattern: "build-*", - Path: "/tmp/builds", - MergeMultiple: false, - }, - upload: &ArtifactUpload{ - Name: "build-linux", - }, - originalPath: "app.exe", - expectedPath: "/tmp/builds/build-linux/app.exe", - }, - { - name: "download by pattern without merge - nested file", - download: &ArtifactDownload{ - Pattern: "agent-*", - Path: "/tmp/agents", - MergeMultiple: false, - }, - upload: &ArtifactUpload{ - Name: "agent-output", - }, - originalPath: "logs/output.json", - expectedPath: "/tmp/agents/agent-output/logs/output.json", - }, - { - name: "download with leading ./ in original path", - download: &ArtifactDownload{ - Name: "test-artifact", - Path: "/tmp/test", - }, - upload: &ArtifactUpload{ - Name: "test-artifact", - }, - originalPath: "./data/file.txt", - expectedPath: "/tmp/test/data/file.txt", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - am := NewArtifactManager() - result := am.ComputeDownloadPath(tt.download, tt.upload, tt.originalPath) - assert.Equal(t, tt.expectedPath, result) - }) - } -} - -func TestFindUploadedArtifact(t *testing.T) { - am := NewArtifactManager() - - // Setup: create uploads in different jobs - am.SetCurrentJob("job1") - err := am.RecordUpload(&ArtifactUpload{ - Name: "artifact-1", - Paths: []string{"/tmp/file1.txt"}, - JobName: "job1", - }) - require.NoError(t, err) - - am.SetCurrentJob("job2") - err = am.RecordUpload(&ArtifactUpload{ - Name: "artifact-2", - Paths: []string{"/tmp/file2.txt"}, - JobName: "job2", - }) - require.NoError(t, err) - - tests := []struct { - name string - artifactName string - dependsOn []string - expectFound bool - expectedJob string - }{ - { - name: "find artifact in dependencies", - artifactName: "artifact-1", - dependsOn: []string{"job1"}, - expectFound: true, - expectedJob: "job1", - }, - { - name: "find artifact with multiple 
dependencies", - artifactName: "artifact-2", - dependsOn: []string{"job1", "job2"}, - expectFound: true, - expectedJob: "job2", - }, - { - name: "artifact not in dependencies but exists", - artifactName: "artifact-1", - dependsOn: []string{"job2"}, - expectFound: true, - expectedJob: "job1", - }, - { - name: "artifact does not exist", - artifactName: "nonexistent", - dependsOn: []string{"job1", "job2"}, - expectFound: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := am.FindUploadedArtifact(tt.artifactName, tt.dependsOn) - - if tt.expectFound { - assert.NotNil(t, result, "Expected to find artifact") - assert.Equal(t, tt.artifactName, result.Name) - assert.Equal(t, tt.expectedJob, result.JobName) - } else { - assert.Nil(t, result, "Expected not to find artifact") - } - }) - } -} - -func TestValidateDownload(t *testing.T) { - am := NewArtifactManager() - - // Setup: create uploads - am.SetCurrentJob("upload-job") - err := am.RecordUpload(&ArtifactUpload{ - Name: "test-artifact", - Paths: []string{"/tmp/file.txt"}, - JobName: "upload-job", - }) - require.NoError(t, err) - - err = am.RecordUpload(&ArtifactUpload{ - Name: "build-linux", - Paths: []string{"/tmp/linux.exe"}, - JobName: "upload-job", - }) - require.NoError(t, err) - - err = am.RecordUpload(&ArtifactUpload{ - Name: "build-windows", - Paths: []string{"/tmp/windows.exe"}, - JobName: "upload-job", - }) - require.NoError(t, err) - - tests := []struct { - name string - download *ArtifactDownload - wantError bool - errorMsg string - }{ - { - name: "valid download by name", - download: &ArtifactDownload{ - Name: "test-artifact", - Path: "/tmp/download", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }, - wantError: false, - }, - { - name: "invalid download - artifact not found", - download: &ArtifactDownload{ - Name: "nonexistent-artifact", - Path: "/tmp/download", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }, - wantError: 
true, - errorMsg: "not found in any dependent job", - }, - { - name: "valid download by pattern", - download: &ArtifactDownload{ - Pattern: "build-*", - Path: "/tmp/builds", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }, - wantError: false, - }, - { - name: "invalid download - pattern matches nothing", - download: &ArtifactDownload{ - Pattern: "logs-*", - Path: "/tmp/tests", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }, - wantError: true, - errorMsg: "no artifacts matching pattern", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := am.ValidateDownload(tt.download) - - if tt.wantError { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.errorMsg) - } else { - require.NoError(t, err) - } - }) - } -} - -func TestValidateAllDownloads(t *testing.T) { - am := NewArtifactManager() - - // Setup: create uploads - am.SetCurrentJob("upload-job") - err := am.RecordUpload(&ArtifactUpload{ - Name: "artifact-1", - Paths: []string{"/tmp/file1.txt"}, - JobName: "upload-job", - }) - require.NoError(t, err) - - // Setup: create downloads (some valid, some invalid) - am.SetCurrentJob("download-job") - err = am.RecordDownload(&ArtifactDownload{ - Name: "artifact-1", - Path: "/tmp/download1", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }) - require.NoError(t, err) - - err = am.RecordDownload(&ArtifactDownload{ - Name: "nonexistent", - Path: "/tmp/download2", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - }) - require.NoError(t, err) - - // Validate all downloads - errors := am.ValidateAllDownloads() - - // Should have 1 error (nonexistent artifact) - assert.Len(t, errors, 1) - assert.Contains(t, errors[0].Error(), "nonexistent") - assert.Contains(t, errors[0].Error(), "not found") -} - -func TestMatchesPattern(t *testing.T) { - tests := []struct { - name string - pattern string - matches []string - noMatch []string - }{ - { - name: "exact match", - 
pattern: "artifact", - matches: []string{"artifact"}, - noMatch: []string{"artifact-1", "test-artifact", "other"}, - }, - { - name: "leading wildcard", - pattern: "*-artifact", - matches: []string{"test-artifact", "my-artifact"}, - noMatch: []string{"artifact", "artifact-test"}, - }, - { - name: "trailing wildcard", - pattern: "build-*", - matches: []string{"build-linux", "build-windows", "build-"}, - noMatch: []string{"build", "test-build-linux"}, - }, - { - name: "middle wildcard", - pattern: "build-*-x64", - matches: []string{"build-linux-x64", "build-windows-x64"}, - noMatch: []string{"build-x64", "build-linux-arm64"}, - }, - { - name: "wildcard matches all", - pattern: "*", - matches: []string{"anything", "test", "build-linux"}, - noMatch: []string{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - for _, name := range tt.matches { - assert.True(t, matchesPattern(name, tt.pattern), - "Expected %s to match pattern %s", name, tt.pattern) - } - for _, name := range tt.noMatch { - assert.False(t, matchesPattern(name, tt.pattern), - "Expected %s NOT to match pattern %s", name, tt.pattern) - } - }) - } -} - -func TestReset(t *testing.T) { - am := NewArtifactManager() - - // Add some data - am.SetCurrentJob("test-job") - err := am.RecordUpload(&ArtifactUpload{ - Name: "test-artifact", - Paths: []string{"/tmp/file.txt"}, - JobName: "test-job", - }) - require.NoError(t, err) - - err = am.RecordDownload(&ArtifactDownload{ - Name: "test-artifact", - Path: "/tmp/download", - JobName: "test-job", - }) - require.NoError(t, err) - - // Verify data exists - assert.Len(t, am.uploads, 1) - assert.Len(t, am.downloads, 1) - assert.Equal(t, "test-job", am.currentJob) - - // Reset - am.Reset() - - // Verify everything is cleared - assert.Empty(t, am.uploads) - assert.Empty(t, am.downloads) - assert.Empty(t, am.currentJob) -} - -func TestComplexWorkflowScenario(t *testing.T) { - am := NewArtifactManager() - - // Job 1: Upload agent artifacts - 
am.SetCurrentJob("agent") - err := am.RecordUpload(&ArtifactUpload{ - Name: "agent-artifacts", - Paths: []string{"/tmp/gh-aw/aw-prompts/prompt.txt", "/tmp/gh-aw/patch/aw.patch"}, - JobName: "agent", - }) - require.NoError(t, err) - - // Job 2: Download agent artifacts for safe outputs - am.SetCurrentJob("safe_outputs") - err = am.RecordDownload(&ArtifactDownload{ - Name: "agent-artifacts", - Path: "/tmp/gh-aw/", - JobName: "safe_outputs", - DependsOn: []string{"agent"}, - }) - require.NoError(t, err) - - // Validate downloads - errors := am.ValidateAllDownloads() - assert.Empty(t, errors, "Expected no validation errors") - - // Test path computation - download := am.GetDownloadsForJob("safe_outputs")[0] - upload := am.FindUploadedArtifact("agent-artifacts", []string{"agent"}) - require.NotNil(t, upload) - - // Files should be extracted directly to download path (v4 behavior) - promptPath := am.ComputeDownloadPath(download, upload, "aw-prompts/prompt.txt") - assert.Equal(t, "/tmp/gh-aw/aw-prompts/prompt.txt", promptPath) - - patchPath := am.ComputeDownloadPath(download, upload, "patch/aw.patch") - assert.Equal(t, "/tmp/gh-aw/patch/aw.patch", patchPath) -} - -func TestMultipleArtifactsPatternDownload(t *testing.T) { - am := NewArtifactManager() - - // Job 1: Upload multiple build artifacts - am.SetCurrentJob("build") - for _, platform := range []string{"linux", "windows", "macos"} { - err := am.RecordUpload(&ArtifactUpload{ - Name: "build-" + platform, - Paths: []string{"/build/" + platform + "/app"}, - JobName: "build", - }) - require.NoError(t, err) - } - - // Job 2: Download all build artifacts with pattern (no merge) - am.SetCurrentJob("test") - err := am.RecordDownload(&ArtifactDownload{ - Pattern: "build-*", - Path: "/tmp/artifacts", - MergeMultiple: false, - JobName: "test", - DependsOn: []string{"build"}, - }) - require.NoError(t, err) - - // Validate - errors := am.ValidateAllDownloads() - assert.Empty(t, errors) - - // Test path computation for each 
artifact - download := am.GetDownloadsForJob("test")[0] - - linuxUpload := am.FindUploadedArtifact("build-linux", []string{"build"}) - require.NotNil(t, linuxUpload) - linuxPath := am.ComputeDownloadPath(download, linuxUpload, "linux/app") - assert.Equal(t, "/tmp/artifacts/build-linux/linux/app", linuxPath) - - windowsUpload := am.FindUploadedArtifact("build-windows", []string{"build"}) - require.NotNil(t, windowsUpload) - windowsPath := am.ComputeDownloadPath(download, windowsUpload, "windows/app") - assert.Equal(t, "/tmp/artifacts/build-windows/windows/app", windowsPath) -} - -func TestPatternDownloadWithMerge(t *testing.T) { - am := NewArtifactManager() - - // Upload multiple artifacts - am.SetCurrentJob("job1") - err := am.RecordUpload(&ArtifactUpload{ - Name: "logs-part1", - Paths: []string{"/logs/part1.txt"}, - JobName: "job1", - }) - require.NoError(t, err) - - err = am.RecordUpload(&ArtifactUpload{ - Name: "logs-part2", - Paths: []string{"/logs/part2.txt"}, - JobName: "job1", - }) - require.NoError(t, err) - - // Download with merge - am.SetCurrentJob("job2") - err = am.RecordDownload(&ArtifactDownload{ - Pattern: "logs-*", - Path: "/tmp/all-logs", - MergeMultiple: true, - JobName: "job2", - DependsOn: []string{"job1"}, - }) - require.NoError(t, err) - - // Validate - errors := am.ValidateAllDownloads() - assert.Empty(t, errors) - - // With merge, files go directly to path (no artifact subdirectories) - download := am.GetDownloadsForJob("job2")[0] - - part1Upload := am.FindUploadedArtifact("logs-part1", []string{"job1"}) - require.NotNil(t, part1Upload) - part1Path := am.ComputeDownloadPath(download, part1Upload, "part1.txt") - assert.Equal(t, "/tmp/all-logs/part1.txt", part1Path) - - part2Upload := am.FindUploadedArtifact("logs-part2", []string{"job1"}) - require.NotNil(t, part2Upload) - part2Path := am.ComputeDownloadPath(download, part2Upload, "part2.txt") - assert.Equal(t, "/tmp/all-logs/part2.txt", part2Path) -} - -// TestCommonParentStripping tests 
that common parent directories are stripped -// when multiple files are uploaded, simulating GitHub Actions behavior -func TestCommonParentStripping(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("upload-job") - - // Upload files with common parent /tmp/gh-aw/ - err := am.RecordUpload(&ArtifactUpload{ - Name: "test-artifact", - Paths: []string{ - "/tmp/gh-aw/aw-prompts/prompt.txt", - "/tmp/gh-aw/aw.patch", - }, - JobName: "upload-job", - }) - require.NoError(t, err) - - uploads := am.GetUploadsForJob("upload-job") - require.Len(t, uploads, 1) - upload := uploads[0] - - // Verify normalized paths have common parent stripped - assert.NotNil(t, upload.NormalizedPaths) - assert.Equal(t, "aw-prompts/prompt.txt", upload.NormalizedPaths["/tmp/gh-aw/aw-prompts/prompt.txt"]) - assert.Equal(t, "aw.patch", upload.NormalizedPaths["/tmp/gh-aw/aw.patch"]) - - // Verify download paths use normalized paths - am.SetCurrentJob("download-job") - download := &ArtifactDownload{ - Name: "test-artifact", - Path: "/workspace", - JobName: "download-job", - DependsOn: []string{"upload-job"}, - } - - // Download should use the normalized paths (with common parent stripped) - promptPath := am.ComputeDownloadPath(download, upload, "/tmp/gh-aw/aw-prompts/prompt.txt") - assert.Equal(t, "/workspace/aw-prompts/prompt.txt", promptPath) - - patchPath := am.ComputeDownloadPath(download, upload, "/tmp/gh-aw/aw.patch") - assert.Equal(t, "/workspace/aw.patch", patchPath) -} - -// TestCommonParentStrippingNestedPaths tests common parent stripping with nested paths -func TestCommonParentStrippingNestedPaths(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("build") - - // Upload files with deeper nesting - err := am.RecordUpload(&ArtifactUpload{ - Name: "build-outputs", - Paths: []string{ - "/home/runner/work/project/dist/app.js", - "/home/runner/work/project/dist/styles.css", - "/home/runner/work/project/dist/assets/logo.png", - }, - JobName: "build", - }) - 
require.NoError(t, err) - - upload := am.GetUploadsForJob("build")[0] - - // Common parent should be /home/runner/work/project/dist - assert.NotNil(t, upload.NormalizedPaths) - assert.Equal(t, "app.js", upload.NormalizedPaths["/home/runner/work/project/dist/app.js"]) - assert.Equal(t, "styles.css", upload.NormalizedPaths["/home/runner/work/project/dist/styles.css"]) - assert.Equal(t, "assets/logo.png", upload.NormalizedPaths["/home/runner/work/project/dist/assets/logo.png"]) -} - -// TestCommonParentStrippingSingleFile tests that single file uploads work correctly -func TestCommonParentStrippingSingleFile(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("job1") - - // Upload single file - err := am.RecordUpload(&ArtifactUpload{ - Name: "single-file", - Paths: []string{"/tmp/gh-aw/report.pdf"}, - JobName: "job1", - }) - require.NoError(t, err) - - upload := am.GetUploadsForJob("job1")[0] - - // Single file should be normalized to just its base name - assert.NotNil(t, upload.NormalizedPaths) - assert.Equal(t, "report.pdf", upload.NormalizedPaths["/tmp/gh-aw/report.pdf"]) - - // Download should use the normalized path - download := &ArtifactDownload{ - Name: "single-file", - Path: "/downloads", - JobName: "job2", - DependsOn: []string{"job1"}, - } - - path := am.ComputeDownloadPath(download, upload, "/tmp/gh-aw/report.pdf") - assert.Equal(t, "/downloads/report.pdf", path) -} - -// TestCommonParentStrippingNoCommonParent tests files with no common parent -func TestCommonParentStrippingNoCommonParent(t *testing.T) { - am := NewArtifactManager() - am.SetCurrentJob("job1") - - // Upload files from completely different paths - err := am.RecordUpload(&ArtifactUpload{ - Name: "mixed-files", - Paths: []string{ - "/tmp/file1.txt", - "/var/file2.txt", - }, - JobName: "job1", - }) - require.NoError(t, err) - - upload := am.GetUploadsForJob("job1")[0] - - // No common parent (beyond root), should use base names - assert.NotNil(t, upload.NormalizedPaths) - 
assert.Equal(t, "file1.txt", upload.NormalizedPaths["/tmp/file1.txt"]) - assert.Equal(t, "file2.txt", upload.NormalizedPaths["/var/file2.txt"]) -} - -// TestCommonParentWithPatternDownload tests common parent stripping with pattern downloads -func TestCommonParentWithPatternDownload(t *testing.T) { - am := NewArtifactManager() - - // Job 1: Upload with common parent - am.SetCurrentJob("build") - err := am.RecordUpload(&ArtifactUpload{ - Name: "build-linux", - Paths: []string{ - "/build/output/linux/app", - "/build/output/linux/lib.so", - }, - JobName: "build", - }) - require.NoError(t, err) - - // Job 2: Download with pattern - am.SetCurrentJob("deploy") - download := &ArtifactDownload{ - Pattern: "build-*", - Path: "/deploy", - MergeMultiple: false, - JobName: "deploy", - DependsOn: []string{"build"}, - } - - upload := am.GetUploadsForJob("build")[0] - - // With pattern download (no merge), files go to path/artifact-name/normalized-path - appPath := am.ComputeDownloadPath(download, upload, "/build/output/linux/app") - assert.Equal(t, "/deploy/build-linux/app", appPath) - - libPath := am.ComputeDownloadPath(download, upload, "/build/output/linux/lib.so") - assert.Equal(t, "/deploy/build-linux/lib.so", libPath) -} diff --git a/pkg/workflow/artifact_manager_workflows_integration_test.go b/pkg/workflow/artifact_manager_workflows_integration_test.go deleted file mode 100644 index 1d2964cd9b..0000000000 --- a/pkg/workflow/artifact_manager_workflows_integration_test.go +++ /dev/null @@ -1,566 +0,0 @@ -//go:build integration - -package workflow - -import ( - "fmt" - "os" - "path/filepath" - "sort" - "strings" - "testing" - - "github.com/github/gh-aw/pkg/stringutil" - - "github.com/goccy/go-yaml" - "github.com/stretchr/testify/require" -) - -// JobArtifacts holds upload and download information for a job -type JobArtifacts struct { - Uploads []*ArtifactUpload - Downloads []*ArtifactDownload -} - -// TestGenerateArtifactsReference compiles all agentic workflows and 
generates -// a reference document mapping artifacts to file paths per job. -// This document is meant to be used by agents to generate file paths in JavaScript and Go. -func TestGenerateArtifactsReference(t *testing.T) { - // Find all workflow markdown files - workflowsDir := filepath.Join("..", "..", ".github", "workflows") - entries, err := os.ReadDir(workflowsDir) - require.NoError(t, err, "Failed to read workflows directory") - - // Collect workflow files (exclude campaign files and lock files) - var workflowFiles []string - for _, entry := range entries { - if entry.IsDir() { - continue - } - name := entry.Name() - if strings.HasSuffix(name, ".md") && - !strings.HasSuffix(name, ".lock.yml") && - !strings.Contains(name, ".campaign.") { - workflowFiles = append(workflowFiles, filepath.Join(workflowsDir, name)) - } - } - - t.Logf("Found %d workflow files to process", len(workflowFiles)) - - // Map to store artifacts per workflow - workflowArtifacts := make(map[string]map[string]*JobArtifacts) // workflow -> job -> artifacts - - // Compile each workflow and extract artifact information - // Use dry-run mode (noEmit) so we don't write lock.yml files - compiler := NewCompiler() - compiler.SetNoEmit(true) // Enable dry-run mode - validate without generating lock files - successCount := 0 - - for _, workflowPath := range workflowFiles { - workflowName := filepath.Base(workflowPath) - - // Parse the workflow - workflowData, err := compiler.ParseWorkflowFile(workflowPath) - if err != nil { - t.Logf("Warning: Failed to parse %s: %v", workflowName, err) - continue - } - - // Try to compile the workflow - err = compiler.CompileWorkflowData(workflowData, workflowPath) - if err != nil { - // Some workflows may fail compilation for various reasons (missing permissions, etc.) 
- // We'll skip these for the artifact analysis - t.Logf("Warning: Failed to compile %s: %v", workflowName, err) - continue - } - - // Read the compiled lock file to extract artifact information - lockPath := stringutil.MarkdownToLockFile(workflowPath) - lockContent, err := os.ReadFile(lockPath) - if err != nil { - t.Logf("Warning: Failed to read lock file for %s: %v", workflowName, err) - continue - } - - // Parse the lock file to extract artifact steps - jobs := extractArtifactsFromYAML(string(lockContent), workflowName, t) - - if len(jobs) > 0 { - workflowArtifacts[workflowName] = jobs - successCount++ - } - } - - t.Logf("Successfully analyzed %d workflows with artifacts", successCount) - - // Build a global summary of artifacts by job name - artifactsByJob := buildArtifactsSummary(workflowArtifacts) - - // Generate the markdown reference document - markdown := generateArtifactsMarkdown(workflowArtifacts, artifactsByJob) - - // Write to scratchpad/artifacts.md - specsDir := filepath.Join("..", "..", "specs") - err = os.MkdirAll(specsDir, 0755) - require.NoError(t, err, "Failed to create specs directory") - - artifactsPath := filepath.Join(specsDir, "artifacts.md") - err = os.WriteFile(artifactsPath, []byte(markdown), 0644) - require.NoError(t, err, "Failed to write artifacts.md") - - t.Logf("Generated artifacts reference at %s", artifactsPath) -} - -// extractArtifactsFromYAML parses compiled YAML and extracts artifact upload/download information -func extractArtifactsFromYAML(yamlContent string, workflowName string, t *testing.T) map[string]*JobArtifacts { - // Parse YAML - var workflow map[string]interface{} - err := yaml.Unmarshal([]byte(yamlContent), &workflow) - if err != nil { - t.Logf("Warning: Failed to parse YAML for %s: %v", workflowName, err) - return nil - } - - // Get jobs - jobsRaw, ok := workflow["jobs"].(map[string]interface{}) - if !ok { - return nil - } - - result := make(map[string]*JobArtifacts) - - for jobName, jobDataRaw := range jobsRaw { 
- jobData, ok := jobDataRaw.(map[string]interface{}) - if !ok { - continue - } - - steps, ok := jobData["steps"].([]interface{}) - if !ok { - continue - } - - jobArtifacts := &JobArtifacts{} - hasArtifacts := false - - for _, stepRaw := range steps { - step, ok := stepRaw.(map[string]interface{}) - if !ok { - continue - } - - uses, ok := step["uses"].(string) - if !ok { - continue - } - - // Check for upload-artifact - if strings.Contains(uses, "actions/upload-artifact@") { - upload := &ArtifactUpload{ - JobName: jobName, - } - - // Extract 'with' parameters - withParams, ok := step["with"].(map[string]interface{}) - if ok { - if name, ok := withParams["name"].(string); ok { - upload.Name = name - } - // Handle path which could be a string or multiline string - if pathStr, ok := withParams["path"].(string); ok { - // Split by newlines and trim whitespace - lines := strings.Split(pathStr, "\n") - for _, line := range lines { - line = strings.TrimSpace(line) - if line != "" { - upload.Paths = append(upload.Paths, line) - } - } - } - } - - if upload.Name != "" { - jobArtifacts.Uploads = append(jobArtifacts.Uploads, upload) - hasArtifacts = true - } - } - - // Check for download-artifact - if strings.Contains(uses, "actions/download-artifact@") { - download := &ArtifactDownload{ - JobName: jobName, - } - - // Extract 'with' parameters - withParams, ok := step["with"].(map[string]interface{}) - if ok { - if name, ok := withParams["name"].(string); ok { - download.Name = name - } - if pattern, ok := withParams["pattern"].(string); ok { - download.Pattern = pattern - } - if pathStr, ok := withParams["path"].(string); ok { - download.Path = pathStr - } - if merge, ok := withParams["merge-multiple"].(bool); ok { - download.MergeMultiple = merge - } - } - - // Try to infer dependencies from job needs - if needs, ok := jobData["needs"].([]interface{}); ok { - for _, need := range needs { - if needStr, ok := need.(string); ok { - download.DependsOn = append(download.DependsOn, 
needStr) - } - } - } else if needStr, ok := jobData["needs"].(string); ok { - download.DependsOn = []string{needStr} - } - - if download.Name != "" || download.Pattern != "" { - jobArtifacts.Downloads = append(jobArtifacts.Downloads, download) - hasArtifacts = true - } - } - } - - if hasArtifacts { - result[jobName] = jobArtifacts - } - } - - return result -} - -// ArtifactSummary holds merged artifact information for a job across all workflows -type ArtifactSummary struct { - JobName string - Uploads map[string]*ArtifactUploadInfo // artifact name -> upload info - Downloads map[string]*ArtifactDownloadInfo // artifact name/pattern -> download info -} - -// ArtifactUploadInfo holds merged upload information -type ArtifactUploadInfo struct { - ArtifactName string - Paths map[string]bool // unique paths across all workflows - Workflows []string // workflows that upload this artifact -} - -// ArtifactDownloadInfo holds merged download information -type ArtifactDownloadInfo struct { - Identifier string // artifact name or pattern - DownloadPaths map[string]bool // unique download paths - Workflows []string // workflows that download this - MergeMultiple bool -} - -// buildArtifactsSummary creates a summary of artifacts by job name, merging duplicates -func buildArtifactsSummary(workflowArtifacts map[string]map[string]*JobArtifacts) map[string]*ArtifactSummary { - summary := make(map[string]*ArtifactSummary) - - for workflowName, jobs := range workflowArtifacts { - for jobName, artifacts := range jobs { - // Get or create job summary - if summary[jobName] == nil { - summary[jobName] = &ArtifactSummary{ - JobName: jobName, - Uploads: make(map[string]*ArtifactUploadInfo), - Downloads: make(map[string]*ArtifactDownloadInfo), - } - } - jobSummary := summary[jobName] - - // Merge uploads - for _, upload := range artifacts.Uploads { - if upload.Name == "" { - continue - } - - if jobSummary.Uploads[upload.Name] == nil { - jobSummary.Uploads[upload.Name] = &ArtifactUploadInfo{ 
- ArtifactName: upload.Name, - Paths: make(map[string]bool), - Workflows: []string{}, - } - } - uploadInfo := jobSummary.Uploads[upload.Name] - - // Add paths - for _, path := range upload.Paths { - uploadInfo.Paths[path] = true - } - - // Add workflow if not already present - if !artifactContainsWorkflow(uploadInfo.Workflows, workflowName) { - uploadInfo.Workflows = append(uploadInfo.Workflows, workflowName) - } - } - - // Merge downloads - for _, download := range artifacts.Downloads { - identifier := download.Name - if identifier == "" { - identifier = download.Pattern - } - if identifier == "" { - continue - } - - if jobSummary.Downloads[identifier] == nil { - jobSummary.Downloads[identifier] = &ArtifactDownloadInfo{ - Identifier: identifier, - DownloadPaths: make(map[string]bool), - Workflows: []string{}, - MergeMultiple: download.MergeMultiple, - } - } - downloadInfo := jobSummary.Downloads[identifier] - - // Add download path - if download.Path != "" { - downloadInfo.DownloadPaths[download.Path] = true - } - - // Add workflow if not already present - if !artifactContainsWorkflow(downloadInfo.Workflows, workflowName) { - downloadInfo.Workflows = append(downloadInfo.Workflows, workflowName) - } - } - } - } - - return summary -} - -// artifactContainsWorkflow checks if a string slice contains a value -func artifactContainsWorkflow(slice []string, value string) bool { - for _, item := range slice { - if item == value { - return true - } - } - return false -} - -// generateArtifactsMarkdown generates a markdown document with artifact information -func generateArtifactsMarkdown(workflowArtifacts map[string]map[string]*JobArtifacts, artifactsByJob map[string]*ArtifactSummary) string { - var sb strings.Builder - - sb.WriteString("\n\n") - sb.WriteString("# Artifact File Locations Reference\n\n") - sb.WriteString("This document provides a reference for artifact file locations across all agentic workflows.\n") - sb.WriteString("It is generated automatically and meant 
to be used by agents when generating file paths in JavaScript and Go code.\n\n") - sb.WriteString("## Overview\n\n") - sb.WriteString("When artifacts are uploaded, GitHub Actions strips the common parent directory from file paths.\n") - sb.WriteString("When artifacts are downloaded, files are extracted based on the download mode:\n\n") - sb.WriteString("- **Download by name**: Files extracted directly to `path/` (e.g., `path/file.txt`)\n") - sb.WriteString("- **Download by pattern (no merge)**: Files in `path/artifact-name/` (e.g., `path/artifact-1/file.txt`)\n") - sb.WriteString("- **Download by pattern (merge)**: Files extracted directly to `path/` (e.g., `path/file.txt`)\n\n") - - // Add summary section - sb.WriteString("## Summary by Job\n\n") - sb.WriteString("This section provides an overview of artifacts organized by job name, with duplicates merged across workflows.\n\n") - - // Sort job names for consistent output - jobNames := make([]string, 0, len(artifactsByJob)) - for jobName := range artifactsByJob { - jobNames = append(jobNames, jobName) - } - sort.Strings(jobNames) - - for _, jobName := range jobNames { - summary := artifactsByJob[jobName] - - fmt.Fprintf(&sb, "### Job: `%s`\n\n", jobName) - - // Uploads summary - if len(summary.Uploads) > 0 { - sb.WriteString("**Artifacts Uploaded:**\n\n") - - // Sort artifact names - uploadNames := make([]string, 0, len(summary.Uploads)) - for name := range summary.Uploads { - uploadNames = append(uploadNames, name) - } - sort.Strings(uploadNames) - - for _, name := range uploadNames { - info := summary.Uploads[name] - fmt.Fprintf(&sb, "- `%s`\n", info.ArtifactName) - - // Sort and list paths - paths := make([]string, 0, len(info.Paths)) - for path := range info.Paths { - paths = append(paths, path) - } - sort.Strings(paths) - - sb.WriteString(" - **Paths**: ") - for i, path := range paths { - if i > 0 { - sb.WriteString(", ") - } - fmt.Fprintf(&sb, "`%s`", path) - } - sb.WriteString("\n") - - // Sort and list 
workflows - sort.Strings(info.Workflows) - fmt.Fprintf(&sb, " - **Used in**: %d workflow(s) - %s\n", len(info.Workflows), strings.Join(info.Workflows, ", ")) - } - sb.WriteString("\n") - } - - // Downloads summary - if len(summary.Downloads) > 0 { - sb.WriteString("**Artifacts Downloaded:**\n\n") - - // Sort identifiers - downloadIds := make([]string, 0, len(summary.Downloads)) - for id := range summary.Downloads { - downloadIds = append(downloadIds, id) - } - sort.Strings(downloadIds) - - for _, id := range downloadIds { - info := summary.Downloads[id] - fmt.Fprintf(&sb, "- `%s`\n", info.Identifier) - - // Sort and list download paths - paths := make([]string, 0, len(info.DownloadPaths)) - for path := range info.DownloadPaths { - paths = append(paths, path) - } - sort.Strings(paths) - - sb.WriteString(" - **Download paths**: ") - for i, path := range paths { - if i > 0 { - sb.WriteString(", ") - } - fmt.Fprintf(&sb, "`%s`", path) - } - sb.WriteString("\n") - - // Sort and list workflows - sort.Strings(info.Workflows) - fmt.Fprintf(&sb, " - **Used in**: %d workflow(s) - %s\n", len(info.Workflows), strings.Join(info.Workflows, ", ")) - } - sb.WriteString("\n") - } - } - - sb.WriteString("## Workflows\n\n") - - // Sort workflow names for consistent output - workflowNames := make([]string, 0, len(workflowArtifacts)) - for name := range workflowArtifacts { - workflowNames = append(workflowNames, name) - } - sort.Strings(workflowNames) - - for _, workflowName := range workflowNames { - jobs := workflowArtifacts[workflowName] - - fmt.Fprintf(&sb, "### %s\n\n", workflowName) - - // Sort job names - jobNames := make([]string, 0, len(jobs)) - for jobName := range jobs { - jobNames = append(jobNames, jobName) - } - sort.Strings(jobNames) - - for _, jobName := range jobNames { - artifacts := jobs[jobName] - - fmt.Fprintf(&sb, "#### Job: `%s`\n\n", jobName) - - // Uploads - if len(artifacts.Uploads) > 0 { - sb.WriteString("**Uploads:**\n\n") - for _, upload := range 
artifacts.Uploads { - fmt.Fprintf(&sb, "- **Artifact**: `%s`\n", upload.Name) - sb.WriteString(" - **Upload paths**:\n") - for _, path := range upload.Paths { - fmt.Fprintf(&sb, " - `%s`\n", path) - } - - if len(upload.NormalizedPaths) > 0 { - sb.WriteString(" - **Paths in artifact** (after common parent stripping):\n") - - // Sort normalized paths for consistent output - var normalizedKeys []string - for key := range upload.NormalizedPaths { - normalizedKeys = append(normalizedKeys, key) - } - sort.Strings(normalizedKeys) - - for _, key := range normalizedKeys { - normalizedPath := upload.NormalizedPaths[key] - fmt.Fprintf(&sb, " - `%s` → `%s`\n", key, normalizedPath) - } - } - sb.WriteString("\n") - } - } - - // Downloads - if len(artifacts.Downloads) > 0 { - sb.WriteString("**Downloads:**\n\n") - for _, download := range artifacts.Downloads { - if download.Name != "" { - fmt.Fprintf(&sb, "- **Artifact**: `%s` (by name)\n", download.Name) - } else if download.Pattern != "" { - fmt.Fprintf(&sb, "- **Pattern**: `%s`", download.Pattern) - if download.MergeMultiple { - sb.WriteString(" (merge-multiple: true)\n") - } else { - sb.WriteString(" (merge-multiple: false)\n") - } - } - fmt.Fprintf(&sb, " - **Download path**: `%s`\n", download.Path) - if len(download.DependsOn) > 0 { - fmt.Fprintf(&sb, " - **Depends on jobs**: %v\n", download.DependsOn) - } - sb.WriteString("\n") - } - } - } - } - - sb.WriteString("## Usage Examples\n\n") - sb.WriteString("### JavaScript (actions/github-script)\n\n") - sb.WriteString("```javascript\n") - sb.WriteString("// Reading a file from a downloaded artifact\n") - sb.WriteString("const fs = require('fs');\n") - sb.WriteString("const path = require('path');\n\n") - sb.WriteString("// If artifact 'build-output' was downloaded to '/tmp/artifacts'\n") - sb.WriteString("// and contains 'dist/app.js' (after common parent stripping)\n") - sb.WriteString("const filePath = path.join('/tmp/artifacts', 'dist', 'app.js');\n") - 
sb.WriteString("const content = fs.readFileSync(filePath, 'utf8');\n") - sb.WriteString("```\n\n") - sb.WriteString("### Go\n\n") - sb.WriteString("```go\n") - sb.WriteString("// Reading a file from a downloaded artifact\n") - sb.WriteString("import (\n") - sb.WriteString(" \"os\"\n") - sb.WriteString(" \"path/filepath\"\n") - sb.WriteString(")\n\n") - sb.WriteString("// If artifact 'build-output' was downloaded to '/tmp/artifacts'\n") - sb.WriteString("// and contains 'dist/app.js' (after common parent stripping)\n") - sb.WriteString("filePath := filepath.Join(\"/tmp/artifacts\", \"dist\", \"app.js\")\n") - sb.WriteString("content, err := os.ReadFile(filePath)\n") - sb.WriteString("```\n\n") - sb.WriteString("## Notes\n\n") - sb.WriteString("- This document is auto-generated from workflow analysis\n") - sb.WriteString("- Actual file paths may vary based on the workflow execution context\n") - sb.WriteString("- Always verify file existence before reading in production code\n") - sb.WriteString("- Common parent directories are automatically stripped during upload\n") - sb.WriteString("- Use `ComputeDownloadPath()` from the artifact manager for accurate path computation\n") - - return sb.String() -} diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index 4a793e8aa6..a437d3b491 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -169,11 +169,6 @@ func (cm *CheckoutManager) add(cfg *CheckoutConfig) { } } -// HasUserCheckouts returns true if any user-supplied checkouts were registered. -func (cm *CheckoutManager) HasUserCheckouts() bool { - return len(cm.ordered) > 0 -} - // GetDefaultCheckoutOverride returns the resolved checkout for the default workspace // (empty path, empty repository). Returns nil if the user did not configure one. 
func (cm *CheckoutManager) GetDefaultCheckoutOverride() *resolvedCheckout { diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 9b001d08c7..9da2e7d005 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -14,7 +14,7 @@ import ( func TestNewCheckoutManager(t *testing.T) { t.Run("empty configs produces empty manager", func(t *testing.T) { cm := NewCheckoutManager(nil) - assert.False(t, cm.HasUserCheckouts(), "empty manager should report no user checkouts") + // HasUserCheckouts removed (dead code) assert.Nil(t, cm.GetDefaultCheckoutOverride(), "empty manager should have no default override") }) @@ -23,7 +23,7 @@ func TestNewCheckoutManager(t *testing.T) { cm := NewCheckoutManager([]*CheckoutConfig{ {FetchDepth: &depth}, }) - assert.True(t, cm.HasUserCheckouts(), "should have user checkouts") + // HasUserCheckouts removed (dead code) override := cm.GetDefaultCheckoutOverride() require.NotNil(t, override, "should have default override") require.NotNil(t, override.fetchDepth, "fetch depth should be set") diff --git a/pkg/workflow/claude_engine.go b/pkg/workflow/claude_engine.go index e0fd2c3fdc..4197be967d 100644 --- a/pkg/workflow/claude_engine.go +++ b/pkg/workflow/claude_engine.go @@ -29,7 +29,6 @@ func NewClaudeEngine() *ClaudeEngine { supportsMaxTurns: true, // Claude supports max-turns feature supportsWebFetch: true, // Claude has built-in WebFetch support supportsWebSearch: true, // Claude has built-in WebSearch support - supportsFirewall: true, // Claude supports network firewalling via AWF supportsLLMGateway: false, // Claude does not support LLM gateway }, } diff --git a/pkg/workflow/codex_engine.go b/pkg/workflow/codex_engine.go index e626ab5fb4..d8265dfe76 100644 --- a/pkg/workflow/codex_engine.go +++ b/pkg/workflow/codex_engine.go @@ -41,7 +41,6 @@ func NewCodexEngine() *CodexEngine { supportsMaxTurns: false, // Codex does not support max-turns feature 
supportsWebFetch: false, // Codex does not have built-in web-fetch support supportsWebSearch: true, // Codex has built-in web-search support - supportsFirewall: true, // Codex supports network firewalling via AWF supportsLLMGateway: true, // Codex supports LLM gateway on port 10001 }, } diff --git a/pkg/workflow/compiler_safe_outputs_config.go b/pkg/workflow/compiler_safe_outputs_config.go index d73eec00bc..476ed54955 100644 --- a/pkg/workflow/compiler_safe_outputs_config.go +++ b/pkg/workflow/compiler_safe_outputs_config.go @@ -75,14 +75,6 @@ func (b *handlerConfigBuilder) AddIfNotEmpty(key string, value string) *handlerC return b } -// AddIfTrue adds a boolean field only if the value is true -func (b *handlerConfigBuilder) AddIfTrue(key string, value bool) *handlerConfigBuilder { - if value { - b.config[key] = true - } - return b -} - // AddStringSlice adds a string slice field only if the slice is not empty func (b *handlerConfigBuilder) AddStringSlice(key string, value []string) *handlerConfigBuilder { if len(value) > 0 { diff --git a/pkg/workflow/copilot_engine.go b/pkg/workflow/copilot_engine.go index 8aefe1eab5..462e6e7380 100644 --- a/pkg/workflow/copilot_engine.go +++ b/pkg/workflow/copilot_engine.go @@ -43,7 +43,6 @@ func NewCopilotEngine() *CopilotEngine { supportsMaxContinuations: true, // Copilot CLI supports --autopilot with --max-autopilot-continues supportsWebFetch: true, // Copilot CLI has built-in web-fetch support supportsWebSearch: false, // Copilot CLI does not have built-in web-search support - supportsFirewall: true, // Copilot supports network firewalling via AWF supportsPlugins: true, // Copilot supports plugin installation supportsLLMGateway: true, // Copilot supports LLM gateway on port 10003 }, diff --git a/pkg/workflow/domains.go b/pkg/workflow/domains.go index d8c5492612..634cdccbe7 100644 --- a/pkg/workflow/domains.go +++ b/pkg/workflow/domains.go @@ -454,18 +454,6 @@ func extractPlaywrightDomains(tools map[string]any) []string { 
return []string{} } -// mergeDomainsWithNetwork combines default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func mergeDomainsWithNetwork(defaultDomains []string, network *NetworkPermissions) string { - return mergeDomainsWithNetworkAndTools(defaultDomains, network, nil) -} - -// mergeDomainsWithNetworkAndTools combines default domains with NetworkPermissions allowed domains and HTTP MCP server domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func mergeDomainsWithNetworkAndTools(defaultDomains []string, network *NetworkPermissions, tools map[string]any) string { - return mergeDomainsWithNetworkToolsAndRuntimes(defaultDomains, network, tools, nil) -} - // mergeDomainsWithNetworkToolsAndRuntimes combines default domains with NetworkPermissions, HTTP MCP server domains, and runtime ecosystem domains // Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag func mergeDomainsWithNetworkToolsAndRuntimes(defaultDomains []string, network *NetworkPermissions, tools map[string]any, runtimes map[string]any) string { @@ -538,68 +526,18 @@ func GetAllowedDomainsForEngine(engine constants.EngineName, network *NetworkPer return mergeDomainsWithNetworkToolsAndRuntimes(engineDefaultDomains[engine], network, tools, runtimes) } -// GetCopilotAllowedDomains merges Copilot default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetCopilotAllowedDomains(network *NetworkPermissions) string { - return mergeDomainsWithNetwork(CopilotDefaultDomains, network) -} - -// GetCopilotAllowedDomainsWithSafeInputs merges Copilot default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -// Deprecated: 
hasSafeInputs is no longer used; call GetCopilotAllowedDomains instead -func GetCopilotAllowedDomainsWithSafeInputs(network *NetworkPermissions, hasSafeInputs bool) string { - return GetCopilotAllowedDomains(network) -} - -// GetCopilotAllowedDomainsWithTools merges Copilot default domains with NetworkPermissions allowed domains and HTTP MCP server domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetCopilotAllowedDomainsWithTools(network *NetworkPermissions, tools map[string]any) string { - return mergeDomainsWithNetworkAndTools(CopilotDefaultDomains, network, tools) -} - // GetCopilotAllowedDomainsWithToolsAndRuntimes merges Copilot default domains with NetworkPermissions, HTTP MCP server domains, and runtime ecosystem domains // Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag func GetCopilotAllowedDomainsWithToolsAndRuntimes(network *NetworkPermissions, tools map[string]any, runtimes map[string]any) string { return GetAllowedDomainsForEngine(constants.CopilotEngine, network, tools, runtimes) } -// GetCodexAllowedDomains merges Codex default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetCodexAllowedDomains(network *NetworkPermissions) string { - return mergeDomainsWithNetwork(CodexDefaultDomains, network) -} - -// GetCodexAllowedDomainsWithTools merges Codex default domains with NetworkPermissions allowed domains and HTTP MCP server domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetCodexAllowedDomainsWithTools(network *NetworkPermissions, tools map[string]any) string { - return mergeDomainsWithNetworkAndTools(CodexDefaultDomains, network, tools) -} - // GetCodexAllowedDomainsWithToolsAndRuntimes merges Codex default domains with NetworkPermissions, HTTP MCP server domains, and 
runtime ecosystem domains // Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag func GetCodexAllowedDomainsWithToolsAndRuntimes(network *NetworkPermissions, tools map[string]any, runtimes map[string]any) string { return GetAllowedDomainsForEngine(constants.CodexEngine, network, tools, runtimes) } -// GetClaudeAllowedDomains merges Claude default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetClaudeAllowedDomains(network *NetworkPermissions) string { - return mergeDomainsWithNetwork(ClaudeDefaultDomains, network) -} - -// GetClaudeAllowedDomainsWithSafeInputs merges Claude default domains with NetworkPermissions allowed domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -// Deprecated: hasSafeInputs is no longer used; call GetClaudeAllowedDomains instead -func GetClaudeAllowedDomainsWithSafeInputs(network *NetworkPermissions, hasSafeInputs bool) string { - return GetClaudeAllowedDomains(network) -} - -// GetClaudeAllowedDomainsWithTools merges Claude default domains with NetworkPermissions allowed domains and HTTP MCP server domains -// Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag -func GetClaudeAllowedDomainsWithTools(network *NetworkPermissions, tools map[string]any) string { - return mergeDomainsWithNetworkAndTools(ClaudeDefaultDomains, network, tools) -} - // GetClaudeAllowedDomainsWithToolsAndRuntimes merges Claude default domains with NetworkPermissions, HTTP MCP server domains, and runtime ecosystem domains // Returns a deduplicated, sorted, comma-separated string suitable for AWF's --allow-domains flag func GetClaudeAllowedDomainsWithToolsAndRuntimes(network *NetworkPermissions, tools map[string]any, runtimes map[string]any) string { diff --git a/pkg/workflow/domains_protocol_test.go 
b/pkg/workflow/domains_protocol_test.go index 0d350720d8..6327f1892f 100644 --- a/pkg/workflow/domains_protocol_test.go +++ b/pkg/workflow/domains_protocol_test.go @@ -6,6 +6,8 @@ import ( "slices" "strings" "testing" + + "github.com/github/gh-aw/pkg/constants" ) // TestProtocolSpecificDomains tests that domains with protocol prefixes are correctly handled @@ -97,7 +99,7 @@ func TestGetCopilotAllowedDomainsWithProtocol(t *testing.T) { }, } - result := GetCopilotAllowedDomains(network) + result := GetAllowedDomainsForEngine(constants.CopilotEngine, network, nil, nil) // Should contain protocol-specific domains if !strings.Contains(result, "https://secure.example.com") { @@ -121,7 +123,7 @@ func TestGetClaudeAllowedDomainsWithProtocol(t *testing.T) { }, } - result := GetClaudeAllowedDomains(network) + result := GetAllowedDomainsForEngine(constants.ClaudeEngine, network, nil, nil) // Should contain protocol-specific domain if !strings.Contains(result, "https://api.example.com") { diff --git a/pkg/workflow/domains_sort_test.go b/pkg/workflow/domains_sort_test.go index 66b366d6b0..5f340860f5 100644 --- a/pkg/workflow/domains_sort_test.go +++ b/pkg/workflow/domains_sort_test.go @@ -5,6 +5,8 @@ package workflow import ( "strings" "testing" + + "github.com/github/gh-aw/pkg/constants" ) // TestGetAllowedDomainsSorted tests that domains are returned in sorted order @@ -245,7 +247,7 @@ func TestGetCopilotAllowedDomainsSorted(t *testing.T) { permissions := &NetworkPermissions{ Allowed: []string{"zebra.com", "alpha.com", "python"}, } - domainsStr := GetCopilotAllowedDomains(permissions) + domainsStr := GetAllowedDomainsForEngine(constants.CopilotEngine, permissions, nil, nil) // Split the CSV and verify sorted domains := strings.Split(domainsStr, ",") @@ -260,7 +262,7 @@ func TestGetCopilotAllowedDomainsSorted(t *testing.T) { permissions := &NetworkPermissions{ Allowed: []string{"example.com", "example.com", "test.org"}, } - domainsStr := GetCopilotAllowedDomains(permissions) + 
domainsStr := GetAllowedDomainsForEngine(constants.CopilotEngine, permissions, nil, nil) // Split the CSV and verify no duplicates domains := strings.Split(domainsStr, ",") diff --git a/pkg/workflow/domains_test.go b/pkg/workflow/domains_test.go index c6d0f41a45..0d3cf98c84 100644 --- a/pkg/workflow/domains_test.go +++ b/pkg/workflow/domains_test.go @@ -327,49 +327,6 @@ func TestCodexDefaultDomains(t *testing.T) { } } -func TestGetCodexAllowedDomains(t *testing.T) { - t.Run("nil network permissions returns only defaults", func(t *testing.T) { - result := GetCodexAllowedDomains(nil) - // Should contain default Codex domains, sorted - if result != "172.30.0.1,api.openai.com,host.docker.internal,openai.com" { - t.Errorf("Expected '172.30.0.1,api.openai.com,host.docker.internal,openai.com', got %q", result) - } - }) - - t.Run("with network permissions merges domains", func(t *testing.T) { - network := &NetworkPermissions{ - Allowed: []string{"example.com"}, - } - result := GetCodexAllowedDomains(network) - // Should contain both default Codex domains and user-specified domain - if result != "172.30.0.1,api.openai.com,example.com,host.docker.internal,openai.com" { - t.Errorf("Expected '172.30.0.1,api.openai.com,example.com,host.docker.internal,openai.com', got %q", result) - } - }) - - t.Run("deduplicates domains", func(t *testing.T) { - network := &NetworkPermissions{ - Allowed: []string{"api.openai.com", "example.com"}, - } - result := GetCodexAllowedDomains(network) - // api.openai.com should not appear twice - if result != "172.30.0.1,api.openai.com,example.com,host.docker.internal,openai.com" { - t.Errorf("Expected '172.30.0.1,api.openai.com,example.com,host.docker.internal,openai.com', got %q", result) - } - }) - - t.Run("empty allowed list returns only defaults", func(t *testing.T) { - network := &NetworkPermissions{ - Allowed: []string{}, - } - result := GetCodexAllowedDomains(network) - // Empty allowed list should still return Codex defaults - if result != 
"172.30.0.1,api.openai.com,host.docker.internal,openai.com" { - t.Errorf("Expected '172.30.0.1,api.openai.com,host.docker.internal,openai.com', got %q", result) - } - }) -} - func TestClaudeDefaultDomains(t *testing.T) { // Verify that critical Claude domains are present criticalDomains := []string{ @@ -400,45 +357,6 @@ func TestClaudeDefaultDomains(t *testing.T) { } } -func TestGetClaudeAllowedDomains(t *testing.T) { - t.Run("returns Claude defaults when no network permissions", func(t *testing.T) { - result := GetClaudeAllowedDomains(nil) - // Should contain Claude default domains - if !strings.Contains(result, "api.anthropic.com") { - t.Error("Expected api.anthropic.com in result") - } - if !strings.Contains(result, "anthropic.com") { - t.Error("Expected anthropic.com in result") - } - }) - - t.Run("merges network permissions with Claude defaults", func(t *testing.T) { - network := &NetworkPermissions{ - Allowed: []string{"custom.example.com"}, - } - result := GetClaudeAllowedDomains(network) - // Should contain both Claude defaults and custom domain - if !strings.Contains(result, "api.anthropic.com") { - t.Error("Expected api.anthropic.com in result") - } - if !strings.Contains(result, "custom.example.com") { - t.Error("Expected custom.example.com in result") - } - }) - - t.Run("domains are sorted", func(t *testing.T) { - result := GetClaudeAllowedDomains(nil) - // Should be comma-separated and sorted - domains := strings.Split(result, ",") - for i := 1; i < len(domains); i++ { - if domains[i-1] > domains[i] { - t.Errorf("Domains not sorted: %s > %s", domains[i-1], domains[i]) - break - } - } - }) -} - // TestGetAllowedDomains_ModeDefaultsWithAllowedList verifies that when there's an Allowed list // with multiple ecosystems, it processes and expands all of them func TestGetAllowedDomains_ModeDefaultsWithAllowedList(t *testing.T) { diff --git a/pkg/workflow/engine_firewall_support.go b/pkg/workflow/engine_firewall_support.go index 245c8e1b2d..4be0f8acd7 100644 
--- a/pkg/workflow/engine_firewall_support.go +++ b/pkg/workflow/engine_firewall_support.go @@ -49,7 +49,7 @@ func (c *Compiler) checkNetworkSupport(engine CodingAgentEngine, networkPermissi engineFirewallSupportLog.Printf("Checking network support: engine=%s, strict_mode=%t", engine.GetID(), c.strictMode) // First, check for explicit firewall disable - if err := c.checkFirewallDisable(engine, networkPermissions); err != nil { + if err := c.checkFirewallDisable(networkPermissions); err != nil { return err } @@ -60,36 +60,14 @@ func (c *Compiler) checkNetworkSupport(engine CodingAgentEngine, networkPermissi return nil } - // Check if engine supports firewall - if engine.SupportsFirewall() { - engineFirewallSupportLog.Printf("Engine supports firewall: %s", engine.GetID()) - // Engine supports firewall, no issue - return nil - } - - engineFirewallSupportLog.Printf("Warning: engine does not support firewall but network restrictions exist: %s", engine.GetID()) - // Engine does not support firewall, but network restrictions are present - message := fmt.Sprintf( - "Selected engine '%s' does not support network firewalling; workflow specifies network restrictions (network.allowed). 
Network may not be sandboxed.", - engine.GetID(), - ) - - if c.strictMode { - // In strict mode, this is an error - return errors.New("strict mode: engine must support firewall when network restrictions (network.allowed) are set") - } - - // In non-strict mode, emit a warning - fmt.Fprintln(os.Stderr, console.FormatWarningMessage(message)) - c.IncrementWarningCount() - + engineFirewallSupportLog.Printf("Engine supports firewall: %s", engine.GetID()) return nil } // checkFirewallDisable validates firewall: "disable" configuration // - Warning if allowed != * (unrestricted) -// - Error in strict mode if allowed is not * or engine does not support firewall -func (c *Compiler) checkFirewallDisable(engine CodingAgentEngine, networkPermissions *NetworkPermissions) error { +// - Error in strict mode if allowed is not * +func (c *Compiler) checkFirewallDisable(networkPermissions *NetworkPermissions) error { if networkPermissions == nil || networkPermissions.Firewall == nil { return nil } @@ -111,11 +89,6 @@ func (c *Compiler) checkFirewallDisable(engine CodingAgentEngine, networkPermiss fmt.Fprintln(os.Stderr, console.FormatWarningMessage(message)) c.IncrementWarningCount() } - - // Also check if engine doesn't support firewall in strict mode when there are no restrictions - if c.strictMode && !hasRestrictions && !engine.SupportsFirewall() { - return fmt.Errorf("strict mode: engine '%s' does not support firewall", engine.GetID()) - } } return nil diff --git a/pkg/workflow/engine_firewall_support_test.go b/pkg/workflow/engine_firewall_support_test.go index c925353903..e70fa04e59 100644 --- a/pkg/workflow/engine_firewall_support_test.go +++ b/pkg/workflow/engine_firewall_support_test.go @@ -7,29 +7,6 @@ import ( "testing" ) -func TestSupportsFirewall(t *testing.T) { - t.Run("copilot engine supports firewall", func(t *testing.T) { - engine := NewCopilotEngine() - if !engine.SupportsFirewall() { - t.Error("Copilot engine should support firewall") - } - }) - - t.Run("claude 
engine supports firewall", func(t *testing.T) { - engine := NewClaudeEngine() - if !engine.SupportsFirewall() { - t.Error("Claude engine should support firewall") - } - }) - - t.Run("codex engine supports firewall", func(t *testing.T) { - engine := NewCodexEngine() - if !engine.SupportsFirewall() { - t.Error("Codex engine should support firewall") - } - }) -} - func TestHasNetworkRestrictions(t *testing.T) { t.Run("nil permissions have no restrictions", func(t *testing.T) { if hasNetworkRestrictions(nil) { @@ -209,7 +186,6 @@ func TestCheckNetworkSupport_StrictMode(t *testing.T) { func TestCheckFirewallDisable(t *testing.T) { t.Run("firewall enabled - no validation", func(t *testing.T) { compiler := NewCompiler() - engine := NewCopilotEngine() perms := &NetworkPermissions{ Allowed: []string{"example.com"}, Firewall: &FirewallConfig{ @@ -217,7 +193,7 @@ func TestCheckFirewallDisable(t *testing.T) { }, } - err := compiler.checkFirewallDisable(engine, perms) + err := compiler.checkFirewallDisable(perms) if err != nil { t.Errorf("Expected no error when firewall is enabled, got: %v", err) } @@ -225,7 +201,6 @@ func TestCheckFirewallDisable(t *testing.T) { t.Run("firewall disabled with no restrictions - no warning", func(t *testing.T) { compiler := NewCompiler() - engine := NewCopilotEngine() perms := &NetworkPermissions{ Firewall: &FirewallConfig{ Enabled: false, @@ -233,7 +208,7 @@ func TestCheckFirewallDisable(t *testing.T) { } initialWarnings := compiler.warningCount - err := compiler.checkFirewallDisable(engine, perms) + err := compiler.checkFirewallDisable(perms) if err != nil { t.Errorf("Expected no error when firewall is disabled with no restrictions, got: %v", err) } @@ -244,7 +219,6 @@ func TestCheckFirewallDisable(t *testing.T) { t.Run("firewall disabled with restrictions - warning emitted", func(t *testing.T) { compiler := NewCompiler() - engine := NewCopilotEngine() perms := &NetworkPermissions{ Allowed: []string{"example.com"}, Firewall: &FirewallConfig{ @@ 
-253,7 +227,7 @@ func TestCheckFirewallDisable(t *testing.T) { } initialWarnings := compiler.warningCount - err := compiler.checkFirewallDisable(engine, perms) + err := compiler.checkFirewallDisable(perms) if err != nil { t.Errorf("Expected no error in non-strict mode, got: %v", err) } @@ -265,7 +239,6 @@ func TestCheckFirewallDisable(t *testing.T) { t.Run("strict mode: firewall disabled with restrictions - error", func(t *testing.T) { compiler := NewCompiler() compiler.strictMode = true - engine := NewCopilotEngine() perms := &NetworkPermissions{ Allowed: []string{"example.com"}, Firewall: &FirewallConfig{ @@ -273,7 +246,7 @@ func TestCheckFirewallDisable(t *testing.T) { }, } - err := compiler.checkFirewallDisable(engine, perms) + err := compiler.checkFirewallDisable(perms) if err == nil { t.Error("Expected error in strict mode when firewall is disabled with restrictions") } @@ -284,12 +257,11 @@ func TestCheckFirewallDisable(t *testing.T) { t.Run("nil firewall config - no validation", func(t *testing.T) { compiler := NewCompiler() - engine := NewCopilotEngine() perms := &NetworkPermissions{ Allowed: []string{"example.com"}, } - err := compiler.checkFirewallDisable(engine, perms) + err := compiler.checkFirewallDisable(perms) if err != nil { t.Errorf("Expected no error when firewall config is nil, got: %v", err) } diff --git a/pkg/workflow/engine_helpers.go b/pkg/workflow/engine_helpers.go index f2d9dd94d6..33e3adb4d4 100644 --- a/pkg/workflow/engine_helpers.go +++ b/pkg/workflow/engine_helpers.go @@ -107,40 +107,6 @@ func GetBaseInstallationSteps(config EngineInstallConfig, workflowData *Workflow return steps } -// ExtractAgentIdentifier extracts the agent identifier (filename without extension) from an agent file path. -// This is used by the Copilot CLI which expects agent identifiers, not full paths. 
-// -// Parameters: -// - agentFile: The relative path to the agent file (e.g., ".github/agents/test-agent.md" or ".github/agents/test-agent.agent.md") -// -// Returns: -// - string: The agent identifier (e.g., "test-agent") -// -// Example: -// -// identifier := ExtractAgentIdentifier(".github/agents/my-agent.md") -// // Returns: "my-agent" -// -// identifier := ExtractAgentIdentifier(".github/agents/my-agent.agent.md") -// // Returns: "my-agent" -func ExtractAgentIdentifier(agentFile string) string { - engineHelpersLog.Printf("Extracting agent identifier from: %s", agentFile) - // Extract the base filename from the path - lastSlash := strings.LastIndex(agentFile, "/") - filename := agentFile - if lastSlash >= 0 { - filename = agentFile[lastSlash+1:] - } - - // Remove extensions in order: .agent.md, then .md, then .agent - // This handles all possible agent file naming conventions - filename = strings.TrimSuffix(filename, ".agent.md") - filename = strings.TrimSuffix(filename, ".md") - filename = strings.TrimSuffix(filename, ".agent") - - return filename -} - // ResolveAgentFilePath returns the properly quoted agent file path with GITHUB_WORKSPACE prefix. // This helper extracts the common pattern shared by Copilot, Codex, and Claude engines. // @@ -299,49 +265,6 @@ func FilterEnvForSecrets(env map[string]string, allowedNamesAndKeys []string) ma return filtered } -// GetHostedToolcachePathSetup returns a shell command that adds all runtime binaries -// from /opt/hostedtoolcache to PATH. This includes Node.js, Python, Go, Ruby, and other -// runtimes installed via actions/setup-* steps. -// -// The hostedtoolcache directory structure is: /opt/hostedtoolcache////bin -// This function generates a command that finds all bin directories and adds them to PATH. -// -// IMPORTANT: The command uses GH_AW_TOOL_BINS (computed by GetToolBinsSetup) which contains -// the specific tool paths from environment variables like GOROOT, JAVA_HOME, etc. 
These paths -// are computed on the RUNNER side and passed to the container as a literal value via --env, -// avoiding shell injection risks from variable expansion inside the container. -// -// This ensures that the version configured by actions/setup-* takes precedence over other -// versions that may exist in hostedtoolcache. Without this, the generic `find` command -// returns directories in alphabetical order, causing older versions (e.g., Go 1.22.12) -// to shadow newer ones (e.g., Go 1.25.6) because "1.22" < "1.25" alphabetically. -// -// This is used by all engine implementations (Copilot, Claude, Codex) to ensure consistent -// access to runtime tools inside the agent container. -// -// Returns: -// - string: A shell command that sets up PATH with all hostedtoolcache binaries -// -// Example output: -// -// export PATH="$GH_AW_TOOL_BINS$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\n' ':')$PATH" -func GetHostedToolcachePathSetup() string { - // Use GH_AW_TOOL_BINS which is computed on the runner side by GetToolBinsSetup() - // and passed to the container via --env. This avoids shell injection risks from - // expanding variables like GOROOT inside the container. - // - // GH_AW_TOOL_BINS contains paths like "/opt/hostedtoolcache/go/1.25.6/x64/bin:" - // computed from GOROOT, JAVA_HOME, etc. on the runner where they are trusted. - - // Generic find for all other hostedtoolcache binaries (Node.js, Python, etc.) - genericFind := `$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\n' ':')` - - // Build the raw PATH string, then sanitize it using GetSanitizedPATHExport() - // to remove empty elements, leading/trailing colons, and collapse multiple colons - rawPath := fmt.Sprintf(`$GH_AW_TOOL_BINS%s$PATH`, genericFind) - return GetSanitizedPATHExport(rawPath) -} - // GetNpmBinPathSetup returns a simple shell command that adds hostedtoolcache bin directories // to PATH. 
This is specifically for npm-installed CLIs (like Claude and Codex) that need // to find their binaries installed via `npm install -g`. @@ -364,85 +287,6 @@ func GetNpmBinPathSetup() string { return `export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\n' ':')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true` } -// GetSanitizedPATHExport returns a shell command that sets PATH to the given value -// with sanitization to remove security risks from malformed PATH entries. -// -// The sanitization removes: -// - Leading colons (e.g., ":/usr/bin" -> "/usr/bin") -// - Trailing colons (e.g., "/usr/bin:" -> "/usr/bin") -// - Empty elements (e.g., "/a::/b" -> "/a:/b", multiple colons collapsed to one) -// -// Empty PATH elements are a security risk because they cause the current directory -// to be searched for executables, which could allow malicious code execution. -// -// The sanitization logic is implemented in actions/setup/sh/sanitize_path.sh and -// is sourced at runtime from /opt/gh-aw/actions/sanitize_path.sh. -// -// Parameters: -// - rawPath: The unsanitized PATH value (may contain shell expansions like $PATH) -// -// Returns: -// - string: A shell command that sources the sanitize script to export the sanitized PATH -// -// Example: -// -// GetSanitizedPATHExport("$GH_AW_TOOL_BINS$PATH") -// // Returns: source /opt/gh-aw/actions/sanitize_path.sh "$GH_AW_TOOL_BINS$PATH" -func GetSanitizedPATHExport(rawPath string) string { - // Source the sanitize_path.sh script which handles: - // 1. Remove leading colons - // 2. Remove trailing colons - // 3. Collapse multiple colons into single colons - // 4. Export the sanitized PATH - return fmt.Sprintf(`source /opt/gh-aw/actions/sanitize_path.sh "%s"`, rawPath) -} - -// GetToolBinsSetup returns a shell command that computes the GH_AW_TOOL_BINS environment -// variable from specific tool paths (GOROOT, JAVA_HOME, etc.). 
-// -// This command should be run on the RUNNER side before invoking AWF, and the resulting -// GH_AW_TOOL_BINS should be passed to the container via --env. This ensures the paths -// are computed where they are trusted, avoiding shell injection risks. -// -// The computed paths are prepended to PATH (via GetHostedToolcachePathSetup) before the -// generic find results, ensuring versions set by actions/setup-* take precedence over -// alphabetically-earlier versions in hostedtoolcache. -// -// Returns: -// - string: A shell command that sets GH_AW_TOOL_BINS -// -// Example output when GOROOT=/opt/hostedtoolcache/go/1.25.6/x64 and JAVA_HOME=/opt/hostedtoolcache/Java/17.0.0/x64: -// -// GH_AW_TOOL_BINS="/opt/hostedtoolcache/go/1.25.6/x64/bin:/opt/hostedtoolcache/Java/17.0.0/x64/bin:" -func GetToolBinsSetup() string { - // Build GH_AW_TOOL_BINS from specific tool paths on the runner side. - // Each path is only added if the corresponding env var is set and non-empty. - // This runs on the runner where the env vars are trusted values from actions/setup-*. 
- // - // Tools with /bin subdirectory: - // - Go: Detected via `go env GOROOT` (actions/setup-go doesn't export GOROOT) - // - JAVA_HOME: Java installation root (actions/setup-java) - // - CARGO_HOME: Cargo/Rust installation (rustup) - // - GEM_HOME: Ruby gems (actions/setup-ruby) - // - CONDA: Conda installation - // - // Tools where the path IS the bin directory (no /bin suffix needed): - // - PIPX_BIN_DIR: pipx binary directory - // - SWIFT_PATH: Swift binary path - // - DOTNET_ROOT: .NET root (binaries are in root, not /bin) - return `GH_AW_TOOL_BINS=""; command -v go >/dev/null 2>&1 && GH_AW_TOOL_BINS="$(go env GOROOT)/bin:$GH_AW_TOOL_BINS"; [ -n "$JAVA_HOME" ] && GH_AW_TOOL_BINS="$JAVA_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$CARGO_HOME" ] && GH_AW_TOOL_BINS="$CARGO_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$GEM_HOME" ] && GH_AW_TOOL_BINS="$GEM_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$CONDA" ] && GH_AW_TOOL_BINS="$CONDA/bin:$GH_AW_TOOL_BINS"; [ -n "$PIPX_BIN_DIR" ] && GH_AW_TOOL_BINS="$PIPX_BIN_DIR:$GH_AW_TOOL_BINS"; [ -n "$SWIFT_PATH" ] && GH_AW_TOOL_BINS="$SWIFT_PATH:$GH_AW_TOOL_BINS"; [ -n "$DOTNET_ROOT" ] && GH_AW_TOOL_BINS="$DOTNET_ROOT:$GH_AW_TOOL_BINS"; export GH_AW_TOOL_BINS` -} - -// GetToolBinsEnvArg returns the AWF --env argument for passing GH_AW_TOOL_BINS to the container. -// This should be used after GetToolBinsSetup() has been run to compute the value. -// -// Returns: -// - []string: AWF arguments ["--env", "GH_AW_TOOL_BINS=$GH_AW_TOOL_BINS"] -func GetToolBinsEnvArg() []string { - // Pre-wrap in double quotes so shellEscapeArg preserves them (allowing shell expansion) - return []string{"--env", "\"GH_AW_TOOL_BINS=$GH_AW_TOOL_BINS\""} -} - // EngineHasValidateSecretStep checks if the engine provides a validate-secret step. // This is used to determine whether the secret_verification_result job output should be added. 
// diff --git a/pkg/workflow/engine_helpers_test.go b/pkg/workflow/engine_helpers_test.go index 54cdddb10c..3f129360a9 100644 --- a/pkg/workflow/engine_helpers_test.go +++ b/pkg/workflow/engine_helpers_test.go @@ -214,85 +214,6 @@ func TestResolveAgentFilePathFormat(t *testing.T) { } } -// TestExtractAgentIdentifier tests extracting agent identifier from file paths -func TestExtractAgentIdentifier(t *testing.T) { - tests := []struct { - name string - input string - expected string - }{ - { - name: "basic agent file path", - input: ".github/agents/test-agent.md", - expected: "test-agent", - }, - { - name: "path with spaces", - input: ".github/agents/my agent file.md", - expected: "my agent file", - }, - { - name: "deeply nested path", - input: ".github/copilot/instructions/deep/nested/agent.md", - expected: "agent", - }, - { - name: "simple filename", - input: "agent.md", - expected: "agent", - }, - { - name: "path with special characters", - input: ".github/agents/test-agent_v2.0.md", - expected: "test-agent_v2.0", - }, - { - name: "cli-consistency-checker example", - input: ".github/agents/cli-consistency-checker.md", - expected: "cli-consistency-checker", - }, - { - name: "path without extension", - input: ".github/agents/test-agent", - expected: "test-agent", - }, - { - name: "custom agent file simple path", - input: ".github/agents/test-agent.agent.md", - expected: "test-agent", - }, - { - name: "custom agent file with path", - input: "../agents/technical-doc-writer.agent.md", - expected: "technical-doc-writer", - }, - { - name: "custom agent file with underscores", - input: ".github/agents/my_custom_agent.agent.md", - expected: "my_custom_agent", - }, - { - name: "agent file with only .agent extension", - input: ".github/agents/test-agent.agent", - expected: "test-agent", - }, - { - name: "agent file with .agent extension in path", - input: "../agents/my-agent.agent", - expected: "my-agent", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t 
*testing.T) { - result := ExtractAgentIdentifier(tt.input) - if result != tt.expected { - t.Errorf("ExtractAgentIdentifier(%q) = %q, want %q", tt.input, result, tt.expected) - } - }) - } -} - // TestShellVariableExpansionInAgentPath tests that agent paths allow shell variable expansion func TestShellVariableExpansionInAgentPath(t *testing.T) { agentFile := ".github/agents/test-agent.md" @@ -349,250 +270,6 @@ func TestShellEscapeArgWithFullyQuotedAgentPath(t *testing.T) { } } -// TestGetHostedToolcachePathSetup tests the hostedtoolcache PATH setup helper -func TestGetHostedToolcachePathSetup(t *testing.T) { - pathSetup := GetHostedToolcachePathSetup() - - // Should use find command to locate bin directories in hostedtoolcache - if !strings.Contains(pathSetup, "/opt/hostedtoolcache") { - t.Errorf("PATH setup should reference /opt/hostedtoolcache, got: %s", pathSetup) - } - - // Should look for bin directories - if !strings.Contains(pathSetup, "-name bin") { - t.Errorf("PATH setup should search for bin directories, got: %s", pathSetup) - } - - // Should use maxdepth 4 to reach /opt/hostedtoolcache////bin - if !strings.Contains(pathSetup, "-maxdepth 4") { - t.Errorf("PATH setup should use -maxdepth 4, got: %s", pathSetup) - } - - // Should suppress errors with 2>/dev/null - if !strings.Contains(pathSetup, "2>/dev/null") { - t.Errorf("PATH setup should suppress errors with 2>/dev/null, got: %s", pathSetup) - } - - // Should source the sanitize_path.sh script - if !strings.Contains(pathSetup, "source /opt/gh-aw/actions/sanitize_path.sh") { - t.Errorf("PATH setup should source sanitize_path.sh script, got: %s", pathSetup) - } - - // Should preserve existing PATH by including $PATH in the raw path - if !strings.Contains(pathSetup, "$PATH") { - t.Errorf("PATH setup should include $PATH to preserve existing PATH, got: %s", pathSetup) - } -} - -// TestGetHostedToolcachePathSetup_Consistency verifies the PATH setup produces consistent output -func 
TestGetHostedToolcachePathSetup_Consistency(t *testing.T) { - // Call multiple times to ensure consistent output - first := GetHostedToolcachePathSetup() - second := GetHostedToolcachePathSetup() - - if first != second { - t.Errorf("GetHostedToolcachePathSetup should return consistent results, got:\n%s\nvs:\n%s", first, second) - } -} - -// TestGetHostedToolcachePathSetup_UsesToolBins verifies that GetHostedToolcachePathSetup -// uses $GH_AW_TOOL_BINS to get specific tool paths computed by GetToolBinsSetup. -func TestGetHostedToolcachePathSetup_UsesToolBins(t *testing.T) { - pathSetup := GetHostedToolcachePathSetup() - - // Should use $GH_AW_TOOL_BINS for specific tool paths - if !strings.Contains(pathSetup, "$GH_AW_TOOL_BINS") { - t.Errorf("PATH setup should use $GH_AW_TOOL_BINS, got: %s", pathSetup) - } - - // Verify ordering: $GH_AW_TOOL_BINS should come BEFORE the find command - toolBinsIdx := strings.Index(pathSetup, "$GH_AW_TOOL_BINS") - findIdx := strings.Index(pathSetup, "find /opt/hostedtoolcache") - if toolBinsIdx > findIdx { - t.Errorf("$GH_AW_TOOL_BINS should come before find command in PATH setup, got: %s", pathSetup) - } -} - -// TestGetToolBinsSetup verifies that GetToolBinsSetup computes specific tool paths -// for all supported runtimes (Go, Java, Rust, Conda, Ruby, pipx, Swift, .NET). 
-func TestGetToolBinsSetup(t *testing.T) { - toolBinsSetup := GetToolBinsSetup() - - // Should use `go env GOROOT` for Go (actions/setup-go doesn't export GOROOT env var) - if !strings.Contains(toolBinsSetup, "command -v go") || !strings.Contains(toolBinsSetup, "$(go env GOROOT)/bin") { - t.Errorf("GetToolBinsSetup should use `go env GOROOT` for Go, got: %s", toolBinsSetup) - } - - // Should check JAVA_HOME for Java - if !strings.Contains(toolBinsSetup, "JAVA_HOME") || !strings.Contains(toolBinsSetup, "$JAVA_HOME/bin") { - t.Errorf("GetToolBinsSetup should handle JAVA_HOME, got: %s", toolBinsSetup) - } - - // Should check CARGO_HOME for Rust - if !strings.Contains(toolBinsSetup, "CARGO_HOME") || !strings.Contains(toolBinsSetup, "$CARGO_HOME/bin") { - t.Errorf("GetToolBinsSetup should handle CARGO_HOME, got: %s", toolBinsSetup) - } - - // Should check CONDA for Conda - if !strings.Contains(toolBinsSetup, `"$CONDA"`) || !strings.Contains(toolBinsSetup, "$CONDA/bin") { - t.Errorf("GetToolBinsSetup should handle CONDA, got: %s", toolBinsSetup) - } - - // Should check GEM_HOME for Ruby - if !strings.Contains(toolBinsSetup, "GEM_HOME") || !strings.Contains(toolBinsSetup, "$GEM_HOME/bin") { - t.Errorf("GetToolBinsSetup should handle GEM_HOME, got: %s", toolBinsSetup) - } - - // Should check PIPX_BIN_DIR for pipx (no /bin suffix) - if !strings.Contains(toolBinsSetup, "PIPX_BIN_DIR") || !strings.Contains(toolBinsSetup, "$PIPX_BIN_DIR:") { - t.Errorf("GetToolBinsSetup should handle PIPX_BIN_DIR, got: %s", toolBinsSetup) - } - - // Should check SWIFT_PATH for Swift (no /bin suffix) - if !strings.Contains(toolBinsSetup, "SWIFT_PATH") || !strings.Contains(toolBinsSetup, "$SWIFT_PATH:") { - t.Errorf("GetToolBinsSetup should handle SWIFT_PATH, got: %s", toolBinsSetup) - } - - // Should check DOTNET_ROOT for .NET (no /bin suffix) - if !strings.Contains(toolBinsSetup, "DOTNET_ROOT") || !strings.Contains(toolBinsSetup, "$DOTNET_ROOT:") { - t.Errorf("GetToolBinsSetup should handle 
DOTNET_ROOT, got: %s", toolBinsSetup) - } - - // Should export GH_AW_TOOL_BINS - if !strings.Contains(toolBinsSetup, "export GH_AW_TOOL_BINS") { - t.Errorf("GetToolBinsSetup should export GH_AW_TOOL_BINS, got: %s", toolBinsSetup) - } -} - -// TestGetToolBinsEnvArg verifies that GetToolBinsEnvArg returns the correct AWF argument. -func TestGetToolBinsEnvArg(t *testing.T) { - envArg := GetToolBinsEnvArg() - - if len(envArg) != 2 { - t.Errorf("GetToolBinsEnvArg should return 2 elements (--env and value), got: %d", len(envArg)) - } - - if envArg[0] != "--env" { - t.Errorf("First element should be --env, got: %s", envArg[0]) - } - - if envArg[1] != "\"GH_AW_TOOL_BINS=$GH_AW_TOOL_BINS\"" { - t.Errorf("Second element should be \"GH_AW_TOOL_BINS=$GH_AW_TOOL_BINS\" (with outer double quotes), got: %s", envArg[1]) - } -} - -// TestGetSanitizedPATHExport verifies that GetSanitizedPATHExport produces correct shell commands. -func TestGetSanitizedPATHExport(t *testing.T) { - result := GetSanitizedPATHExport("/usr/bin:/usr/local/bin") - - // Should source the sanitize_path.sh script from /opt/gh-aw/actions/ - if !strings.Contains(result, "source /opt/gh-aw/actions/sanitize_path.sh") { - t.Errorf("GetSanitizedPATHExport should source sanitize_path.sh, got: %s", result) - } - - // Should include the raw path as an argument - if !strings.Contains(result, "/usr/bin:/usr/local/bin") { - t.Errorf("GetSanitizedPATHExport should include the raw path, got: %s", result) - } -} - -// TestGetSanitizedPATHExport_ShellExecution tests that the sanitize_path.sh script -// correctly sanitizes various malformed PATH inputs when executed in a real shell. -// This test uses the script directly from actions/setup/sh/ since /opt/gh-aw/actions/ -// only exists at runtime. 
-func TestGetSanitizedPATHExport_ShellExecution(t *testing.T) { - // Get the path to the sanitize_path.sh script relative to this test file - _, thisFile, _, ok := runtime.Caller(0) - if !ok { - t.Fatal("Failed to get current file path") - } - // Navigate from pkg/workflow/ to actions/setup/sh/ - scriptPath := filepath.Join(filepath.Dir(thisFile), "..", "..", "actions", "setup", "sh", "sanitize_path.sh") - if _, err := os.Stat(scriptPath); os.IsNotExist(err) { - t.Fatalf("sanitize_path.sh script not found at %s", scriptPath) - } - - tests := []struct { - name string - input string - expected string - }{ - { - name: "already clean PATH", - input: "/usr/bin:/usr/local/bin", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "leading colon", - input: ":/usr/bin:/usr/local/bin", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "trailing colon", - input: "/usr/bin:/usr/local/bin:", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "multiple leading colons", - input: ":::/usr/bin:/usr/local/bin", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "multiple trailing colons", - input: "/usr/bin:/usr/local/bin:::", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "internal empty elements", - input: "/usr/bin::/usr/local/bin", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "multiple internal empty elements", - input: "/usr/bin:::/usr/local/bin", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "combined leading trailing and internal", - input: ":/usr/bin:::/usr/local/bin:", - expected: "/usr/bin:/usr/local/bin", - }, - { - name: "all colons", - input: ":::", - expected: "", - }, - { - name: "empty string", - input: "", - expected: "", - }, - { - name: "single path no colons", - input: "/usr/bin", - expected: "/usr/bin", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Source the script directly with the input and echo the resulting PATH - shellCmd := fmt.Sprintf(`source '%s' '%s' && echo 
"$PATH"`, scriptPath, tt.input) - - cmd := exec.Command("bash", "-c", shellCmd) - output, err := cmd.Output() - if err != nil { - t.Fatalf("Failed to execute shell command: %v\nCommand: %s", err, shellCmd) - } - - result := strings.TrimSpace(string(output)) - if result != tt.expected { - t.Errorf("Sanitized PATH = %q, want %q\nShell command: %s", result, tt.expected, shellCmd) - } - }) - } -} - func TestGetNpmBinPathSetup(t *testing.T) { pathSetup := GetNpmBinPathSetup() diff --git a/pkg/workflow/expression_builder.go b/pkg/workflow/expression_builder.go index bbc0f23818..b2f857b210 100644 --- a/pkg/workflow/expression_builder.go +++ b/pkg/workflow/expression_builder.go @@ -105,11 +105,6 @@ func BuildBooleanLiteral(value bool) *BooleanLiteralNode { return &BooleanLiteralNode{Value: value} } -// BuildNumberLiteral creates a number literal node -func BuildNumberLiteral(value string) *NumberLiteralNode { - return &NumberLiteralNode{Value: value} -} - // BuildNullLiteral creates a null literal node func BuildNullLiteral() *ExpressionNode { return &ExpressionNode{Expression: "null"} @@ -130,37 +125,11 @@ func BuildNotEquals(left ConditionNode, right ConditionNode) *ComparisonNode { return BuildComparison(left, "!=", right) } -// BuildContains creates a contains() function call node -func BuildContains(array ConditionNode, value ConditionNode) *ContainsNode { - return &ContainsNode{Array: array, Value: value} -} - // BuildFunctionCall creates a function call node func BuildFunctionCall(functionName string, args ...ConditionNode) *FunctionCallNode { return &FunctionCallNode{FunctionName: functionName, Arguments: args} } -// BuildTernary creates a ternary conditional expression -func BuildTernary(condition ConditionNode, trueValue ConditionNode, falseValue ConditionNode) *TernaryNode { - return &TernaryNode{Condition: condition, TrueValue: trueValue, FalseValue: falseValue} -} - -// BuildLabelContains creates a condition to check if an issue/PR contains a specific label 
-func BuildLabelContains(labelName string) *ContainsNode { - return BuildContains( - BuildPropertyAccess("github.event.issue.labels.*.name"), - BuildStringLiteral(labelName), - ) -} - -// BuildActionEquals creates a condition to check if the event action equals a specific value -func BuildActionEquals(action string) *ComparisonNode { - return BuildEquals( - BuildPropertyAccess("github.event.action"), - BuildStringLiteral(action), - ) -} - // BuildNotFromFork creates a condition to check that a pull request is not from a forked repository // This prevents the job from running on forked PRs where write permissions are not available // Uses repository ID comparison instead of full name for more reliable matching @@ -257,22 +226,6 @@ func BuildEventTypeEquals(eventType string) *ComparisonNode { ) } -// BuildRefStartsWith creates a condition to check if github.ref starts with a prefix -func BuildRefStartsWith(prefix string) *FunctionCallNode { - return BuildFunctionCall("startsWith", - BuildPropertyAccess("github.ref"), - BuildStringLiteral(prefix), - ) -} - -// BuildExpressionWithDescription creates an expression node with an optional description -func BuildExpressionWithDescription(expression, description string) *ExpressionNode { - return &ExpressionNode{ - Expression: expression, - Description: description, - } -} - // BuildDisjunction creates a disjunction node (OR operation) from the given terms // Handles arrays of size 0, 1, or more correctly // The multiline parameter controls whether to render each term on a separate line @@ -283,38 +236,6 @@ func BuildDisjunction(multiline bool, terms ...ConditionNode) *DisjunctionNode { } } -// BuildPRCommentCondition creates a condition to check if the event is a comment on a pull request -// This checks for: -// - issue_comment on a PR (github.event.issue.pull_request != null) -// - pull_request_review_comment -// - pull_request_review -func BuildPRCommentCondition() ConditionNode { - // issue_comment event on a PR - 
issueCommentOnPR := BuildAnd( - BuildEventTypeEquals("issue_comment"), - BuildComparison( - BuildPropertyAccess("github.event.issue.pull_request"), - "!=", - &ExpressionNode{Expression: "null"}, - ), - ) - - // pull_request_review_comment event - prReviewComment := BuildEventTypeEquals("pull_request_review_comment") - - // pull_request_review event - prReview := BuildEventTypeEquals("pull_request_review") - - // Combine all conditions with OR - return &DisjunctionNode{ - Terms: []ConditionNode{ - issueCommentOnPR, - prReviewComment, - prReview, - }, - } -} - // RenderConditionAsIf renders a ConditionNode as an 'if' condition with proper YAML indentation func RenderConditionAsIf(yaml *strings.Builder, condition ConditionNode, indent string) { yaml.WriteString(" if: |\n") @@ -326,23 +247,3 @@ func RenderConditionAsIf(yaml *strings.Builder, condition ConditionNode, indent yaml.WriteString(indent + line + "\n") } } - -// AddDetectionSuccessCheck adds a check for detection success to an existing condition. -// Detection runs inline in the agent job and outputs detection_success. -// This ensures safe output jobs only run when threat detection passes. 
-func AddDetectionSuccessCheck(existingCondition string) string { - // Build the detection success check referencing the agent job's detection_success output - detectionSuccess := BuildComparison( - BuildPropertyAccess(fmt.Sprintf("needs.%s.outputs.detection_success", constants.AgentJobName)), - "==", - BuildStringLiteral("true"), - ) - - // If there's an existing condition, AND it with the detection check - if existingCondition != "" { - return fmt.Sprintf("(%s) && (%s)", existingCondition, detectionSuccess.Render()) - } - - // If no existing condition, just return the detection check - return detectionSuccess.Render() -} diff --git a/pkg/workflow/expression_coverage_test.go b/pkg/workflow/expression_coverage_test.go index fe26ca217c..3f5ab4ba11 100644 --- a/pkg/workflow/expression_coverage_test.go +++ b/pkg/workflow/expression_coverage_test.go @@ -6,69 +6,6 @@ import ( "testing" ) -// TestParenthesesNodeRender tests the ParenthesesNode Render method -func TestParenthesesNodeRender(t *testing.T) { - tests := []struct { - name string - child ConditionNode - expected string - }{ - { - name: "simple expression", - child: &ExpressionNode{Expression: "github.event_name == 'issues'"}, - expected: "(github.event_name == 'issues')", - }, - { - name: "nested expression", - child: &AndNode{ - Left: &ExpressionNode{Expression: "condition1"}, - Right: &ExpressionNode{Expression: "condition2"}, - }, - expected: "((condition1) && (condition2))", - }, - { - name: "function call", - child: &FunctionCallNode{ - FunctionName: "contains", - Arguments: []ConditionNode{ - BuildPropertyAccess("github.event.labels"), - BuildStringLiteral("bug"), - }, - }, - expected: "(contains(github.event.labels, 'bug'))", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - node := &ParenthesesNode{Child: tt.child} - result := node.Render() - if result != tt.expected { - t.Errorf("ParenthesesNode.Render() = %v, expected %v", result, tt.expected) - } - }) - } -} - -// 
TestAddDetectionSuccessCheckEmptyCondition tests AddDetectionSuccessCheck with empty condition -func TestAddDetectionSuccessCheckEmptyCondition(t *testing.T) { - result := AddDetectionSuccessCheck("") - expected := "needs.agent.outputs.detection_success == 'true'" - if result != expected { - t.Errorf("AddDetectionSuccessCheck(\"\") = %v, expected %v", result, expected) - } -} - -// TestAddDetectionSuccessCheckWithExistingCondition tests AddDetectionSuccessCheck with existing condition -func TestAddDetectionSuccessCheckWithExistingCondition(t *testing.T) { - existingCondition := "github.event.action == 'opened'" - result := AddDetectionSuccessCheck(existingCondition) - expected := "(github.event.action == 'opened') && (needs.agent.outputs.detection_success == 'true')" - if result != expected { - t.Errorf("AddDetectionSuccessCheck() = %v, expected %v", result, expected) - } -} - // TestBuildFromAllowedForksEmptyList tests BuildFromAllowedForks with empty list func TestBuildFromAllowedForksEmptyList(t *testing.T) { result := BuildFromAllowedForks([]string{}) @@ -194,22 +131,6 @@ func TestVisitExpressionTreeWithDifferentNodeTypes(t *testing.T) { expectedCount: 0, description: "FunctionCallNode should not be visited (not ExpressionNode)", }, - { - name: "TernaryNode", - node: BuildTernary( - &ExpressionNode{Expression: "condition"}, - &ExpressionNode{Expression: "true_value"}, - &ExpressionNode{Expression: "false_value"}, - ), - expectedCount: 0, - description: "TernaryNode should not recurse (not in visitor)", - }, - { - name: "ContainsNode", - node: BuildContains(BuildPropertyAccess("array"), BuildStringLiteral("value")), - expectedCount: 0, - description: "ContainsNode should not be visited (not ExpressionNode)", - }, { name: "DisjunctionNode with multiple terms", node: &DisjunctionNode{ diff --git a/pkg/workflow/expression_nodes.go b/pkg/workflow/expression_nodes.go index bbf62d0920..01d5e0379a 100644 --- a/pkg/workflow/expression_nodes.go +++ 
b/pkg/workflow/expression_nodes.go @@ -56,15 +56,6 @@ func (n *NotNode) Render() string { return fmt.Sprintf("!(%s)", n.Child.Render()) } -// ParenthesesNode wraps a condition in parentheses for proper YAML interpretation -type ParenthesesNode struct { - Child ConditionNode -} - -func (p *ParenthesesNode) Render() string { - return fmt.Sprintf("(%s)", p.Child.Render()) -} - // DisjunctionNode represents an OR operation with multiple terms to avoid deep nesting type DisjunctionNode struct { Terms []ConditionNode @@ -169,15 +160,6 @@ func (b *BooleanLiteralNode) Render() string { return "false" } -// NumberLiteralNode represents a numeric literal value -type NumberLiteralNode struct { - Value string -} - -func (n *NumberLiteralNode) Render() string { - return n.Value -} - // ComparisonNode represents comparison operations like ==, !=, <, >, <=, >= type ComparisonNode struct { Left ConditionNode @@ -188,24 +170,3 @@ type ComparisonNode struct { func (c *ComparisonNode) Render() string { return fmt.Sprintf("%s %s %s", c.Left.Render(), c.Operator, c.Right.Render()) } - -// TernaryNode represents ternary conditional expressions like condition ? true_value : false_value -type TernaryNode struct { - Condition ConditionNode - TrueValue ConditionNode - FalseValue ConditionNode -} - -func (t *TernaryNode) Render() string { - return fmt.Sprintf("%s ? 
%s : %s", t.Condition.Render(), t.TrueValue.Render(), t.FalseValue.Render()) -} - -// ContainsNode represents array membership checks using contains() function -type ContainsNode struct { - Array ConditionNode - Value ConditionNode -} - -func (c *ContainsNode) Render() string { - return fmt.Sprintf("contains(%s, %s)", c.Array.Render(), c.Value.Render()) -} diff --git a/pkg/workflow/expressions_test.go b/pkg/workflow/expressions_test.go index 800eb7b2c2..46e8eca950 100644 --- a/pkg/workflow/expressions_test.go +++ b/pkg/workflow/expressions_test.go @@ -327,39 +327,6 @@ func TestBooleanLiteralNode_Render(t *testing.T) { } } -func TestNumberLiteralNode_Render(t *testing.T) { - tests := []struct { - name string - value string - expected string - }{ - { - name: "integer", - value: "42", - expected: "42", - }, - { - name: "decimal", - value: "3.14", - expected: "3.14", - }, - { - name: "negative number", - value: "-10", - expected: "-10", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - node := &NumberLiteralNode{Value: tt.value} - if result := node.Render(); result != tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - func TestComparisonNode_Render(t *testing.T) { tests := []struct { name string @@ -379,21 +346,21 @@ func TestComparisonNode_Render(t *testing.T) { name: "inequality comparison", left: &PropertyAccessNode{PropertyPath: "github.event.issue.number"}, operator: "!=", - right: &NumberLiteralNode{Value: "0"}, + right: &ExpressionNode{Expression: "0"}, expected: "github.event.issue.number != 0", }, { name: "greater than comparison", left: &PropertyAccessNode{PropertyPath: "github.event.issue.comments"}, operator: ">", - right: &NumberLiteralNode{Value: "5"}, + right: &ExpressionNode{Expression: "5"}, expected: "github.event.issue.comments > 5", }, { name: "less than or equal comparison", left: &PropertyAccessNode{PropertyPath: "github.run_number"}, operator: "<=", - right: 
&NumberLiteralNode{Value: "100"}, + right: &ExpressionNode{Expression: "100"}, expected: "github.run_number <= 100", }, } @@ -412,194 +379,6 @@ func TestComparisonNode_Render(t *testing.T) { } } -func TestTernaryNode_Render(t *testing.T) { - tests := []struct { - name string - condition ConditionNode - trueValue ConditionNode - falseValue ConditionNode - expected string - }{ - { - name: "simple ternary", - condition: &ComparisonNode{ - Left: &PropertyAccessNode{PropertyPath: "github.event.action"}, - Operator: "==", - Right: &StringLiteralNode{Value: "opened"}, - }, - trueValue: &StringLiteralNode{Value: "new"}, - falseValue: &StringLiteralNode{Value: "existing"}, - expected: "github.event.action == 'opened' ? 'new' : 'existing'", - }, - { - name: "ternary with boolean literals", - condition: &PropertyAccessNode{PropertyPath: "github.event.pull_request.draft"}, - trueValue: &StringLiteralNode{Value: "draft"}, - falseValue: &StringLiteralNode{Value: "ready"}, - expected: "github.event.pull_request.draft ? 
'draft' : 'ready'", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - node := &TernaryNode{ - Condition: tt.condition, - TrueValue: tt.trueValue, - FalseValue: tt.falseValue, - } - if result := node.Render(); result != tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - -func TestContainsNode_Render(t *testing.T) { - tests := []struct { - name string - array ConditionNode - value ConditionNode - expected string - }{ - { - name: "contains with property and string", - array: &PropertyAccessNode{PropertyPath: "github.event.issue.labels"}, - value: &StringLiteralNode{Value: "bug"}, - expected: "contains(github.event.issue.labels, 'bug')", - }, - { - name: "contains with nested property", - array: &PropertyAccessNode{PropertyPath: "github.event.pull_request.requested_reviewers"}, - value: &PropertyAccessNode{PropertyPath: "github.actor"}, - expected: "contains(github.event.pull_request.requested_reviewers, github.actor)", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - node := &ContainsNode{ - Array: tt.array, - Value: tt.value, - } - if result := node.Render(); result != tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - -// TestGitHubActionsArrayMatching tests the specific array matching technique mentioned in the issue -func TestGitHubActionsArrayMatching(t *testing.T) { - // Test the array matching pattern from GitHub Actions docs - // Example: contains(github.event.issue.labels.*.name, 'bug') - tests := []struct { - name string - pattern ConditionNode - expected string - }{ - { - name: "label matching with contains", - pattern: &ContainsNode{ - Array: &PropertyAccessNode{PropertyPath: "github.event.issue.labels.*.name"}, - Value: &StringLiteralNode{Value: "bug"}, - }, - expected: "contains(github.event.issue.labels.*.name, 'bug')", - }, - { - name: "multiple label matching with OR", - pattern: &OrNode{ - Left: &ContainsNode{ - 
Array: &PropertyAccessNode{PropertyPath: "github.event.issue.labels.*.name"}, - Value: &StringLiteralNode{Value: "bug"}, - }, - Right: &ContainsNode{ - Array: &PropertyAccessNode{PropertyPath: "github.event.issue.labels.*.name"}, - Value: &StringLiteralNode{Value: "enhancement"}, - }, - }, - expected: "(contains(github.event.issue.labels.*.name, 'bug')) || (contains(github.event.issue.labels.*.name, 'enhancement'))", - }, - { - name: "complex array matching with conditions", - pattern: &AndNode{ - Left: &ContainsNode{ - Array: &PropertyAccessNode{PropertyPath: "github.event.issue.labels.*.name"}, - Value: &StringLiteralNode{Value: "priority-high"}, - }, - Right: &ComparisonNode{ - Left: &PropertyAccessNode{PropertyPath: "github.event.action"}, - Operator: "==", - Right: &StringLiteralNode{Value: "opened"}, - }, - }, - expected: "(contains(github.event.issue.labels.*.name, 'priority-high')) && (github.event.action == 'opened')", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if result := tt.pattern.Render(); result != tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - -// TestComplexGitHubActionsExpressions tests complex real-world GitHub Actions expressions -func TestComplexGitHubActionsExpressions(t *testing.T) { - tests := []struct { - name string - expression ConditionNode - expected string - }{ - { - name: "conditional workflow run based on labels and action", - expression: &AndNode{ - Left: &OrNode{ - Left: &ComparisonNode{ - Left: &PropertyAccessNode{PropertyPath: "github.event.action"}, - Operator: "==", - Right: &StringLiteralNode{Value: "opened"}, - }, - Right: &ComparisonNode{ - Left: &PropertyAccessNode{PropertyPath: "github.event.action"}, - Operator: "==", - Right: &StringLiteralNode{Value: "synchronize"}, - }, - }, - Right: &ContainsNode{ - Array: &PropertyAccessNode{PropertyPath: "github.event.pull_request.labels.*.name"}, - Value: &StringLiteralNode{Value: "auto-deploy"}, - }, 
- }, - expected: "((github.event.action == 'opened') || (github.event.action == 'synchronize')) && (contains(github.event.pull_request.labels.*.name, 'auto-deploy'))", - }, - { - name: "ternary expression for environment selection", - expression: &TernaryNode{ - Condition: &FunctionCallNode{ - FunctionName: "startsWith", - Arguments: []ConditionNode{ - &PropertyAccessNode{PropertyPath: "github.ref"}, - &StringLiteralNode{Value: "refs/heads/main"}, - }, - }, - TrueValue: &StringLiteralNode{Value: "production"}, - FalseValue: &StringLiteralNode{Value: "staging"}, - }, - expected: "startsWith(github.ref, 'refs/heads/main') ? 'production' : 'staging'", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if result := tt.expression.Render(); result != tt.expected { - t.Errorf("Expected '%s', got '%s'", tt.expected, result) - } - }) - } -} - // TestHelperFunctions tests the helper functions for building expressions func TestHelperFunctions(t *testing.T) { t.Run("BuildPropertyAccess", func(t *testing.T) { @@ -626,14 +405,6 @@ func TestHelperFunctions(t *testing.T) { } }) - t.Run("BuildNumberLiteral", func(t *testing.T) { - node := BuildNumberLiteral("42") - expected := "42" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) - t.Run("BuildEquals", func(t *testing.T) { node := BuildEquals( BuildPropertyAccess("github.event.action"), @@ -645,28 +416,6 @@ func TestHelperFunctions(t *testing.T) { } }) - t.Run("BuildNotEquals", func(t *testing.T) { - node := BuildNotEquals( - BuildPropertyAccess("github.event.issue.number"), - BuildNumberLiteral("0"), - ) - expected := "github.event.issue.number != 0" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) - - t.Run("BuildContains", func(t *testing.T) { - node := BuildContains( - BuildPropertyAccess("github.event.issue.labels.*.name"), - BuildStringLiteral("bug"), - ) - expected 
:= "contains(github.event.issue.labels.*.name, 'bug')" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) - t.Run("BuildFunctionCall", func(t *testing.T) { node := BuildFunctionCall("startsWith", BuildPropertyAccess("github.ref"), @@ -678,37 +427,10 @@ func TestHelperFunctions(t *testing.T) { } }) - t.Run("BuildTernary", func(t *testing.T) { - node := BuildTernary( - BuildEquals(BuildPropertyAccess("github.event.action"), BuildStringLiteral("opened")), - BuildStringLiteral("new"), - BuildStringLiteral("existing"), - ) - expected := "github.event.action == 'opened' ? 'new' : 'existing'" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) } // TestConvenienceHelpers tests the convenience helper functions func TestConvenienceHelpers(t *testing.T) { - t.Run("BuildLabelContains", func(t *testing.T) { - node := BuildLabelContains("bug") - expected := "contains(github.event.issue.labels.*.name, 'bug')" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) - - t.Run("BuildActionEquals", func(t *testing.T) { - node := BuildActionEquals("opened") - expected := "github.event.action == 'opened'" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) - t.Run("BuildEventTypeEquals", func(t *testing.T) { node := BuildEventTypeEquals("push") expected := "github.event_name == 'push'" @@ -717,13 +439,6 @@ func TestConvenienceHelpers(t *testing.T) { } }) - t.Run("BuildRefStartsWith", func(t *testing.T) { - node := BuildRefStartsWith("refs/heads/main") - expected := "startsWith(github.ref, 'refs/heads/main')" - if result := node.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - }) } // TestRealWorldExpressionPatterns tests common expression patterns used in GitHub Actions @@ -741,14 +456,6 @@ 
func TestRealWorldExpressionPatterns(t *testing.T) { ), expected: "github.ref == 'refs/heads/main'", }, - { - name: "run on PR with specific label", - expression: &AndNode{ - Left: BuildEventTypeEquals("pull_request"), - Right: BuildLabelContains("deploy"), - }, - expected: "(github.event_name == 'pull_request') && (contains(github.event.issue.labels.*.name, 'deploy'))", - }, { name: "skip draft PRs", expression: &AndNode{ @@ -759,26 +466,6 @@ func TestRealWorldExpressionPatterns(t *testing.T) { }, expected: "(github.event_name == 'pull_request') && (!(github.event.pull_request.draft))", }, - { - name: "conditional deployment environment", - expression: BuildTernary( - BuildRefStartsWith("refs/heads/main"), - BuildStringLiteral("production"), - BuildStringLiteral("staging"), - ), - expected: "startsWith(github.ref, 'refs/heads/main') ? 'production' : 'staging'", - }, - { - name: "run on multiple event actions", - expression: &DisjunctionNode{ - Terms: []ConditionNode{ - BuildActionEquals("opened"), - BuildActionEquals("synchronize"), - BuildActionEquals("reopened"), - }, - }, - expected: "github.event.action == 'opened' || github.event.action == 'synchronize' || github.event.action == 'reopened'", - }, } for _, tt := range tests { @@ -927,22 +614,9 @@ func TestRenderMultilineMethod(t *testing.T) { // TestHelperFunctionsForMultiline tests the new helper functions func TestHelperFunctionsForMultiline(t *testing.T) { - t.Run("BuildExpressionWithDescription", func(t *testing.T) { - expr := BuildExpressionWithDescription("github.event_name == 'issues'", "Check if this is an issue event") - - expected := "github.event_name == 'issues'" - if result := expr.Render(); result != expected { - t.Errorf("Expected '%s', got '%s'", expected, result) - } - - if expr.Description != "Check if this is an issue event" { - t.Errorf("Expected description 'Check if this is an issue event', got '%s'", expr.Description) - } - }) - t.Run("BuildDisjunction with multiline", func(t *testing.T) 
{ - term1 := BuildExpressionWithDescription("github.event_name == 'issues'", "Handle issue events") - term2 := BuildExpressionWithDescription("github.event_name == 'pull_request'", "Handle PR events") + term1 := &ExpressionNode{Expression: "github.event_name == 'issues'", Description: "Handle issue events"} + term2 := &ExpressionNode{Expression: "github.event_name == 'pull_request'", Description: "Handle PR events"} disjunction := BuildDisjunction(true, term1, term2) @@ -957,7 +631,7 @@ func TestHelperFunctionsForMultiline(t *testing.T) { }) t.Run("BuildDisjunction with single term", func(t *testing.T) { - term := BuildExpressionWithDescription("github.event_name == 'issues'", "Handle issue events") + term := &ExpressionNode{Expression: "github.event_name == 'issues'", Description: "Handle issue events"} // Test with multiline=false disjunctionSingle := BuildDisjunction(false, term) diff --git a/pkg/workflow/firewall_disable_integration_test.go b/pkg/workflow/firewall_disable_integration_test.go index 6c09ab59d0..dc2f2b2ca9 100644 --- a/pkg/workflow/firewall_disable_integration_test.go +++ b/pkg/workflow/firewall_disable_integration_test.go @@ -41,9 +41,8 @@ func TestFirewallDisableIntegration(t *testing.T) { } // Check validation triggers warning - engine := NewCopilotEngine() initialWarnings := compiler.warningCount - err := compiler.checkFirewallDisable(engine, networkPerms) + err := compiler.checkFirewallDisable(networkPerms) if err != nil { t.Errorf("Expected no error in non-strict mode, got: %v", err) } @@ -72,8 +71,7 @@ func TestFirewallDisableIntegration(t *testing.T) { t.Fatal("Expected network permissions to be extracted") } - engine := NewCopilotEngine() - err := compiler.checkFirewallDisable(engine, networkPerms) + err := compiler.checkFirewallDisable(networkPerms) if err == nil { t.Error("Expected error in strict mode when firewall is disabled with allowed domains") } diff --git a/pkg/workflow/frontmatter_types.go b/pkg/workflow/frontmatter_types.go 
index 2b3bd80892..e365f9b8c8 100644 --- a/pkg/workflow/frontmatter_types.go +++ b/pkg/workflow/frontmatter_types.go @@ -514,28 +514,6 @@ func ExtractMapField(frontmatter map[string]any, key string) map[string]any { return make(map[string]any) } -// ExtractStringField is a convenience wrapper for extracting string fields. -// Returns empty string if the key doesn't exist or cannot be converted. -func ExtractStringField(frontmatter map[string]any, key string) string { - var result string - err := unmarshalFromMap(frontmatter, key, &result) - if err != nil { - return "" - } - return result -} - -// ExtractIntField is a convenience wrapper for extracting integer fields. -// Returns 0 if the key doesn't exist or cannot be converted. -func ExtractIntField(frontmatter map[string]any, key string) int { - var result int - err := unmarshalFromMap(frontmatter, key, &result) - if err != nil { - return 0 - } - return result -} - // ToMap converts FrontmatterConfig back to map[string]any for backward compatibility // This allows gradual migration from map[string]any to strongly-typed config func (fc *FrontmatterConfig) ToMap() map[string]any { diff --git a/pkg/workflow/gemini_engine.go b/pkg/workflow/gemini_engine.go index ac8cd8ee55..563a6c679d 100644 --- a/pkg/workflow/gemini_engine.go +++ b/pkg/workflow/gemini_engine.go @@ -26,7 +26,6 @@ func NewGeminiEngine() *GeminiEngine { supportsMaxTurns: false, supportsWebFetch: false, supportsWebSearch: false, - supportsFirewall: true, // Gemini supports network firewalling via AWF supportsPlugins: false, supportsLLMGateway: true, // Gemini supports LLM gateway on port 10003 }, diff --git a/pkg/workflow/gemini_engine_test.go b/pkg/workflow/gemini_engine_test.go index 1faa411d57..a4bf6058fa 100644 --- a/pkg/workflow/gemini_engine_test.go +++ b/pkg/workflow/gemini_engine_test.go @@ -25,7 +25,6 @@ func TestGeminiEngine(t *testing.T) { assert.False(t, engine.SupportsMaxTurns(), "Should not support max turns") assert.False(t, 
engine.SupportsWebFetch(), "Should not support built-in web fetch") assert.False(t, engine.SupportsWebSearch(), "Should not support built-in web search") - assert.True(t, engine.SupportsFirewall(), "Should support firewall/AWF") assert.False(t, engine.SupportsPlugins(), "Should not support plugins") assert.Equal(t, 10003, engine.SupportsLLMGateway(), "Should support LLM gateway on port 10003") }) diff --git a/pkg/workflow/http_mcp_domains_test.go b/pkg/workflow/http_mcp_domains_test.go index e55841a589..c4f943afa8 100644 --- a/pkg/workflow/http_mcp_domains_test.go +++ b/pkg/workflow/http_mcp_domains_test.go @@ -183,7 +183,7 @@ func TestGetCodexAllowedDomainsWithTools(t *testing.T) { }, } - result := GetCodexAllowedDomainsWithTools(network, tools) + result := GetAllowedDomainsForEngine(constants.CodexEngine, network, tools, nil) // Should include Codex defaults, GitHub ecosystem, and Tavily domain require.Contains(t, result, "mcp.tavily.com", "Should include HTTP MCP domain") @@ -203,7 +203,7 @@ func TestGetCopilotAllowedDomainsWithTools(t *testing.T) { }, } - result := GetCopilotAllowedDomainsWithTools(network, tools) + result := GetAllowedDomainsForEngine(constants.CopilotEngine, network, tools, nil) // Should include Copilot defaults, Python ecosystem, and custom HTTP MCP domain require.Contains(t, result, "api.custom.com", "Should include HTTP MCP domain") @@ -224,7 +224,7 @@ func TestGetClaudeAllowedDomainsWithTools(t *testing.T) { }, } - result := GetClaudeAllowedDomainsWithTools(network, tools) + result := GetAllowedDomainsForEngine(constants.ClaudeEngine, network, tools, nil) // Should include Claude defaults, Node ecosystem, and example HTTP MCP domain require.Contains(t, result, "mcp.example.org", "Should include HTTP MCP domain") @@ -304,7 +304,7 @@ func TestGetCopilotAllowedDomainsWithPlaywright(t *testing.T) { "playwright": map[string]any{}, } - result := GetCopilotAllowedDomainsWithTools(network, tools) + result := 
GetAllowedDomainsForEngine(constants.CopilotEngine, network, tools, nil) // Should include Copilot defaults and Playwright ecosystem domains require.Contains(t, result, "playwright.download.prss.microsoft.com", "Should include Playwright download domain") @@ -322,7 +322,7 @@ func TestGetCodexAllowedDomainsWithPlaywright(t *testing.T) { "playwright": map[string]any{}, } - result := GetCodexAllowedDomainsWithTools(network, tools) + result := GetAllowedDomainsForEngine(constants.CodexEngine, network, tools, nil) // Should include Codex defaults and Playwright ecosystem domains require.Contains(t, result, "playwright.download.prss.microsoft.com", "Should include Playwright download domain") diff --git a/pkg/workflow/js.go b/pkg/workflow/js.go index 737f4c70d5..1eaa09a651 100644 --- a/pkg/workflow/js.go +++ b/pkg/workflow/js.go @@ -68,7 +68,6 @@ func init() { // All getter functions return empty strings since embedded scripts were removed func getAddCommentScript() string { return "" } -func getAddLabelsScript() string { return "" } func getAssignToAgentScript() string { return "" } func getCreateCodeScanningAlertScript() string { return "" } func getCreateDiscussionScript() string { return "" } diff --git a/pkg/workflow/known_needs_expressions.go b/pkg/workflow/known_needs_expressions.go index 4bc47595df..3dacb69aac 100644 --- a/pkg/workflow/known_needs_expressions.go +++ b/pkg/workflow/known_needs_expressions.go @@ -182,64 +182,6 @@ func normalizeOutputNameForEnvVar(outputName string) string { return normalizeJobNameForEnvVar(outputName) } -// getSafeOutputJobNames returns a list of safe output job names based on the configuration -func getSafeOutputJobNames(data *WorkflowData) []string { - var jobNames []string - - if data.SafeOutputs == nil { - return jobNames - } - - // These are the standard safe output job names that can be generated - if data.SafeOutputs.CreateIssues != nil { - jobNames = append(jobNames, "create_issue") - } - if 
data.SafeOutputs.CreateDiscussions != nil { - jobNames = append(jobNames, "create_discussion") - } - if data.SafeOutputs.AddComments != nil { - jobNames = append(jobNames, "add_comment") - } - if data.SafeOutputs.CreatePullRequests != nil { - jobNames = append(jobNames, "create_pull_request") - } - // Add the consolidated safe outputs job if it exists - // This is always named "safe_outputs" when multiple types are configured - if hasMultipleSafeOutputTypes(data.SafeOutputs) { - jobNames = append(jobNames, "safe_outputs") - } - - // Also add custom safe-job names from safe-jobs configuration - if data.SafeOutputs.Jobs != nil { - for jobName := range data.SafeOutputs.Jobs { - jobNames = append(jobNames, jobName) - } - } - - // Sort for consistent output - sort.Strings(jobNames) - - return jobNames -} - -// hasMultipleSafeOutputTypes checks if multiple safe output types are configured -func hasMultipleSafeOutputTypes(config *SafeOutputsConfig) bool { - count := 0 - if config.CreateIssues != nil { - count++ - } - if config.CreateDiscussions != nil { - count++ - } - if config.AddComments != nil { - count++ - } - if config.CreatePullRequests != nil { - count++ - } - return count > 1 -} - // getCustomJobsBeforeActivation returns a list of custom job names that run before the activation job // A custom job runs before activation ONLY if it explicitly depends on pre_activation // Note: Jobs without explicit 'needs' will automatically get 'needs: activation' added by the compiler, @@ -319,22 +261,3 @@ func parseNeedsField(needsField any) []string { return []string{} } } - -// getCustomJobNames returns a list of all custom job names from frontmatter -func getCustomJobNames(data *WorkflowData) []string { - var jobNames []string - - if data.Jobs == nil { - return jobNames - } - - // Extract job names from the Jobs map - for jobName := range data.Jobs { - jobNames = append(jobNames, jobName) - } - - // Sort for consistent output - sort.Strings(jobNames) - - return jobNames -} 
diff --git a/pkg/workflow/known_needs_expressions_test.go b/pkg/workflow/known_needs_expressions_test.go index 63f9149cbb..7ab0657e3f 100644 --- a/pkg/workflow/known_needs_expressions_test.go +++ b/pkg/workflow/known_needs_expressions_test.go @@ -204,59 +204,6 @@ func TestNormalizeOutputNameForEnvVar(t *testing.T) { } } -func TestGetSafeOutputJobNames(t *testing.T) { - tests := []struct { - name string - data *WorkflowData - expectedJobs []string - }{ - { - name: "no safe outputs", - data: &WorkflowData{}, - expectedJobs: []string{}, - }, - { - name: "single create-issues", - data: &WorkflowData{ - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{}, - }, - }, - expectedJobs: []string{"create_issue"}, - }, - { - name: "multiple safe output types", - data: &WorkflowData{ - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{}, - CreateDiscussions: &CreateDiscussionsConfig{}, - }, - }, - expectedJobs: []string{"create_discussion", "create_issue", "safe_outputs"}, - }, - { - name: "with custom safe-jobs", - data: &WorkflowData{ - SafeOutputs: &SafeOutputsConfig{ - CreateIssues: &CreateIssuesConfig{}, - Jobs: map[string]*SafeJobConfig{ - "my_custom_job": {}, - }, - }, - }, - expectedJobs: []string{"create_issue", "my_custom_job"}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - jobNames := getSafeOutputJobNames(tt.data) - assert.ElementsMatch(t, tt.expectedJobs, jobNames, - "Safe output job names mismatch") - }) - } -} - func TestGetCustomJobsBeforeActivation(t *testing.T) { tests := []struct { name string @@ -359,50 +306,6 @@ func TestGetCustomJobsBeforeActivation(t *testing.T) { } } -func TestGetCustomJobNames(t *testing.T) { - tests := []struct { - name string - data *WorkflowData - expectedJobs []string - }{ - { - name: "no custom jobs", - data: &WorkflowData{}, - expectedJobs: []string{}, - }, - { - name: "single custom job", - data: &WorkflowData{ - Jobs: map[string]any{ - "custom_job": 
map[string]any{ - "runs-on": "ubuntu-latest", - }, - }, - }, - expectedJobs: []string{"custom_job"}, - }, - { - name: "multiple custom jobs", - data: &WorkflowData{ - Jobs: map[string]any{ - "job_a": map[string]any{}, - "job_b": map[string]any{}, - "job_c": map[string]any{}, - }, - }, - expectedJobs: []string{"job_a", "job_b", "job_c"}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - jobNames := getCustomJobNames(tt.data) - assert.ElementsMatch(t, tt.expectedJobs, jobNames, - "Custom job names mismatch") - }) - } -} - func TestGenerateKnownNeedsExpressions_EnvVarFormat(t *testing.T) { data := &WorkflowData{} mappings := generateKnownNeedsExpressions(data, true) diff --git a/pkg/workflow/map_helpers.go b/pkg/workflow/map_helpers.go index a86572d92f..a603058cfd 100644 --- a/pkg/workflow/map_helpers.go +++ b/pkg/workflow/map_helpers.go @@ -18,14 +18,9 @@ // // Type Conversion: // - parseIntValue() - Safely parse numeric types to int with truncation warnings -// - isEmptyOrNil() - Check if a value is empty, nil, or zero // // Map Operations: // - filterMapKeys() - Create new map excluding specified keys -// - getMapFieldAsString() - Safely extract a string field from a map[string]any -// - getMapFieldAsMap() - Safely extract a nested map from a map[string]any -// - getMapFieldAsBool() - Safely extract a boolean field from a map[string]any -// - getMapFieldAsInt() - Safely extract an integer field from a map[string]any // // These utilities handle common type conversion and map manipulation patterns that // occur frequently during YAML-to-struct parsing and configuration processing. @@ -33,8 +28,6 @@ package workflow import ( - "strings" - "github.com/github/gh-aw/pkg/logger" ) @@ -83,221 +76,3 @@ func filterMapKeys(original map[string]any, excludeKeys ...string) map[string]an } return result } - -// isEmptyOrNil evaluates whether a value represents an empty or absent state. 
-// This consolidates various emptiness checks across the codebase into a single -// reusable function. The function handles multiple value types with appropriate -// emptiness semantics for each. -// -// Returns true when encountering: -// - nil values (representing absence) -// - strings that are empty or contain only whitespace -// - numeric types equal to zero -// - boolean false -// - collections (slices, maps) with no elements -// -// Usage pattern: -// -// if isEmptyOrNil(configValue) { -// return NewValidationError("fieldName", "", "required field missing", "provide a value") -// } -func isEmptyOrNil(candidate any) bool { - // Handle nil case first - if candidate == nil { - return true - } - - // Type-specific emptiness checks using reflection-free approach - switch typedValue := candidate.(type) { - case string: - // String is empty if blank after trimming whitespace - return len(strings.TrimSpace(typedValue)) == 0 - case int: - return typedValue == 0 - case int8: - return typedValue == 0 - case int16: - return typedValue == 0 - case int32: - return typedValue == 0 - case int64: - return typedValue == 0 - case uint: - return typedValue == 0 - case uint8: - return typedValue == 0 - case uint16: - return typedValue == 0 - case uint32: - return typedValue == 0 - case uint64: - return typedValue == 0 - case float32: - return typedValue == 0.0 - case float64: - return typedValue == 0.0 - case bool: - // false represents empty boolean state - return !typedValue - case []any: - return len(typedValue) == 0 - case map[string]any: - return len(typedValue) == 0 - } - - // Non-nil values of unrecognized types are considered non-empty - return false -} - -// getMapFieldAsString retrieves a string value from a configuration map with safe type handling. -// This function wraps the common pattern of extracting string fields from map[string]any structures -// that result from YAML parsing, providing consistent error behavior and logging. 
-// -// The function returns the fallback value in these scenarios: -// - Source map is nil -// - Requested key doesn't exist in map -// - Value at key is not a string type -// -// Parameters: -// - source: The configuration map to query -// - fieldKey: The key to look up in the map -// - fallback: Value returned when extraction fails -// -// Example usage: -// -// titleValue := getMapFieldAsString(frontmatter, "title", "") -// if titleValue == "" { -// return NewValidationError("title", "", "title required", "provide a title") -// } -func getMapFieldAsString(source map[string]any, fieldKey string, fallback string) string { - // Early return for nil map - if source == nil { - return fallback - } - - // Attempt to retrieve value - retrievedValue, keyFound := source[fieldKey] - if !keyFound { - return fallback - } - - // Verify type before returning - stringValue, isString := retrievedValue.(string) - if !isString { - mapHelpersLog.Printf("Type mismatch for key %q: expected string, found %T", fieldKey, retrievedValue) - return fallback - } - - return stringValue -} - -// getMapFieldAsMap retrieves a nested map value from a configuration map with safe type handling. -// This consolidates the pattern of extracting nested configuration sections while handling -// type mismatches gracefully. Returns nil when the field cannot be extracted as a map. 
-// -// Parameters: -// - source: The parent configuration map -// - fieldKey: The key identifying the nested map -// -// Example usage: -// -// toolsSection := getMapFieldAsMap(config, "tools") -// if toolsSection != nil { -// playwrightConfig := getMapFieldAsMap(toolsSection, "playwright") -// } -func getMapFieldAsMap(source map[string]any, fieldKey string) map[string]any { - // Guard against nil source - if source == nil { - return nil - } - - // Look up the field - retrievedValue, keyFound := source[fieldKey] - if !keyFound { - return nil - } - - // Type assert to nested map - mapValue, isMap := retrievedValue.(map[string]any) - if !isMap { - mapHelpersLog.Printf("Type mismatch for key %q: expected map[string]any, found %T", fieldKey, retrievedValue) - return nil - } - - return mapValue -} - -// getMapFieldAsBool retrieves a boolean value from a configuration map with safe type handling. -// This wraps the pattern of extracting boolean configuration flags while providing consistent -// fallback behavior when the value is missing or has an unexpected type. 
-// -// Parameters: -// - source: The configuration map to query -// - fieldKey: The key to look up -// - fallback: Value returned when extraction fails -// -// Example usage: -// -// sandboxEnabled := getMapFieldAsBool(config, "sandbox", false) -// if sandboxEnabled { -// // Enable sandbox mode -// } -func getMapFieldAsBool(source map[string]any, fieldKey string, fallback bool) bool { - // Handle nil source - if source == nil { - return fallback - } - - // Retrieve value from map - retrievedValue, keyFound := source[fieldKey] - if !keyFound { - return fallback - } - - // Verify boolean type - booleanValue, isBoolean := retrievedValue.(bool) - if !isBoolean { - mapHelpersLog.Printf("Type mismatch for key %q: expected bool, found %T", fieldKey, retrievedValue) - return fallback - } - - return booleanValue -} - -// getMapFieldAsInt retrieves an integer value from a configuration map with automatic numeric type conversion. -// This function handles the common pattern of extracting numeric config values that may be represented -// as various numeric types in YAML (int, int64, float64, uint64). It delegates to parseIntValue for -// the actual type conversion logic. 
-// -// Parameters: -// - source: The configuration map to query -// - fieldKey: The key to look up -// - fallback: Value returned when extraction or conversion fails -// -// Example usage: -// -// retentionDays := getMapFieldAsInt(config, "retention-days", 30) -// if err := validateIntRange(retentionDays, 1, 90, "retention-days"); err != nil { -// return err -// } -func getMapFieldAsInt(source map[string]any, fieldKey string, fallback int) int { - // Guard against nil source - if source == nil { - return fallback - } - - // Look up the value - retrievedValue, keyFound := source[fieldKey] - if !keyFound { - return fallback - } - - // Attempt numeric conversion using existing utility - convertedInt, conversionOk := parseIntValue(retrievedValue) - if !conversionOk { - mapHelpersLog.Printf("Failed to convert key %q to int: got %T", fieldKey, retrievedValue) - return fallback - } - - return convertedInt -} diff --git a/pkg/workflow/mcp_renderer.go b/pkg/workflow/mcp_renderer.go index c497939d83..efbe88e453 100644 --- a/pkg/workflow/mcp_renderer.go +++ b/pkg/workflow/mcp_renderer.go @@ -998,12 +998,7 @@ func RenderJSONMCPConfig( if options.GatewayConfig.PayloadDir != "" { fmt.Fprintf(&configBuilder, ",\n \"payloadDir\": \"%s\"", options.GatewayConfig.PayloadDir) } - if options.GatewayConfig.PayloadPathPrefix != "" { - fmt.Fprintf(&configBuilder, ",\n \"payloadPathPrefix\": \"%s\"", options.GatewayConfig.PayloadPathPrefix) - } - if options.GatewayConfig.PayloadSizeThreshold > 0 { - fmt.Fprintf(&configBuilder, ",\n \"payloadSizeThreshold\": %d", options.GatewayConfig.PayloadSizeThreshold) - } + configBuilder.WriteString("\n") configBuilder.WriteString(" }\n") } else { diff --git a/pkg/workflow/permissions_factory.go b/pkg/workflow/permissions_factory.go index 5095c625c5..0a7ef8fe1e 100644 --- a/pkg/workflow/permissions_factory.go +++ b/pkg/workflow/permissions_factory.go @@ -90,16 +90,6 @@ func NewPermissionsContentsReadIssuesWritePRWrite() *Permissions { }) } -// 
NewPermissionsContentsReadIssuesWritePRWriteDiscussionsWrite creates permissions with contents: read, issues: write, pull-requests: write, discussions: write -func NewPermissionsContentsReadIssuesWritePRWriteDiscussionsWrite() *Permissions { - return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ - PermissionContents: PermissionRead, - PermissionIssues: PermissionWrite, - PermissionPullRequests: PermissionWrite, - PermissionDiscussions: PermissionWrite, - }) -} - // NewPermissionsActionsWrite creates permissions with actions: write // This is required for dispatching workflows via workflow_dispatch func NewPermissionsActionsWrite() *Permissions { @@ -108,17 +98,6 @@ func NewPermissionsActionsWrite() *Permissions { }) } -// NewPermissionsActionsWriteContentsWriteIssuesWritePRWrite creates permissions with actions: write, contents: write, issues: write, pull-requests: write -// This is required for the replaceActorsForAssignable GraphQL mutation used to assign GitHub Copilot coding agent to issues -func NewPermissionsActionsWriteContentsWriteIssuesWritePRWrite() *Permissions { - return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ - PermissionActions: PermissionWrite, - PermissionContents: PermissionWrite, - PermissionIssues: PermissionWrite, - PermissionPullRequests: PermissionWrite, - }) -} - // NewPermissionsContentsWrite creates permissions with contents: write func NewPermissionsContentsWrite() *Permissions { return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ @@ -144,13 +123,6 @@ func NewPermissionsContentsWriteIssuesWritePRWrite() *Permissions { }) } -// NewPermissionsDiscussionsWrite creates permissions with discussions: write -func NewPermissionsDiscussionsWrite() *Permissions { - return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ - PermissionDiscussions: PermissionWrite, - }) -} - // NewPermissionsContentsReadDiscussionsWrite creates permissions with contents: read and discussions: write func 
NewPermissionsContentsReadDiscussionsWrite() *Permissions { return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ @@ -202,22 +174,3 @@ func NewPermissionsContentsReadProjectsWrite() *Permissions { PermissionOrganizationProj: PermissionWrite, }) } - -// NewPermissionsContentsWritePRReadIssuesRead creates permissions with contents: write, pull-requests: read, issues: read -func NewPermissionsContentsWritePRReadIssuesRead() *Permissions { - return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ - PermissionContents: PermissionWrite, - PermissionPullRequests: PermissionRead, - PermissionIssues: PermissionRead, - }) -} - -// NewPermissionsContentsWriteIssuesWritePRWriteDiscussionsWrite creates permissions with contents: write, issues: write, pull-requests: write, discussions: write -func NewPermissionsContentsWriteIssuesWritePRWriteDiscussionsWrite() *Permissions { - return NewPermissionsFromMap(map[PermissionScope]PermissionLevel{ - PermissionContents: PermissionWrite, - PermissionIssues: PermissionWrite, - PermissionPullRequests: PermissionWrite, - PermissionDiscussions: PermissionWrite, - }) -} diff --git a/pkg/workflow/safe_inputs_firewall_test.go b/pkg/workflow/safe_inputs_firewall_test.go index c364b92808..87a5b45425 100644 --- a/pkg/workflow/safe_inputs_firewall_test.go +++ b/pkg/workflow/safe_inputs_firewall_test.go @@ -5,6 +5,8 @@ package workflow import ( "strings" "testing" + + "github.com/github/gh-aw/pkg/constants" ) // TestSafeInputsWithFirewallIncludesHostDockerInternal tests that host.docker.internal @@ -59,7 +61,7 @@ func TestGetCopilotAllowedDomainsWithSafeInputs(t *testing.T) { Allowed: []string{"github.com"}, } - result := GetCopilotAllowedDomainsWithSafeInputs(network, true) + result := GetAllowedDomainsForEngine(constants.CopilotEngine, network, nil, nil) if !strings.Contains(result, "host.docker.internal") { t.Errorf("Expected result to contain 'host.docker.internal', got: %s", result) @@ -75,7 +77,7 @@ func 
TestGetCopilotAllowedDomainsWithSafeInputs(t *testing.T) { Allowed: []string{"github.com"}, } - result := GetCopilotAllowedDomainsWithSafeInputs(network, false) + result := GetAllowedDomainsForEngine(constants.CopilotEngine, network, nil, nil) // host.docker.internal is now in default domains, so it's always included if !strings.Contains(result, "host.docker.internal") { @@ -92,7 +94,7 @@ func TestGetCopilotAllowedDomainsWithSafeInputs(t *testing.T) { Allowed: []string{"github.com"}, } - result := GetCopilotAllowedDomains(network) + result := GetAllowedDomainsForEngine(constants.CopilotEngine, network, nil, nil) // host.docker.internal is now in default domains if !strings.Contains(result, "host.docker.internal") { diff --git a/pkg/workflow/safe_output_parser.go b/pkg/workflow/safe_output_parser.go index 9e479e5ecd..9ed7ad4723 100644 --- a/pkg/workflow/safe_output_parser.go +++ b/pkg/workflow/safe_output_parser.go @@ -76,22 +76,6 @@ func ParseFilterConfig(configMap map[string]any) SafeOutputFilterConfig { return config } -// ParseDiscussionFilterConfig parses filter config plus required-category for discussion operations. -func ParseDiscussionFilterConfig(configMap map[string]any) SafeOutputDiscussionFilterConfig { - config := SafeOutputDiscussionFilterConfig{ - SafeOutputFilterConfig: ParseFilterConfig(configMap), - } - - // Parse required-category - if requiredCategory, exists := configMap["required-category"]; exists { - if categoryStr, ok := requiredCategory.(string); ok { - config.RequiredCategory = categoryStr - } - } - - return config -} - // parseRequiredLabelsFromConfig extracts and validates required-labels from a config map. // Returns a slice of label strings, or nil if not present or invalid. 
func parseRequiredLabelsFromConfig(configMap map[string]any) []string { @@ -103,66 +87,3 @@ func parseRequiredLabelsFromConfig(configMap map[string]any) []string { func parseRequiredTitlePrefixFromConfig(configMap map[string]any) string { return extractStringFromMap(configMap, "required-title-prefix", safeOutputParserLog) } - -// ParseCloseJobConfig parses common close job fields from a config map. -// Returns the parsed CloseJobConfig and a boolean indicating if there was a validation error. -func ParseCloseJobConfig(configMap map[string]any) (CloseJobConfig, bool) { - config := CloseJobConfig{} - - // Parse target config - targetConfig, isInvalid := ParseTargetConfig(configMap) - if isInvalid { - return config, true - } - config.SafeOutputTargetConfig = targetConfig - - // Parse filter config - config.SafeOutputFilterConfig = ParseFilterConfig(configMap) - - return config, false -} - -// ParseListJobConfig parses common list job fields from a config map. -// Returns the parsed ListJobConfig and a boolean indicating if there was a validation error. -func ParseListJobConfig(configMap map[string]any, allowedKey string) (ListJobConfig, bool) { - config := ListJobConfig{} - - // Parse target config - targetConfig, isInvalid := ParseTargetConfig(configMap) - if isInvalid { - return config, true - } - config.SafeOutputTargetConfig = targetConfig - - // Parse allowed list (using the specified key like "allowed", "reviewers", etc.) 
- if allowed, exists := configMap[allowedKey]; exists { - // Handle single string format - if allowedStr, ok := allowed.(string); ok { - config.Allowed = []string{allowedStr} - } else if allowedArray, ok := allowed.([]any); ok { - // Handle array format - for _, item := range allowedArray { - if itemStr, ok := item.(string); ok { - config.Allowed = append(config.Allowed, itemStr) - } - } - } - } - - // Parse blocked list - if blocked, exists := configMap["blocked"]; exists { - // Handle single string format - if blockedStr, ok := blocked.(string); ok { - config.Blocked = []string{blockedStr} - } else if blockedArray, ok := blocked.([]any); ok { - // Handle array format - for _, item := range blockedArray { - if itemStr, ok := item.(string); ok { - config.Blocked = append(config.Blocked, itemStr) - } - } - } - } - - return config, false -} diff --git a/pkg/workflow/schemas/mcp-gateway-config.schema.json b/pkg/workflow/schemas/mcp-gateway-config.schema.json index 5fc0eae654..7c31da0a93 100644 --- a/pkg/workflow/schemas/mcp-gateway-config.schema.json +++ b/pkg/workflow/schemas/mcp-gateway-config.schema.json @@ -205,6 +205,16 @@ "description": "Directory path for storing large payload JSON files for authenticated clients. MUST be an absolute path: Unix paths start with '/', Windows paths start with a drive letter followed by ':\\'. Relative paths, empty strings, and paths that don't follow these conventions are not allowed.", "minLength": 1, "pattern": "^(/|[A-Za-z]:\\\\)" + }, + "payloadSizeThreshold": { + "type": "integer", + "description": "Size threshold in bytes for writing payloads to files instead of inlining them in the response. Payloads larger than this threshold are written to files in payloadDir. Defaults to 524288 (512KB) if not specified.", + "minimum": 1 + }, + "payloadPathPrefix": { + "type": "string", + "description": "Optional path prefix for payload file paths as seen from within agent containers. 
Use this when the payload directory is mounted at a different path inside the container than on the host.", + "minLength": 1 } }, "required": ["port", "domain", "apiKey"], diff --git a/pkg/workflow/validation_helpers.go b/pkg/workflow/validation_helpers.go index 86a84bb3e3..e2065c67a0 100644 --- a/pkg/workflow/validation_helpers.go +++ b/pkg/workflow/validation_helpers.go @@ -7,12 +7,6 @@ // # Available Helper Functions // // - validateIntRange() - Validates that an integer value is within a specified range -// - ValidateRequired() - Validates that a required field is not empty -// - ValidateMaxLength() - Validates that a field does not exceed maximum length -// - ValidateMinLength() - Validates that a field meets minimum length requirement -// - ValidateInList() - Validates that a value is in an allowed list -// - ValidatePositiveInt() - Validates that a value is a positive integer -// - ValidateNonNegativeInt() - Validates that a value is a non-negative integer // - validateMountStringFormat() - Parses and validates a "source:dest:mode" mount string // // # Design Rationale @@ -31,8 +25,6 @@ package workflow import ( "errors" "fmt" - "slices" - "strconv" "strings" "github.com/github/gh-aw/pkg/logger" @@ -68,87 +60,6 @@ func validateIntRange(value, min, max int, fieldName string) error { return nil } -// ValidateRequired validates that a required field is not empty -func ValidateRequired(field, value string) error { - if strings.TrimSpace(value) == "" { - validationHelpersLog.Printf("Required field validation failed: field=%s", field) - return NewValidationError( - field, - value, - "field is required and cannot be empty", - fmt.Sprintf("Provide a non-empty value for '%s'", field), - ) - } - return nil -} - -// ValidateMaxLength validates that a field does not exceed maximum length -func ValidateMaxLength(field, value string, maxLength int) error { - if len(value) > maxLength { - return NewValidationError( - field, - value, - fmt.Sprintf("field exceeds maximum 
length of %d characters (actual: %d)", maxLength, len(value)), - fmt.Sprintf("Shorten '%s' to %d characters or less", field, maxLength), - ) - } - return nil -} - -// ValidateMinLength validates that a field meets minimum length requirement -func ValidateMinLength(field, value string, minLength int) error { - if len(value) < minLength { - return NewValidationError( - field, - value, - fmt.Sprintf("field is shorter than minimum length of %d characters (actual: %d)", minLength, len(value)), - fmt.Sprintf("Ensure '%s' is at least %d characters long", field, minLength), - ) - } - return nil -} - -// ValidateInList validates that a value is in an allowed list -func ValidateInList(field, value string, allowedValues []string) error { - if slices.Contains(allowedValues, value) { - return nil - } - - validationHelpersLog.Printf("List validation failed: field=%s, value=%s not in allowed list", field, value) - return NewValidationError( - field, - value, - fmt.Sprintf("value is not in allowed list: %v", allowedValues), - fmt.Sprintf("Choose one of the allowed values for '%s': %s", field, strings.Join(allowedValues, ", ")), - ) -} - -// ValidatePositiveInt validates that a value is a positive integer -func ValidatePositiveInt(field string, value int) error { - if value <= 0 { - return NewValidationError( - field, - strconv.Itoa(value), - "value must be a positive integer", - fmt.Sprintf("Provide a positive integer value for '%s'", field), - ) - } - return nil -} - -// ValidateNonNegativeInt validates that a value is a non-negative integer -func ValidateNonNegativeInt(field string, value int) error { - if value < 0 { - return NewValidationError( - field, - strconv.Itoa(value), - "value must be a non-negative integer", - fmt.Sprintf("Provide a non-negative integer value for '%s'", field), - ) - } - return nil -} - // validateMountStringFormat parses a mount string and validates its basic format. // Expected format: "source:destination:mode" where mode is "ro" or "rw". 
// Returns (source, dest, mode, nil) on success, or ("", "", "", error) on failure. diff --git a/pkg/workflow/validation_helpers_test.go b/pkg/workflow/validation_helpers_test.go index b6ea6df8cb..c4885e23b2 100644 --- a/pkg/workflow/validation_helpers_test.go +++ b/pkg/workflow/validation_helpers_test.go @@ -310,412 +310,6 @@ func TestValidateIntRangeWithRealWorldValues(t *testing.T) { } } -func TestValidateRequired(t *testing.T) { - t.Run("valid non-empty value", func(t *testing.T) { - err := ValidateRequired("title", "my title") - assert.NoError(t, err) - }) - - t.Run("empty value fails", func(t *testing.T) { - err := ValidateRequired("title", "") - require.Error(t, err) - assert.Contains(t, err.Error(), "field is required") - assert.Contains(t, err.Error(), "Provide a non-empty value") - }) - - t.Run("whitespace-only value fails", func(t *testing.T) { - err := ValidateRequired("title", " ") - require.Error(t, err) - assert.Contains(t, err.Error(), "cannot be empty") - }) -} - -func TestValidateMaxLength(t *testing.T) { - t.Run("value within limit", func(t *testing.T) { - err := ValidateMaxLength("title", "short", 100) - assert.NoError(t, err) - }) - - t.Run("value at limit", func(t *testing.T) { - err := ValidateMaxLength("title", "12345", 5) - assert.NoError(t, err) - }) - - t.Run("value exceeds limit", func(t *testing.T) { - err := ValidateMaxLength("title", "too long value", 5) - require.Error(t, err) - assert.Contains(t, err.Error(), "exceeds maximum length") - assert.Contains(t, err.Error(), "Shorten") - }) -} - -func TestValidateMinLength(t *testing.T) { - t.Run("value meets minimum", func(t *testing.T) { - err := ValidateMinLength("title", "hello", 3) - assert.NoError(t, err) - }) - - t.Run("value below minimum", func(t *testing.T) { - err := ValidateMinLength("title", "hi", 5) - require.Error(t, err) - assert.Contains(t, err.Error(), "shorter than minimum length") - assert.Contains(t, err.Error(), "at least 5 characters") - }) -} - -func 
TestValidateInList(t *testing.T) { - allowedValues := []string{"open", "closed", "draft"} - - t.Run("value in list", func(t *testing.T) { - err := ValidateInList("status", "open", allowedValues) - assert.NoError(t, err) - }) - - t.Run("value not in list", func(t *testing.T) { - err := ValidateInList("status", "invalid", allowedValues) - require.Error(t, err) - assert.Contains(t, err.Error(), "not in allowed list") - assert.Contains(t, err.Error(), "open, closed, draft") - }) -} - -func TestValidatePositiveInt(t *testing.T) { - t.Run("positive integer", func(t *testing.T) { - err := ValidatePositiveInt("count", 5) - assert.NoError(t, err) - }) - - t.Run("zero fails", func(t *testing.T) { - err := ValidatePositiveInt("count", 0) - require.Error(t, err) - assert.Contains(t, err.Error(), "must be a positive integer") - }) - - t.Run("negative fails", func(t *testing.T) { - err := ValidatePositiveInt("count", -1) - require.Error(t, err) - assert.Contains(t, err.Error(), "must be a positive integer") - }) -} - -func TestValidateNonNegativeInt(t *testing.T) { - t.Run("positive integer", func(t *testing.T) { - err := ValidateNonNegativeInt("count", 5) - assert.NoError(t, err) - }) - - t.Run("zero is valid", func(t *testing.T) { - err := ValidateNonNegativeInt("count", 0) - assert.NoError(t, err) - }) - - t.Run("negative fails", func(t *testing.T) { - err := ValidateNonNegativeInt("count", -1) - require.Error(t, err) - assert.Contains(t, err.Error(), "must be a non-negative integer") - }) -} - -// TestIsEmptyOrNil tests the isEmptyOrNil helper function -func TestIsEmptyOrNil(t *testing.T) { - tests := []struct { - name string - value any - expected bool - }{ - // Nil values - {"nil value", nil, true}, - - // String values - {"empty string", "", true}, - {"whitespace string", " ", true}, - {"non-empty string", "hello", false}, - - // Integer values - {"zero int", 0, true}, - {"positive int", 5, false}, - {"negative int", -1, false}, - {"zero int64", int64(0), true}, - 
{"positive int64", int64(5), false}, - - // Unsigned integer values - {"zero uint", uint(0), true}, - {"positive uint", uint(5), false}, - {"zero uint64", uint64(0), true}, - {"positive uint64", uint64(5), false}, - - // Float values - {"zero float32", float32(0), true}, - {"positive float32", float32(5.5), false}, - {"zero float64", float64(0), true}, - {"positive float64", float64(5.5), false}, - - // Boolean values - {"false bool", false, true}, - {"true bool", true, false}, - - // Slice values - {"empty slice", []any{}, true}, - {"non-empty slice", []any{1, 2}, false}, - - // Map values - {"empty map", map[string]any{}, true}, - {"non-empty map", map[string]any{"key": "value"}, false}, - - // Other types - {"struct value", struct{ field string }{"value"}, false}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := isEmptyOrNil(tt.value) - assert.Equal(t, tt.expected, result, "isEmptyOrNil(%v) = %v, want %v", tt.value, result, tt.expected) - }) - } -} - -// TestGetMapFieldAsString tests the getMapFieldAsString helper function -func TestGetMapFieldAsString(t *testing.T) { - tests := []struct { - name string - m map[string]any - key string - defaultVal string - expected string - }{ - { - name: "extract existing string", - m: map[string]any{"title": "Test Title"}, - key: "title", - defaultVal: "", - expected: "Test Title", - }, - { - name: "missing key returns default", - m: map[string]any{"other": "value"}, - key: "title", - defaultVal: "default", - expected: "default", - }, - { - name: "non-string value returns default", - m: map[string]any{"title": 123}, - key: "title", - defaultVal: "default", - expected: "default", - }, - { - name: "nil map returns default", - m: nil, - key: "title", - defaultVal: "default", - expected: "default", - }, - { - name: "empty string value", - m: map[string]any{"title": ""}, - key: "title", - defaultVal: "default", - expected: "", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t 
*testing.T) { - result := getMapFieldAsString(tt.m, tt.key, tt.defaultVal) - assert.Equal(t, tt.expected, result) - }) - } -} - -// TestGetMapFieldAsMap tests the getMapFieldAsMap helper function -func TestGetMapFieldAsMap(t *testing.T) { - tests := []struct { - name string - m map[string]any - key string - expected map[string]any - }{ - { - name: "extract existing nested map", - m: map[string]any{ - "network": map[string]any{ - "allowed-domains": "example.com", - }, - }, - key: "network", - expected: map[string]any{"allowed-domains": "example.com"}, - }, - { - name: "missing key returns nil", - m: map[string]any{"other": "value"}, - key: "network", - expected: nil, - }, - { - name: "non-map value returns nil", - m: map[string]any{"network": "not a map"}, - key: "network", - expected: nil, - }, - { - name: "nil map returns nil", - m: nil, - key: "network", - expected: nil, - }, - { - name: "empty nested map", - m: map[string]any{"network": map[string]any{}}, - key: "network", - expected: map[string]any{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := getMapFieldAsMap(tt.m, tt.key) - assert.Equal(t, tt.expected, result) - }) - } -} - -// TestGetMapFieldAsBool tests the getMapFieldAsBool helper function -func TestGetMapFieldAsBool(t *testing.T) { - tests := []struct { - name string - m map[string]any - key string - defaultVal bool - expected bool - }{ - { - name: "extract true value", - m: map[string]any{"enabled": true}, - key: "enabled", - defaultVal: false, - expected: true, - }, - { - name: "extract false value", - m: map[string]any{"enabled": false}, - key: "enabled", - defaultVal: true, - expected: false, - }, - { - name: "missing key returns default", - m: map[string]any{"other": true}, - key: "enabled", - defaultVal: true, - expected: true, - }, - { - name: "non-bool value returns default", - m: map[string]any{"enabled": "true"}, - key: "enabled", - defaultVal: false, - expected: false, - }, - { - name: "nil map 
returns default", - m: nil, - key: "enabled", - defaultVal: true, - expected: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := getMapFieldAsBool(tt.m, tt.key, tt.defaultVal) - assert.Equal(t, tt.expected, result) - }) - } -} - -// TestGetMapFieldAsInt tests the getMapFieldAsInt helper function -func TestGetMapFieldAsInt(t *testing.T) { - tests := []struct { - name string - m map[string]any - key string - defaultVal int - expected int - }{ - { - name: "extract int value", - m: map[string]any{"max-size": 100}, - key: "max-size", - defaultVal: 0, - expected: 100, - }, - { - name: "extract int64 value", - m: map[string]any{"max-size": int64(200)}, - key: "max-size", - defaultVal: 0, - expected: 200, - }, - { - name: "extract float64 value", - m: map[string]any{"max-size": float64(300)}, - key: "max-size", - defaultVal: 0, - expected: 300, - }, - { - name: "extract uint64 value", - m: map[string]any{"max-size": uint64(400)}, - key: "max-size", - defaultVal: 0, - expected: 400, - }, - { - name: "missing key returns default", - m: map[string]any{"other": 100}, - key: "max-size", - defaultVal: 50, - expected: 50, - }, - { - name: "non-numeric value returns default", - m: map[string]any{"max-size": "100"}, - key: "max-size", - defaultVal: 50, - expected: 50, - }, - { - name: "nil map returns default", - m: nil, - key: "max-size", - defaultVal: 50, - expected: 50, - }, - { - name: "zero value", - m: map[string]any{"max-size": 0}, - key: "max-size", - defaultVal: 100, - expected: 0, - }, - { - name: "negative value", - m: map[string]any{"max-size": -10}, - key: "max-size", - defaultVal: 100, - expected: -10, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := getMapFieldAsInt(tt.m, tt.key, tt.defaultVal) - assert.Equal(t, tt.expected, result) - }) - } -} - // TestDirExists tests the fileutil.DirExists helper function func TestDirExists(t *testing.T) { t.Run("empty path returns false", func(t 
*testing.T) { From 5eb115406cc7048aa70be7257358a209db28d4c9 Mon Sep 17 00:00:00 2001 From: Peli de Halleux Date: Sat, 28 Feb 2026 06:21:43 -0800 Subject: [PATCH 09/15] Update pkg/workflow/checkout_manager_test.go Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- pkg/workflow/checkout_manager_test.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 9da2e7d005..313f6dc681 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -469,12 +469,6 @@ func TestBuildCheckoutsPromptContent(t *testing.T) { assert.Contains(t, content, "${{ github.repository }}", "should reference github.repository expression for default checkout") }) - t.Run("path dot treated same as empty path", func(t *testing.T) { - emptyContent := buildCheckoutsPromptContent([]*CheckoutConfig{{Path: ""}}) - dotContent := buildCheckoutsPromptContent([]*CheckoutConfig{{Path: "."}}) - assert.Equal(t, emptyContent, dotContent, "empty path and '.' should produce identical output") - }) - t.Run("checkout with explicit repo shows full path", func(t *testing.T) { content := buildCheckoutsPromptContent([]*CheckoutConfig{ {Repository: "owner/target", Path: "./target"}, From 06a3c7eed733069ed8ee02d8c67839aa75077b1c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 14:27:52 +0000 Subject: [PATCH 10/15] fix: remove path:. 
from root checkout examples; fix duplicate renderGuardPoliciesJSON from merge Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .../content/docs/reference/cross-repository.md | 6 ++---- pkg/workflow/checkout_manager.go | 3 +-- pkg/workflow/mcp_renderer.go | 18 ------------------ 3 files changed, 3 insertions(+), 24 deletions(-) diff --git a/docs/src/content/docs/reference/cross-repository.md b/docs/src/content/docs/reference/cross-repository.md index 7f39bee941..14d72ac341 100644 --- a/docs/src/content/docs/reference/cross-repository.md +++ b/docs/src/content/docs/reference/cross-repository.md @@ -33,8 +33,7 @@ You can also use `checkout:` to check out additional repositories alongside the ```yaml wrap checkout: - - path: . - fetch-depth: 0 + - fetch-depth: 0 - repository: owner/other-repo path: ./libs/other ref: main @@ -147,8 +146,7 @@ on: types: [opened, synchronize] checkout: - - path: . - fetch-depth: 0 + - fetch-depth: 0 - repository: org/shared-libs path: ./libs/shared ref: main diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index a437d3b491..46b74b56fa 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -22,8 +22,7 @@ var checkoutManagerLog = logger.New("workflow:checkout_manager") // Or multiple checkouts: // // checkout: -// - path: . -// fetch-depth: 0 +// - fetch-depth: 0 // - repository: owner/other-repo // path: ./libs/other // ref: main diff --git a/pkg/workflow/mcp_renderer.go b/pkg/workflow/mcp_renderer.go index 453915ab5e..242aecaf25 100644 --- a/pkg/workflow/mcp_renderer.go +++ b/pkg/workflow/mcp_renderer.go @@ -912,24 +912,6 @@ func renderGuardPoliciesJSON(yaml *strings.Builder, policies map[string]any, ind fmt.Fprintf(yaml, "%s\"guard-policies\": %s\n", indent, string(jsonBytes)) } -// renderGuardPoliciesJSON renders a "guard-policies" JSON field at the given indent level. 
-// The policies map contains policy names (e.g., "allow-only") mapped to their configurations. -// Renders as the last field (no trailing comma) with the given base indent. -func renderGuardPoliciesJSON(yaml *strings.Builder, policies map[string]any, indent string) { - if len(policies) == 0 { - return - } - - // Marshal to JSON with indentation, then re-indent to match the current indent level - jsonBytes, err := json.MarshalIndent(policies, indent, " ") - if err != nil { - mcpRendererLog.Printf("Failed to marshal guard-policies: %v", err) - return - } - - fmt.Fprintf(yaml, "%s\"guard-policies\": %s\n", indent, string(jsonBytes)) -} - // RenderJSONMCPConfig renders MCP configuration in JSON format with the common mcpServers structure. // This shared function extracts the duplicate pattern from Claude, Copilot, and Custom engines. // From 7e0e292fdf2336a36f5914c04a3e9ff2e87fd079 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 14:34:53 +0000 Subject: [PATCH 11/15] fix: normalize path '.' to '' in add() so empty and dot paths always merge as root Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- pkg/workflow/checkout_manager.go | 7 ++++++- pkg/workflow/checkout_manager_test.go | 12 ++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/pkg/workflow/checkout_manager.go b/pkg/workflow/checkout_manager.go index 46b74b56fa..fb5ed9fe85 100644 --- a/pkg/workflow/checkout_manager.go +++ b/pkg/workflow/checkout_manager.go @@ -121,9 +121,14 @@ func (cm *CheckoutManager) add(cfg *CheckoutConfig) { return } + // Normalize path: "." and "" both refer to the workspace root. + normalizedPath := cfg.Path + if normalizedPath == "." 
{ + normalizedPath = "" + } key := checkoutKey{ repository: cfg.Repository, - path: cfg.Path, + path: normalizedPath, } if idx, exists := cm.index[key]; exists { diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index 313f6dc681..c8fee65e2a 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -103,6 +103,18 @@ func TestCheckoutManagerMerging(t *testing.T) { assert.Len(t, cm.ordered, 1, "same path should be merged") assert.Equal(t, "main", cm.ordered[0].ref, "first-seen ref should win") }) + + t.Run("path dot and empty path are normalized to the same root checkout", func(t *testing.T) { + depth0 := 0 + cm := NewCheckoutManager([]*CheckoutConfig{ + {Path: ".", FetchDepth: nil}, + {Path: "", FetchDepth: &depth0}, + }) + assert.Len(t, cm.ordered, 1, "path '.' and '' should merge as the same root checkout") + assert.Equal(t, "", cm.ordered[0].key.path, "normalized path should be empty string") + require.NotNil(t, cm.ordered[0].fetchDepth, "fetch depth should be set from second config") + assert.Equal(t, 0, *cm.ordered[0].fetchDepth, "fetch depth 0 should win") + }) } // TestGenerateDefaultCheckoutStep verifies the default checkout step output. 
From 5bee2d3242c9efa055df26d7c5493fa1cf6bcaa9 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 14:45:42 +0000 Subject: [PATCH 12/15] fix: update golden files and codex test for payloadSizeThreshold removal; add current:true to smoke-codex checkout Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- .github/workflows/smoke-codex.lock.yml | 4 ++-- .github/workflows/smoke-codex.md | 3 ++- pkg/workflow/codex_engine_test.go | 3 +-- .../TestWasmGolden_CompileFixtures/basic-copilot.golden | 3 +-- .../TestWasmGolden_CompileFixtures/smoke-copilot.golden | 3 +-- .../TestWasmGolden_CompileFixtures/with-imports.golden | 3 +-- 6 files changed, 8 insertions(+), 11 deletions(-) diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 5480128c2a..5dcad1c459 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -28,7 +28,7 @@ # - shared/gh.md # - shared/reporting.md # -# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"bb46b86a2eb0aa7f857448cb6c55f108cb59f8457436996aa7af27d40bc7bff5"} +# gh-aw-metadata: {"schema_version":"v1","frontmatter_hash":"3ebe89d515d272dc2a563f4cde2e033400a08293c12461af6ae3b7134c657493"} name: "Smoke Codex" "on": @@ -189,7 +189,7 @@ jobs: - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ {{/if}} - **checkouts**: The following repositories have been checked out and are available in the workspace: - - `$GITHUB_WORKSPACE` → `__GH_AW_GITHUB_REPOSITORY__` (cwd) + - `$GITHUB_WORKSPACE` → `__GH_AW_GITHUB_REPOSITORY__` (cwd) (**current** - this is the repository you are working on; use this as the target for all GitHub operations unless otherwise specified) GH_AW_PROMPT_EOF diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md index bccabd34f9..1cce992416 100644 --- a/.github/workflows/smoke-codex.md +++ b/.github/workflows/smoke-codex.md @@ -63,7 
+63,8 @@ safe-outputs: run-failure: "🌑 The shadows whisper... [{workflow_name}]({run_url}) {status}. The oracle requires further meditation..." timeout-minutes: 15 checkout: - fetch-depth: 2 + - fetch-depth: 2 + current: true --- # Smoke Test: Codex Engine Validation diff --git a/pkg/workflow/codex_engine_test.go b/pkg/workflow/codex_engine_test.go index 9b9bbcc2e2..b01db5b917 100644 --- a/pkg/workflow/codex_engine_test.go +++ b/pkg/workflow/codex_engine_test.go @@ -325,8 +325,7 @@ func TestCodexEngineRenderMCPConfig(t *testing.T) { "\"port\": $MCP_GATEWAY_PORT,", "\"domain\": \"${MCP_GATEWAY_DOMAIN}\",", "\"apiKey\": \"${MCP_GATEWAY_API_KEY}\",", - "\"payloadDir\": \"${MCP_GATEWAY_PAYLOAD_DIR}\",", - fmt.Sprintf("\"payloadSizeThreshold\": %d", constants.DefaultMCPGatewayPayloadSizeThreshold), + "\"payloadDir\": \"${MCP_GATEWAY_PAYLOAD_DIR}\"", "}", "}", "GH_AW_MCP_CONFIG_EOF", diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden index fe734c3d3f..fb321cbf4a 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/basic-copilot.golden @@ -337,8 +337,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden index 24b1b412ca..b44b87a0b9 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/smoke-copilot.golden @@ -513,8 +513,7 @@ 
jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF diff --git a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden index dd3bf5e9e6..15d0e95d55 100644 --- a/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden +++ b/pkg/workflow/testdata/wasm_golden/TestWasmGolden_CompileFixtures/with-imports.golden @@ -340,8 +340,7 @@ jobs: "port": $MCP_GATEWAY_PORT, "domain": "${MCP_GATEWAY_DOMAIN}", "apiKey": "${MCP_GATEWAY_API_KEY}", - "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}", - "payloadSizeThreshold": 524288 + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } GH_AW_MCP_CONFIG_EOF From 3b8e2832947580301cb9826981a7fbd47af9c9d2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 28 Feb 2026 15:02:57 +0000 Subject: [PATCH 13/15] fix: resolve lint issues - testifylint in checkout test, remove unused isDockerAvailable and schemaValidationLog Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com> --- pkg/cli/docker_build_integration_test.go | 8 ++++++++ pkg/cli/docker_images.go | 8 -------- pkg/parser/schema_validation.go | 3 --- pkg/workflow/checkout_manager_test.go | 2 +- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/pkg/cli/docker_build_integration_test.go b/pkg/cli/docker_build_integration_test.go index d09120f5fb..771264fb57 100644 --- a/pkg/cli/docker_build_integration_test.go +++ b/pkg/cli/docker_build_integration_test.go @@ -10,6 +10,14 @@ import ( "testing" ) +// isDockerAvailable checks if Docker is available on the system +func isDockerAvailable() bool { + cmd := exec.Command("docker", "version") + cmd.Stdout = nil + cmd.Stderr = nil + 
return cmd.Run() == nil +} + // TestDockerfile_Exists verifies the Dockerfile exists and has expected content func TestDockerfile_Exists(t *testing.T) { // Get the repository root diff --git a/pkg/cli/docker_images.go b/pkg/cli/docker_images.go index e8dce6a096..a10b48897a 100644 --- a/pkg/cli/docker_images.go +++ b/pkg/cli/docker_images.go @@ -226,14 +226,6 @@ func CheckAndPrepareDockerImages(ctx context.Context, useZizmor, usePoutine, use return nil } -// isDockerAvailable checks if Docker is available on the system -func isDockerAvailable() bool { - cmd := exec.Command("docker", "version") - cmd.Stdout = nil - cmd.Stderr = nil - return cmd.Run() == nil -} - // ResetDockerPullState resets the internal pull state (for testing) func ResetDockerPullState() { pullState.mu.Lock() diff --git a/pkg/parser/schema_validation.go b/pkg/parser/schema_validation.go index 3e4ead0fb0..3c12c4dbe5 100644 --- a/pkg/parser/schema_validation.go +++ b/pkg/parser/schema_validation.go @@ -5,11 +5,8 @@ import ( "maps" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" ) -var schemaValidationLog = logger.New("parser:schema_validation") - // sharedWorkflowForbiddenFields is a map for O(1) lookup of forbidden fields in shared workflows var sharedWorkflowForbiddenFields = buildForbiddenFieldsMap() diff --git a/pkg/workflow/checkout_manager_test.go b/pkg/workflow/checkout_manager_test.go index c8fee65e2a..0ece10301e 100644 --- a/pkg/workflow/checkout_manager_test.go +++ b/pkg/workflow/checkout_manager_test.go @@ -111,7 +111,7 @@ func TestCheckoutManagerMerging(t *testing.T) { {Path: "", FetchDepth: &depth0}, }) assert.Len(t, cm.ordered, 1, "path '.' 
and '' should merge as the same root checkout") - assert.Equal(t, "", cm.ordered[0].key.path, "normalized path should be empty string") + assert.Empty(t, cm.ordered[0].key.path, "normalized path should be empty string") require.NotNil(t, cm.ordered[0].fetchDepth, "fetch depth should be set from second config") assert.Equal(t, 0, *cm.ordered[0].fetchDepth, "fetch depth 0 should win") }) From 26887ab82365705dc96a5f409a0c628cdb1cf029 Mon Sep 17 00:00:00 2001 From: runner Date: Sat, 28 Feb 2026 16:28:59 +0000 Subject: [PATCH 14/15] Add changeset [skip-ci] --- .changeset/patch-document-current-checkout.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .changeset/patch-document-current-checkout.md diff --git a/.changeset/patch-document-current-checkout.md b/.changeset/patch-document-current-checkout.md new file mode 100644 index 0000000000..0bf85ef8a3 --- /dev/null +++ b/.changeset/patch-document-current-checkout.md @@ -0,0 +1,4 @@ +--- +"gh-aw": patch +--- +Introduce the `current` checkout metadata so the workspace prompt, CLI behavior, and docs can highlight the primary repository. From edf0893b36944578be64206589b915bb607e8aac Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 28 Feb 2026 16:30:20 +0000 Subject: [PATCH 15/15] ci: trigger CI checks