diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index 7cb26f3ca4..af5283c022 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -57,18 +57,24 @@ jobs: function main() { const fs = require("fs"); const crypto = require("crypto"); - // Generate a random filename for the output file - const randomId = crypto.randomBytes(8).toString("hex"); - const outputFile = `/tmp/aw_output_${randomId}.txt`; - // Ensure the /tmp directory exists - fs.mkdirSync("/tmp", { recursive: true }); + // Create the safe outputs directory structure + const safeOutputsDir = "/tmp/gh-aw/safe-outputs"; + const filesDir = `${safeOutputsDir}/files`; + const outputFile = `${safeOutputsDir}/safe_outputs.jsonl`; + // Ensure the safe outputs directory structure exists + fs.mkdirSync(safeOutputsDir, { recursive: true }); + fs.mkdirSync(filesDir, { recursive: true }); // We don't create the file, as the name is sufficiently random // and some engines (Claude) fails first Write to the file // if it exists and has not been read. 
- // Set the environment variable for subsequent steps + // Set the environment variables for subsequent steps core.exportVariable("GITHUB_AW_SAFE_OUTPUTS", outputFile); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_DIR", safeOutputsDir); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_FILES_DIR", filesDir); // Also set as step output for reference core.setOutput("output_file", outputFile); + core.setOutput("output_dir", safeOutputsDir); + core.setOutput("files_dir", filesDir); } main(); - name: Setup Safe Outputs Collector MCP @@ -85,6 +91,17 @@ jobs: const outputFile = process.env.GITHUB_AW_SAFE_OUTPUTS; if (!outputFile) throw new Error("GITHUB_AW_SAFE_OUTPUTS not set, no output file"); + // Validate required directory environment variables + const safeOutputsDir = process.env.GITHUB_AW_SAFE_OUTPUTS_DIR; + if (!safeOutputsDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_DIR not set, no safe outputs directory" + ); + const filesDir = process.env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR; + if (!filesDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_FILES_DIR not set, no files directory" + ); const SERVER_INFO = { name: "safe-outputs-mcp-server", version: "1.0.0" }; const debug = msg => process.stderr.write(`[${SERVER_INFO.name}] ${msg}\n`); function writeMessage(obj) { @@ -388,6 +405,99 @@ jobs: additionalProperties: false, }, }, + { + name: "push-to-orphaned-branch", + description: + "Upload a file to an orphaned branch and get a GitHub raw URL", + inputSchema: { + type: "object", + required: ["filename"], + properties: { + filename: { + type: "string", + description: + "Name of the file to upload. 
Screenshots and images can be uploaded using this safe output.", + }, + }, + additionalProperties: false, + }, + handler: args => { + const fs = require("fs"); + const path = require("path"); + const crypto = require("crypto"); + const { filename } = args; + if (!filename) { + throw new Error("filename is required"); + } + // Check if file exists + if (!fs.existsSync(filename)) { + throw new Error(`File not found: ${filename}`); + } + // Read the file content + const fileContent = fs.readFileSync(filename); + // Check file size (10MB limit) + const fileSizeBytes = fileContent.length; + const maxSizeBytes = 10 * 1024 * 1024; // 10MB + if (fileSizeBytes > maxSizeBytes) { + throw new Error( + `File size ${Math.round(fileSizeBytes / 1024 / 1024)}MB exceeds 10MB limit` + ); + } + // Compute SHA256 hash of the file content + const hash = crypto.createHash("sha256"); + hash.update(fileContent); + const fileSha = hash.digest("hex"); + // Get file extension from original filename + const originalExtension = path.extname(filename); + // Validate file extension is reasonable (up to 5 alphanumeric characters) + if (originalExtension) { + const extWithoutDot = originalExtension.slice(1); // Remove the leading dot + if ( + extWithoutDot.length > 5 || + !/^[a-zA-Z0-9]+$/.test(extWithoutDot) + ) { + throw new Error( + `File extension '${originalExtension}' is not allowed. 
Extension must be up to 5 alphanumeric characters.` + ); + } + } + const shaFilename = fileSha + originalExtension; + // Copy file to safe outputs files directory with SHA-based filename + const targetFile = path.join(filesDir, shaFilename); + // Ensure directory exists + fs.mkdirSync(filesDir, { recursive: true }); + // Copy the file + fs.copyFileSync(filename, targetFile); + // Create the output entry without base64 content (file is now copied to safe outputs dir) + const entry = { + type: "push-to-orphaned-branch", + filename: shaFilename, + original_filename: path.basename(filename), + sha: fileSha, + }; + appendSafeOutput(entry); + // Get branch configuration if available + const branchConfig = + safeOutputsConfig["push-to-orphaned-branch"]?.branch; + const branchName = branchConfig || "assets/{workflow-name}"; + // Get repository information from environment or use placeholders + const owner = process.env.GITHUB_REPOSITORY_OWNER || "{owner}"; + const repo = process.env.GITHUB_REPOSITORY + ? process.env.GITHUB_REPOSITORY.split("/")[1] + : "{repo}"; + // Create template URL (will be resolved during GitHub Actions execution) + const templateUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branchName}/${shaFilename}`; + // Return response with SHA information and expected URL + return { + content: [ + { + type: "text", + text: `File uploaded successfully. 
SHA: ${fileSha}, Original filename: ${path.basename(filename)}, Expected URL: ${templateUrl}`, + }, + ], + }; + }, + }, { name: "missing-tool", description: @@ -532,6 +642,8 @@ jobs: "args": ["/tmp/safe-outputs/mcp-server.cjs"], "env": { "GITHUB_AW_SAFE_OUTPUTS": "${{ env.GITHUB_AW_SAFE_OUTPUTS }}", + "GITHUB_AW_SAFE_OUTPUTS_DIR": "${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}", + "GITHUB_AW_SAFE_OUTPUTS_FILES_DIR": "${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}", "GITHUB_AW_SAFE_OUTPUTS_CONFIG": ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }} } } @@ -855,6 +967,8 @@ jobs: DISABLE_BUG_COMMAND: "1" GITHUB_AW_PROMPT: /tmp/aw-prompts/prompt.txt GITHUB_AW_SAFE_OUTPUTS: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} + GITHUB_AW_SAFE_OUTPUTS_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }} + GITHUB_AW_SAFE_OUTPUTS_FILES_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }} - name: Ensure log file exists if: always() run: | @@ -886,7 +1000,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: safe_output.jsonl - path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} + path: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }} if-no-files-found: warn - name: Ingest agent output id: collect_output diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml index c87b69025e..c20ba6d36a 100644 --- a/.github/workflows/dev.lock.yml +++ b/.github/workflows/dev.lock.yml @@ -236,23 +236,29 @@ jobs: function main() { const fs = require("fs"); const crypto = require("crypto"); - // Generate a random filename for the output file - const randomId = crypto.randomBytes(8).toString("hex"); - const outputFile = `/tmp/aw_output_${randomId}.txt`; - // Ensure the /tmp directory exists - fs.mkdirSync("/tmp", { recursive: true }); + // Create the safe outputs directory structure + const safeOutputsDir = "/tmp/gh-aw/safe-outputs"; + const filesDir = `${safeOutputsDir}/files`; + const outputFile = `${safeOutputsDir}/safe_outputs.jsonl`; + // Ensure the safe outputs directory structure exists + fs.mkdirSync(safeOutputsDir, { recursive: 
true }); + fs.mkdirSync(filesDir, { recursive: true }); // We don't create the file, as the name is sufficiently random // and some engines (Claude) fails first Write to the file // if it exists and has not been read. - // Set the environment variable for subsequent steps + // Set the environment variables for subsequent steps core.exportVariable("GITHUB_AW_SAFE_OUTPUTS", outputFile); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_DIR", safeOutputsDir); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_FILES_DIR", filesDir); // Also set as step output for reference core.setOutput("output_file", outputFile); + core.setOutput("output_dir", safeOutputsDir); + core.setOutput("files_dir", filesDir); } main(); - name: Setup Safe Outputs Collector MCP env: - GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"missing-tool\":{\"enabled\":true}}" + GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"create-issue\":true,\"missing-tool\":{\"enabled\":true},\"push-to-orphaned-branch\":{\"enabled\":true,\"max\":3}}" run: | mkdir -p /tmp/safe-outputs cat > /tmp/safe-outputs/mcp-server.cjs << 'EOF' @@ -264,6 +270,17 @@ jobs: const outputFile = process.env.GITHUB_AW_SAFE_OUTPUTS; if (!outputFile) throw new Error("GITHUB_AW_SAFE_OUTPUTS not set, no output file"); + // Validate required directory environment variables + const safeOutputsDir = process.env.GITHUB_AW_SAFE_OUTPUTS_DIR; + if (!safeOutputsDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_DIR not set, no safe outputs directory" + ); + const filesDir = process.env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR; + if (!filesDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_FILES_DIR not set, no files directory" + ); const SERVER_INFO = { name: "safe-outputs-mcp-server", version: "1.0.0" }; const debug = msg => process.stderr.write(`[${SERVER_INFO.name}] ${msg}\n`); function writeMessage(obj) { @@ -567,6 +584,99 @@ jobs: additionalProperties: false, }, }, + { + name: "push-to-orphaned-branch", + description: + "Upload a file to an orphaned branch and get a GitHub raw URL", + 
inputSchema: { + type: "object", + required: ["filename"], + properties: { + filename: { + type: "string", + description: + "Name of the file to upload. Screenshots and images can be uploaded using this safe output.", + }, + }, + additionalProperties: false, + }, + handler: args => { + const fs = require("fs"); + const path = require("path"); + const crypto = require("crypto"); + const { filename } = args; + if (!filename) { + throw new Error("filename is required"); + } + // Check if file exists + if (!fs.existsSync(filename)) { + throw new Error(`File not found: ${filename}`); + } + // Read the file content + const fileContent = fs.readFileSync(filename); + // Check file size (10MB limit) + const fileSizeBytes = fileContent.length; + const maxSizeBytes = 10 * 1024 * 1024; // 10MB + if (fileSizeBytes > maxSizeBytes) { + throw new Error( + `File size ${Math.round(fileSizeBytes / 1024 / 1024)}MB exceeds 10MB limit` + ); + } + // Compute SHA256 hash of the file content + const hash = crypto.createHash("sha256"); + hash.update(fileContent); + const fileSha = hash.digest("hex"); + // Get file extension from original filename + const originalExtension = path.extname(filename); + // Validate file extension is reasonable (up to 5 alphanumeric characters) + if (originalExtension) { + const extWithoutDot = originalExtension.slice(1); // Remove the leading dot + if ( + extWithoutDot.length > 5 || + !/^[a-zA-Z0-9]+$/.test(extWithoutDot) + ) { + throw new Error( + `File extension '${originalExtension}' is not allowed. 
Extension must be up to 5 alphanumeric characters.` + ); + } + } + const shaFilename = fileSha + originalExtension; + // Copy file to safe outputs files directory with SHA-based filename + const targetFile = path.join(filesDir, shaFilename); + // Ensure directory exists + fs.mkdirSync(filesDir, { recursive: true }); + // Copy the file + fs.copyFileSync(filename, targetFile); + // Create the output entry without base64 content (file is now copied to safe outputs dir) + const entry = { + type: "push-to-orphaned-branch", + filename: shaFilename, + original_filename: path.basename(filename), + sha: fileSha, + }; + appendSafeOutput(entry); + // Get branch configuration if available + const branchConfig = + safeOutputsConfig["push-to-orphaned-branch"]?.branch; + const branchName = branchConfig || "assets/{workflow-name}"; + // Get repository information from environment or use placeholders + const owner = process.env.GITHUB_REPOSITORY_OWNER || "{owner}"; + const repo = process.env.GITHUB_REPOSITORY + ? process.env.GITHUB_REPOSITORY.split("/")[1] + : "{repo}"; + // Create template URL (will be resolved during GitHub Actions execution) + const templateUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branchName}/${shaFilename}`; + // Return response with SHA information and expected URL + return { + content: [ + { + type: "text", + text: `File uploaded successfully. 
SHA: ${fileSha}, Original filename: ${path.basename(filename)}, Expected URL: ${templateUrl}`, + }, + ], + }; + }, + }, { name: "missing-tool", description: @@ -686,7 +796,7 @@ jobs: - name: Setup MCPs env: GITHUB_AW_SAFE_OUTPUTS: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} - GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"missing-tool\":{\"enabled\":true}}" + GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"create-issue\":true,\"missing-tool\":{\"enabled\":true},\"push-to-orphaned-branch\":{\"enabled\":true,\"max\":3}}" run: | mkdir -p /tmp/mcp-config cat > /tmp/mcp-config/mcp-servers.json << 'EOF' @@ -706,11 +816,21 @@ jobs: "GITHUB_PERSONAL_ACCESS_TOKEN": "${{ secrets.GITHUB_TOKEN }}" } }, + "playwright": { + "command": "npx", + "args": [ + "@playwright/mcp@latest", + "--allowed-origins", + "localhost,127.0.0.1,*.github.com,github.com" + ] + }, "safe_outputs": { "command": "node", "args": ["/tmp/safe-outputs/mcp-server.cjs"], "env": { "GITHUB_AW_SAFE_OUTPUTS": "${{ env.GITHUB_AW_SAFE_OUTPUTS }}", + "GITHUB_AW_SAFE_OUTPUTS_DIR": "${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}", + "GITHUB_AW_SAFE_OUTPUTS_FILES_DIR": "${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}", "GITHUB_AW_SAFE_OUTPUTS_CONFIG": ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }} } } @@ -724,12 +844,18 @@ jobs: run: | mkdir -p /tmp/aw-prompts cat > $GITHUB_AW_PROMPT << 'EOF' - Try to call a tool, `draw_pelican` that draws a pelican. + You have access to a `push-to-orphaned-branch` tool that can upload files (like screenshots) to an orphaned branch and return a GitHub raw URL. Use the expected URL from the response in your issue descriptions. + + Please: + 1. Build the documentation by running appropriate build commands + 2. Take a screenshot of the documentation using playwright + 3. Upload the screenshot using the push-to-orphaned-branch tool - it will give you a URL to use + 4. 
Create an issue describing the documentation status and include the screenshot using the URL provided by the upload tool --- - ## Reporting Missing Tools or Functionality + ## Creating an Issue, Uploading Files to Orphaned Branch, Reporting Missing Tools or Functionality **IMPORTANT**: To do the actions mentioned in the header of this section, use the **safe-outputs** tools, do NOT attempt to use `gh`, do NOT attempt to use the GitHub API. You don't have write access to the GitHub repo. EOF @@ -837,11 +963,32 @@ jobs: # - mcp__github__search_pull_requests # - mcp__github__search_repositories # - mcp__github__search_users + # - mcp__playwright__browser_click + # - mcp__playwright__browser_close + # - mcp__playwright__browser_console_messages + # - mcp__playwright__browser_drag + # - mcp__playwright__browser_evaluate + # - mcp__playwright__browser_file_upload + # - mcp__playwright__browser_fill_form + # - mcp__playwright__browser_handle_dialog + # - mcp__playwright__browser_hover + # - mcp__playwright__browser_install + # - mcp__playwright__browser_navigate + # - mcp__playwright__browser_navigate_back + # - mcp__playwright__browser_network_requests + # - mcp__playwright__browser_press_key + # - mcp__playwright__browser_resize + # - mcp__playwright__browser_select_option + # - mcp__playwright__browser_snapshot + # - mcp__playwright__browser_tabs + # - mcp__playwright__browser_take_screenshot + # - mcp__playwright__browser_type + # - mcp__playwright__browser_wait_for timeout-minutes: 5 run: | set -o pipefail # Execute Claude Code CLI with prompt from file - npx @anthropic-ai/claude-code@latest --print --max-turns 5 --mcp-config /tmp/mcp-config/mcp-servers.json --allowed-tools 
"ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_issue,mcp__github__get_issue_comments,mcp__github__get_job_logs,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issues,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_secret_scanning_alerts,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users" --debug --verbose --permission-mode bypassPermissions --output-format json --settings /tmp/.claude/settings.json "$(cat /tmp/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/dev.log + npx @anthropic-ai/claude-code@latest --print --max-turns 5 --mcp-config /tmp/mcp-config/mcp-servers.json --allowed-tools 
"ExitPlanMode,Glob,Grep,LS,NotebookRead,Read,Task,TodoWrite,Write,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_issue,mcp__github__get_issue_comments,mcp__github__get_job_logs,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issues,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_secret_scanning_alerts,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__
playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for" --debug --verbose --permission-mode bypassPermissions --output-format json --settings /tmp/.claude/settings.json "$(cat /tmp/aw-prompts/prompt.txt)" 2>&1 | tee /tmp/dev.log env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} DISABLE_TELEMETRY: "1" @@ -849,6 +996,8 @@ jobs: DISABLE_BUG_COMMAND: "1" GITHUB_AW_PROMPT: /tmp/aw-prompts/prompt.txt GITHUB_AW_SAFE_OUTPUTS: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} + GITHUB_AW_SAFE_OUTPUTS_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }} + GITHUB_AW_SAFE_OUTPUTS_FILES_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }} GITHUB_AW_SAFE_OUTPUTS_STAGED: "true" GITHUB_AW_MAX_TURNS: 5 - name: Ensure log file exists @@ -882,14 +1031,14 @@ jobs: uses: actions/upload-artifact@v4 with: name: safe_output.jsonl - path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} + path: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }} if-no-files-found: warn - name: Ingest agent output id: collect_output uses: actions/github-script@v7 env: GITHUB_AW_SAFE_OUTPUTS: ${{ env.GITHUB_AW_SAFE_OUTPUTS }} - GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"missing-tool\":{\"enabled\":true}}" + GITHUB_AW_SAFE_OUTPUTS_CONFIG: "{\"create-issue\":true,\"missing-tool\":{\"enabled\":true},\"push-to-orphaned-branch\":{\"enabled\":true,\"max\":3}}" with: script: | async function main() { @@ -2191,6 +2340,415 @@ jobs: path: /tmp/dev.log if-no-files-found: warn + create_issue: + needs: dev + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + timeout-minutes: 10 + outputs: + issue_number: ${{ steps.create_issue.outputs.issue_number }} + issue_url: ${{ steps.create_issue.outputs.issue_url }} + steps: + - name: Create Output Issue + id: create_issue + uses: actions/github-script@v7 + env: + GITHUB_AW_AGENT_OUTPUT: ${{ needs.dev.outputs.output }} + GITHUB_AW_SAFE_OUTPUTS_STAGED: "true" + with: + 
script: | + async function main() { + // Check if we're in staged mode + const isStaged = process.env.GITHUB_AW_SAFE_OUTPUTS_STAGED === "true"; + // Read the validated output content from environment variable + const outputContent = process.env.GITHUB_AW_AGENT_OUTPUT; + if (!outputContent) { + core.info("No GITHUB_AW_AGENT_OUTPUT environment variable found"); + return; + } + if (outputContent.trim() === "") { + core.info("Agent output content is empty"); + return; + } + core.info(`Agent output content length: ${outputContent.length}`); + // Parse the validated output JSON + let validatedOutput; + try { + validatedOutput = JSON.parse(outputContent); + } catch (error) { + core.setFailed( + `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}` + ); + return; + } + if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) { + core.info("No valid items found in agent output"); + return; + } + // Find all create-issue items + const createIssueItems = validatedOutput.items.filter( + /** @param {any} item */ item => item.type === "create-issue" + ); + if (createIssueItems.length === 0) { + core.info("No create-issue items found in agent output"); + return; + } + core.info(`Found ${createIssueItems.length} create-issue item(s)`); + // If in staged mode, emit step summary instead of creating issues + if (isStaged) { + let summaryContent = "## 🎭 Staged Mode: Create Issues Preview\n\n"; + summaryContent += + "The following issues would be created if staged mode was disabled:\n\n"; + for (let i = 0; i < createIssueItems.length; i++) { + const item = createIssueItems[i]; + summaryContent += `### Issue ${i + 1}\n`; + summaryContent += `**Title:** ${item.title || "No title provided"}\n\n`; + if (item.body) { + summaryContent += `**Body:**\n${item.body}\n\n`; + } + if (item.labels && item.labels.length > 0) { + summaryContent += `**Labels:** ${item.labels.join(", ")}\n\n`; + } + summaryContent += "---\n\n"; + } + // Write to step 
summary + await core.summary.addRaw(summaryContent).write(); + core.info("📝 Issue creation preview written to step summary"); + return; + } + // Check if we're in an issue context (triggered by an issue event) + const parentIssueNumber = context.payload?.issue?.number; + // Parse labels from environment variable (comma-separated string) + const labelsEnv = process.env.GITHUB_AW_ISSUE_LABELS; + let envLabels = labelsEnv + ? labelsEnv + .split(",") + .map(/** @param {string} label */ label => label.trim()) + .filter(/** @param {string} label */ label => label) + : []; + const createdIssues = []; + // Process each create-issue item + for (let i = 0; i < createIssueItems.length; i++) { + const createIssueItem = createIssueItems[i]; + core.info( + `Processing create-issue item ${i + 1}/${createIssueItems.length}: title=${createIssueItem.title}, bodyLength=${createIssueItem.body.length}` + ); + // Merge environment labels with item-specific labels + let labels = [...envLabels]; + if (createIssueItem.labels && Array.isArray(createIssueItem.labels)) { + labels = [...labels, ...createIssueItem.labels].filter(Boolean); + } + // Extract title and body from the JSON item + let title = createIssueItem.title ? 
createIssueItem.title.trim() : ""; + let bodyLines = createIssueItem.body.split("\n"); + // If no title was found, use the body content as title (or a default) + if (!title) { + title = createIssueItem.body || "Agent Output"; + } + // Apply title prefix if provided via environment variable + const titlePrefix = process.env.GITHUB_AW_ISSUE_TITLE_PREFIX; + if (titlePrefix && !title.startsWith(titlePrefix)) { + title = titlePrefix + title; + } + if (parentIssueNumber) { + core.info("Detected issue context, parent issue #" + parentIssueNumber); + // Add reference to parent issue in the child issue body + bodyLines.push(`Related to #${parentIssueNumber}`); + } + // Add AI disclaimer with run id, run htmlurl + // Add AI disclaimer with workflow run information + const runId = context.runId; + const runUrl = context.payload.repository + ? `${context.payload.repository.html_url}/actions/runs/${runId}` + : `https://github.com/actions/runs/${runId}`; + bodyLines.push( + ``, + ``, + `> Generated by Agentic Workflow [Run](${runUrl})`, + "" + ); + // Prepare the body content + const body = bodyLines.join("\n").trim(); + core.info(`Creating issue with title: ${title}`); + core.info(`Labels: ${labels}`); + core.info(`Body length: ${body.length}`); + try { + // Create the issue using GitHub API + const { data: issue } = await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: title, + body: body, + labels: labels, + }); + core.info("Created issue #" + issue.number + ": " + issue.html_url); + createdIssues.push(issue); + // If we have a parent issue, add a comment to it referencing the new child issue + if (parentIssueNumber) { + try { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parentIssueNumber, + body: `Created related issue: #${issue.number}`, + }); + core.info("Added comment to parent issue #" + parentIssueNumber); + } catch (error) { + core.info( + `Warning: Could 
not add comment to parent issue: ${error instanceof Error ? error.message : String(error)}` + ); + } + } + // Set output for the last created issue (for backward compatibility) + if (i === createIssueItems.length - 1) { + core.setOutput("issue_number", issue.number); + core.setOutput("issue_url", issue.html_url); + } + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + // Special handling for disabled issues repository + if ( + errorMessage.includes("Issues has been disabled in this repository") + ) { + core.info( + `⚠ Cannot create issue "${title}": Issues are disabled for this repository` + ); + core.info( + "Consider enabling issues in repository settings if you want to create issues automatically" + ); + continue; // Skip this issue but continue processing others + } + core.error(`✗ Failed to create issue "${title}": ${errorMessage}`); + throw error; + } + } + // Write summary for all created issues + if (createdIssues.length > 0) { + let summaryContent = "\n\n## GitHub Issues\n"; + for (const issue of createdIssues) { + summaryContent += `- Issue #${issue.number}: [${issue.title}](${issue.html_url})\n`; + } + await core.summary.addRaw(summaryContent).write(); + } + core.info(`Successfully created ${createdIssues.length} issue(s)`); + } + await main(); + + push_to_orphaned_branch: + needs: dev + if: always() + runs-on: ubuntu-latest + permissions: + contents: write + actions: read + timeout-minutes: 10 + outputs: + commit_sha: ${{ steps.push_to_orphaned_branch.outputs.commit_sha }} + file_urls: ${{ steps.push_to_orphaned_branch.outputs.file_urls }} + uploaded_files: ${{ steps.push_to_orphaned_branch.outputs.uploaded_files }} + steps: + - name: Checkout repository + uses: actions/checkout@v5 + with: + fetch-depth: 0 + - name: Configure Git credentials + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "${{ github.workflow }}" + echo "Git 
configured with standard GitHub Actions identity" + - name: Push to Orphaned Branch + id: push_to_orphaned_branch + uses: actions/github-script@v7 + env: + GH_TOKEN: ${{ github.token }} + GITHUB_AW_AGENT_OUTPUT: ${{ needs.dev.outputs.output }} + GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT: 3 + GITHUB_AW_ORPHANED_BRANCH_NAME: assets/Dev + GITHUB_AW_SAFE_OUTPUTS_STAGED: "true" + with: + script: | + const { execSync } = require("child_process"); + const fs = require("fs"); + // Get environment variables + const agentOutput = process.env.GITHUB_AW_AGENT_OUTPUT || "{}"; + const maxCount = parseInt( + process.env.GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT || "1" + ); + const branchName = + process.env.GITHUB_AW_ORPHANED_BRANCH_NAME || "assets/workflow"; + const isStaged = process.env.GITHUB_AW_SAFE_OUTPUTS_STAGED === "true"; + const repo = context.repo; + const owner = context.repo.owner; + core.info(`Processing agent output for orphaned branch upload`); + core.info(`Repository: ${owner}/${repo.repo}`); + core.info(`Max files allowed: ${maxCount}`); + core.info(`Branch name: ${branchName}`); + let parsedOutput; + try { + parsedOutput = JSON.parse(agentOutput); + } catch (error) { + core.setFailed( + `Failed to parse agent output: ${error instanceof Error ? 
error.message : String(error)}` + ); + return; + } + // Extract push-to-orphaned-branch items + const orphanedBranchItems = (parsedOutput.items || []).filter( + item => item.type === "push-to-orphaned-branch" + ); + if (orphanedBranchItems.length === 0) { + core.info("No orphaned branch upload items found in agent output"); + return; + } + if (orphanedBranchItems.length > maxCount) { + core.setFailed( + `Too many files to upload: ${orphanedBranchItems.length} (max: ${maxCount})` + ); + return; + } + core.info( + `Found ${orphanedBranchItems.length} file(s) to upload to orphaned branch` + ); + const uploadedFiles = []; + const fileUrls = []; + let commitSha = null; + if (isStaged) { + // In staged mode, just show what would be uploaded + core.summary.addHeading("Orphaned Branch File Upload (Staged Mode)", 2); + core.summary.addRaw( + "The following files would be uploaded to an orphaned branch:\n\n" + ); + for (const item of orphanedBranchItems) { + const originalFilename = item.original_filename || item.filename; + const sha = item.sha || "unknown"; + core.summary.addRaw( + `- **${item.filename}** - SHA: ${sha} - Original: ${originalFilename}\n` + ); + uploadedFiles.push(item.filename); + fileUrls.push( + `https://raw.githubusercontent.com/${owner}/${repo.repo}/${branchName}/staged/${item.filename}` + ); + } + await core.summary.write(); + } else { + // Actually upload files to orphaned branch + try { + // Create or switch to orphaned branch + try { + execSync(`git checkout ${branchName}`, { stdio: "inherit" }); + core.info(`Switched to existing orphaned branch: ${branchName}`); + } catch (error) { + // Branch doesn't exist, create orphaned branch + execSync(`git checkout --orphan ${branchName}`, { stdio: "inherit" }); + execSync(`git rm -rf .`, { stdio: "inherit" }); + core.info(`Created new orphaned branch: ${branchName}`); + } + // Upload each file + for (const item of orphanedBranchItems) { + const { filename, 
original_filename, sha } = item; + if (!filename) { + core.warning(`Skipping invalid item: ${JSON.stringify(item)}`); + continue; + } + // Find the file in the artifact files directory + const filesDir = + process.env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR || + `${process.env.GITHUB_AW_SAFE_OUTPUTS_DIR || "/tmp/gh-aw/safe-outputs"}/files`; + const sourceFile = `${filesDir}/${filename}`; + if (!fs.existsSync(sourceFile)) { + core.setFailed(`File not found in artifact: ${sourceFile}`); + return; + } + // Read the file and validate SHA + const fileBuffer = fs.readFileSync(sourceFile); + const crypto = require("crypto"); + const computedHash = crypto.createHash("sha256"); + computedHash.update(fileBuffer); + const computedSha = computedHash.digest("hex"); + const fileSha = sha || "unknown"; + if (fileSha !== "unknown" && fileSha !== computedSha) { + core.setFailed( + `SHA validation failed for ${filename}. Expected: ${fileSha}, Computed: ${computedSha}` + ); + return; + } + // Use the SHA-based filename directly (it already includes the extension) + const safeFilename = filename.replace(/[^a-zA-Z0-9._-]/g, "_"); + // Copy file to working directory for git operations + fs.copyFileSync(sourceFile, safeFilename); + const originalName = original_filename || filename; + core.info( + `Created file: ${safeFilename} (${fileBuffer.length} bytes) - SHA: ${fileSha} - Original: ${originalName}` + ); + // Add to git + execSync(`git add ${safeFilename}`, { stdio: "inherit" }); + uploadedFiles.push(safeFilename); + } + // Commit files + const fileList = uploadedFiles + .map(filename => { + const item = orphanedBranchItems.find( + i => i.filename.replace(/[^a-zA-Z0-9._-]/g, "_") === filename + ); + const originalName = item?.original_filename || filename; + const sha = item?.sha || "unknown"; + return `${filename} (${originalName}, SHA: ${sha.substring(0, 8)})`; + }) + .join(", "); + const commitMessage = `Upload ${uploadedFiles.length} file(s) to orphaned branch\n\nFiles: ${fileList}`; + 
execSync(`git commit -m "${commitMessage}"`, { stdio: "inherit" }); + // Push to remote + execSync(`git push origin ${branchName}`, { stdio: "inherit" }); + // Get the commit SHA + commitSha = execSync(`git rev-parse HEAD`, { + encoding: "utf8", + }).trim(); + core.info(`Pushed to orphaned branch with commit: ${commitSha}`); + // Generate GitHub raw URLs using branch name + for (const filename of uploadedFiles) { + const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo.repo}/${branchName}/${filename}`; + fileUrls.push(rawUrl); + core.info(`File URL: ${rawUrl}`); + } + // Add summary + core.summary.addHeading("Files Uploaded to Orphaned Branch", 2); + core.summary.addRaw( + `Successfully uploaded ${uploadedFiles.length} file(s) to orphaned branch \`${branchName}\`\n\n` + ); + core.summary.addRaw(`**Commit:** \`${commitSha}\`\n\n`); + core.summary.addRaw("**Files:**\n"); + for (let i = 0; i < uploadedFiles.length; i++) { + const filename = uploadedFiles[i]; + const item = orphanedBranchItems.find( + item => item.filename.replace(/[^a-zA-Z0-9._-]/g, "_") === filename + ); + const originalName = item?.original_filename || filename; + const sha = item?.sha || "unknown"; + core.summary.addRaw( + `- [${filename}](${fileUrls[i]}) - Original: ${originalName} - SHA: ${sha.substring(0, 8)}\n` + ); + } + await core.summary.write(); + } catch (error) { + core.setFailed( + `Failed to upload files to orphaned branch: ${error instanceof Error ? 
error.message : String(error)}` + ); + return; + } + } + // Set outputs + core.setOutput("uploaded_files", JSON.stringify(uploadedFiles)); + core.setOutput("file_urls", JSON.stringify(fileUrls)); + if (commitSha) { + core.setOutput("commit_sha", commitSha); + } + core.info( + `Successfully processed ${uploadedFiles.length} file(s) for orphaned branch upload` + ); + missing_tool: needs: dev if: ${{ always() }} diff --git a/.github/workflows/dev.md b/.github/workflows/dev.md index 36f87303e2..dd009333ec 100644 --- a/.github/workflows/dev.md +++ b/.github/workflows/dev.md @@ -5,7 +5,15 @@ on: branches: - copilot/* - pelikhan/* +tools: + playwright: + docker_image_version: "v1.41.0" + allowed_domains: ["localhost", "127.0.0.1", "*.github.com", "github.com"] safe-outputs: + push-to-orphaned-branch: + max: 3 + create-issue: + max: 1 missing-tool: staged: true engine: @@ -14,4 +22,10 @@ engine: permissions: read-all --- -Try to call a tool, `draw_pelican` that draws a pelican. \ No newline at end of file +You have access to a `push-to-orphaned-branch` tool that can upload files (like screenshots) to an orphaned branch and return a GitHub raw URL. Use the expected URL from the response in your issue descriptions. + +Please: +1. Build the documentation by running appropriate build commands +2. Take a screenshot of the documentation using playwright +3. Upload the screenshot using the push-to-orphaned-branch tool - it will give you a URL to use +4. 
Create an issue describing the documentation status and include the screenshot using the URL provided by the upload tool \ No newline at end of file diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index cf8df75895..732cf870a7 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -1577,6 +1577,32 @@ } ] }, + "push-to-orphaned-branch": { + "oneOf": [ + { + "type": "null", + "description": "Enable orphaned branch file upload with default configuration (max: 1, branch: assets/[workflow-id])" + }, + { + "type": "object", + "description": "Configuration for uploading files to an orphaned branch from agentic workflow output. Screenshots and images can be uploaded using this safe output.", + "properties": { + "max": { + "type": "integer", + "description": "Maximum number of files to upload (default: 1)", + "minimum": 1, + "maximum": 100 + }, + "branch": { + "type": "string", + "description": "Branch name for storing uploaded files (default: assets/[workflow-id])", + "minLength": 1 + } + }, + "additionalProperties": false + } + ] + }, "missing-tool": { "oneOf": [ { diff --git a/pkg/workflow/claude_engine.go b/pkg/workflow/claude_engine.go index 4aded7ebfe..6e34b534d7 100644 --- a/pkg/workflow/claude_engine.go +++ b/pkg/workflow/claude_engine.go @@ -185,6 +185,8 @@ func (e *ClaudeEngine) GetExecutionSteps(workflowData *WorkflowData, logFile str if workflowData.SafeOutputs != nil { stepLines = append(stepLines, " GITHUB_AW_SAFE_OUTPUTS: ${{ env.GITHUB_AW_SAFE_OUTPUTS }}") + stepLines = append(stepLines, " GITHUB_AW_SAFE_OUTPUTS_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}") + stepLines = append(stepLines, " GITHUB_AW_SAFE_OUTPUTS_FILES_DIR: ${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}") // Add staged flag if specified if workflowData.SafeOutputs.Staged != nil && *workflowData.SafeOutputs.Staged { @@ -565,6 +567,8 @@ func (e *ClaudeEngine) RenderMCPConfig(yaml 
*strings.Builder, tools map[string]a yaml.WriteString(" \"args\": [\"/tmp/safe-outputs/mcp-server.cjs\"],\n") yaml.WriteString(" \"env\": {\n") yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS }}\",\n") + yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_DIR\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}\",\n") + yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_FILES_DIR\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}\",\n") yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_CONFIG\": ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }}\n") yaml.WriteString(" }\n") serverCount++ diff --git a/pkg/workflow/codex_engine.go b/pkg/workflow/codex_engine.go index e61026fb6b..5ef43c3570 100644 --- a/pkg/workflow/codex_engine.go +++ b/pkg/workflow/codex_engine.go @@ -116,6 +116,8 @@ codex exec \ hasOutput := workflowData.SafeOutputs != nil if hasOutput { env["GITHUB_AW_SAFE_OUTPUTS"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS }}" + env["GITHUB_AW_SAFE_OUTPUTS_DIR"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}" + env["GITHUB_AW_SAFE_OUTPUTS_FILES_DIR"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}" // Add staged flag if specified if workflowData.SafeOutputs.Staged != nil && *workflowData.SafeOutputs.Staged { @@ -229,7 +231,7 @@ func (e *CodexEngine) RenderMCPConfig(yaml *strings.Builder, tools map[string]an yaml.WriteString(" args = [\n") yaml.WriteString(" \"/tmp/safe-outputs/mcp-server.cjs\",\n") yaml.WriteString(" ]\n") - yaml.WriteString(" env = { \"GITHUB_AW_SAFE_OUTPUTS\" = \"${{ env.GITHUB_AW_SAFE_OUTPUTS }}\", \"GITHUB_AW_SAFE_OUTPUTS_CONFIG\" = ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }} }\n") + yaml.WriteString(" env = { \"GITHUB_AW_SAFE_OUTPUTS\" = \"${{ env.GITHUB_AW_SAFE_OUTPUTS }}\", \"GITHUB_AW_SAFE_OUTPUTS_DIR\" = \"${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}\", \"GITHUB_AW_SAFE_OUTPUTS_FILES_DIR\" = \"${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}\", \"GITHUB_AW_SAFE_OUTPUTS_CONFIG\" = ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }} }\n") } default: // Handle custom 
MCP tools (those with MCP-compatible type) diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go index 5011a48f6a..bd2642124c 100644 --- a/pkg/workflow/compiler.go +++ b/pkg/workflow/compiler.go @@ -171,6 +171,7 @@ type SafeOutputsConfig struct { AddIssueLabels *AddIssueLabelsConfig `yaml:"add-issue-label,omitempty"` UpdateIssues *UpdateIssuesConfig `yaml:"update-issue,omitempty"` PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pr-branch,omitempty"` + PushToOrphanedBranch *PushToOrphanedBranchConfig `yaml:"push-to-orphaned-branch,omitempty"` MissingTool *MissingToolConfig `yaml:"missing-tool,omitempty"` // Optional for reporting missing functionality AllowedDomains []string `yaml:"allowed-domains,omitempty"` Staged *bool `yaml:"staged,omitempty"` // If true, emit step summary messages instead of making GitHub API calls @@ -245,6 +246,12 @@ type PushToPullRequestBranchConfig struct { IfNoChanges string `yaml:"if-no-changes,omitempty"` // Behavior when no changes to push: "warn", "error", or "ignore" (default: "warn") } +// PushToOrphanedBranchConfig holds configuration for uploading files to an orphaned branch +type PushToOrphanedBranchConfig struct { + Max int `yaml:"max,omitempty"` // Maximum number of files to upload (default: 1) + Branch string `yaml:"branch,omitempty"` // Branch name for storing uploaded files (default: assets/[workflow-id]) +} + // MissingToolConfig holds configuration for reporting missing tools or functionality type MissingToolConfig struct { Max int `yaml:"max,omitempty"` // Maximum number of missing tool reports (default: unlimited) @@ -1957,6 +1964,17 @@ func (c *Compiler) buildJobs(data *WorkflowData, markdownPath string) error { } } + // Build push_to_orphaned_branch job if output.push-to-orphaned-branch is configured + if data.SafeOutputs.PushToOrphanedBranch != nil { + pushToOrphanedBranchJob, err := c.buildCreateOutputPushToOrphanedBranchJob(data, jobName) + if err != nil { + return fmt.Errorf("failed 
to build push_to_orphaned_branch job: %w", err) + } + if err := c.jobManager.AddJob(pushToOrphanedBranchJob); err != nil { + return fmt.Errorf("failed to add push_to_orphaned_branch job: %w", err) + } + } + // Build missing_tool job (always enabled when SafeOutputs exists) if data.SafeOutputs.MissingTool != nil { missingToolJob, err := c.buildCreateOutputMissingToolJob(data, jobName) @@ -3542,6 +3560,14 @@ func (c *Compiler) generatePrompt(yaml *strings.Builder, data *WorkflowData) { written = true } + if data.SafeOutputs.PushToOrphanedBranch != nil { + if written { + yaml.WriteString(", ") + } + yaml.WriteString("Uploading Files to Orphaned Branch") + written = true + } + if data.SafeOutputs.CreateCodeScanningAlerts != nil { if written { yaml.WriteString(", ") @@ -3721,6 +3747,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut config.PushToPullRequestBranch = pushToBranchConfig } + // Handle push-to-orphaned-branch + pushToOrphanedBranchConfig := c.parsePushToOrphanedBranchConfig(outputMap) + if pushToOrphanedBranchConfig != nil { + config.PushToOrphanedBranch = pushToOrphanedBranchConfig + } + // Handle missing-tool (parse configuration if present) missingToolConfig := c.parseMissingToolConfig(outputMap) if missingToolConfig != nil { @@ -4155,6 +4187,61 @@ func (c *Compiler) parsePushToPullRequestBranchConfig(outputMap map[string]any) return nil } +// parsePushToOrphanedBranchConfig handles push-to-orphaned-branch configuration +func (c *Compiler) parsePushToOrphanedBranchConfig(outputMap map[string]any) *PushToOrphanedBranchConfig { + if configData, exists := outputMap["push-to-orphaned-branch"]; exists { + pushToOrphanedBranchConfig := &PushToOrphanedBranchConfig{ + Max: 1, // Default: 1 file upload + } + + // Handle the case where configData is nil (push-to-orphaned-branch: with no value) + if configData == nil { + return pushToOrphanedBranchConfig + } + + if configMap, ok := configData.(map[string]any); ok { + // Parse max 
(optional, defaults to 1) + if maxCount, exists := configMap["max"]; exists { + // Handle different numeric types that YAML parsers might return + var maxCountInt int + var validMaxCount bool + switch v := maxCount.(type) { + case int: + maxCountInt = v + validMaxCount = true + case int64: + maxCountInt = int(v) + validMaxCount = true + case uint64: + maxCountInt = int(v) + validMaxCount = true + case float64: + maxCountInt = int(v) + validMaxCount = true + } + if validMaxCount && maxCountInt > 0 { + pushToOrphanedBranchConfig.Max = maxCountInt + } else if c.verbose { + fmt.Printf("Warning: invalid max value for push-to-orphaned-branch, using default 1\n") + } + } + + // Parse branch (optional, defaults to assets/[workflow-id]) + if branch, exists := configMap["branch"]; exists { + if branchStr, ok := branch.(string); ok && branchStr != "" { + pushToOrphanedBranchConfig.Branch = branchStr + } else if c.verbose { + fmt.Printf("Warning: invalid branch value for push-to-orphaned-branch, using default\n") + } + } + } + + return pushToOrphanedBranchConfig + } + + return nil +} + // parseMissingToolConfig handles missing-tool configuration func (c *Compiler) parseMissingToolConfig(outputMap map[string]any) *MissingToolConfig { if configData, exists := outputMap["missing-tool"]; exists { @@ -4420,6 +4507,18 @@ func (c *Compiler) generateSafeOutputsConfig(data *WorkflowData) string { } safeOutputsConfig["push-to-pr-branch"] = pushToBranchConfig } + if data.SafeOutputs.PushToOrphanedBranch != nil { + pushToOrphanedBranchConfig := map[string]interface{}{ + "enabled": true, + } + if data.SafeOutputs.PushToOrphanedBranch.Max > 0 { + pushToOrphanedBranchConfig["max"] = data.SafeOutputs.PushToOrphanedBranch.Max + } + if data.SafeOutputs.PushToOrphanedBranch.Branch != "" { + pushToOrphanedBranchConfig["branch"] = data.SafeOutputs.PushToOrphanedBranch.Branch + } + safeOutputsConfig["push-to-orphaned-branch"] = pushToOrphanedBranchConfig + } if data.SafeOutputs.MissingTool != nil { 
missingToolConfig := map[string]interface{}{ "enabled": true, @@ -4459,7 +4558,7 @@ func (c *Compiler) generateOutputCollectionStep(yaml *strings.Builder, data *Wor yaml.WriteString(" uses: actions/upload-artifact@v4\n") yaml.WriteString(" with:\n") fmt.Fprintf(yaml, " name: %s\n", OutputArtifactName) - yaml.WriteString(" path: ${{ env.GITHUB_AW_SAFE_OUTPUTS }}\n") + yaml.WriteString(" path: ${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}\n") yaml.WriteString(" if-no-files-found: warn\n") yaml.WriteString(" - name: Ingest agent output\n") diff --git a/pkg/workflow/custom_engine.go b/pkg/workflow/custom_engine.go index b956e65ca8..4c1a9b43d2 100644 --- a/pkg/workflow/custom_engine.go +++ b/pkg/workflow/custom_engine.go @@ -52,6 +52,8 @@ func (e *CustomEngine) GetExecutionSteps(workflowData *WorkflowData, logFile str // Add GITHUB_AW_SAFE_OUTPUTS if safe-outputs feature is used if workflowData.SafeOutputs != nil { envVars["GITHUB_AW_SAFE_OUTPUTS"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS }}" + envVars["GITHUB_AW_SAFE_OUTPUTS_DIR"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}" + envVars["GITHUB_AW_SAFE_OUTPUTS_FILES_DIR"] = "${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}" // Add staged flag if specified if workflowData.SafeOutputs.Staged != nil && *workflowData.SafeOutputs.Staged { @@ -156,6 +158,8 @@ func (e *CustomEngine) RenderMCPConfig(yaml *strings.Builder, tools map[string]a yaml.WriteString(" \"args\": [\"/tmp/safe-outputs/mcp-server.cjs\"],\n") yaml.WriteString(" \"env\": {\n") yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS }}\",\n") + yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_DIR\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS_DIR }}\",\n") + yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_FILES_DIR\": \"${{ env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR }}\",\n") yaml.WriteString(" \"GITHUB_AW_SAFE_OUTPUTS_CONFIG\": ${{ toJSON(env.GITHUB_AW_SAFE_OUTPUTS_CONFIG) }}\n") yaml.WriteString(" }\n") serverCount++ diff --git a/pkg/workflow/js.go b/pkg/workflow/js.go index 
f8468187bd..13999f41dd 100644 --- a/pkg/workflow/js.go +++ b/pkg/workflow/js.go @@ -39,6 +39,9 @@ var updateIssueScript string //go:embed js/push_to_pr_branch.cjs var pushToBranchScript string +//go:embed js/push_to_orphaned_branch.cjs +var pushToOrphanedBranchScript string + //go:embed js/setup_agent_output.cjs var setupAgentOutputScript string diff --git a/pkg/workflow/js/push_to_orphaned_branch.cjs b/pkg/workflow/js/push_to_orphaned_branch.cjs new file mode 100644 index 0000000000..23fa45b689 --- /dev/null +++ b/pkg/workflow/js/push_to_orphaned_branch.cjs @@ -0,0 +1,209 @@ +const { execSync } = require("child_process"); +const fs = require("fs"); + +// Get environment variables +const agentOutput = process.env.GITHUB_AW_AGENT_OUTPUT || "{}"; +const maxCount = parseInt( + process.env.GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT || "1" +); +const branchName = + process.env.GITHUB_AW_ORPHANED_BRANCH_NAME || "assets/workflow"; +const isStaged = process.env.GITHUB_AW_SAFE_OUTPUTS_STAGED === "true"; + +const repo = context.repo; +const owner = context.repo.owner; + +core.info(`Processing agent output for orphaned branch upload`); +core.info(`Repository: ${owner}/${repo.repo}`); +core.info(`Max files allowed: ${maxCount}`); +core.info(`Branch name: ${branchName}`); + +let parsedOutput; +try { + parsedOutput = JSON.parse(agentOutput); +} catch (error) { + core.setFailed( + `Failed to parse agent output: ${error instanceof Error ? 
error.message : String(error)}` + ); + return; +} + +// Extract push-to-orphaned-branch items +const orphanedBranchItems = (parsedOutput.items || []).filter( + item => item.type === "push-to-orphaned-branch" +); + +if (orphanedBranchItems.length === 0) { + core.info("No orphaned branch upload items found in agent output"); + return; +} + +if (orphanedBranchItems.length > maxCount) { + core.setFailed( + `Too many files to upload: ${orphanedBranchItems.length} (max: ${maxCount})` + ); + return; +} + +core.info( + `Found ${orphanedBranchItems.length} file(s) to upload to orphaned branch` +); + +const uploadedFiles = []; +const fileUrls = []; +let commitSha = null; + +if (isStaged) { + // In staged mode, just show what would be uploaded + core.summary.addHeading("Orphaned Branch File Upload (Staged Mode)", 2); + core.summary.addRaw( + "The following files would be uploaded to an orphaned branch:\n\n" + ); + + for (const item of orphanedBranchItems) { + const originalFilename = item.original_filename || item.filename; + const sha = item.sha || "unknown"; + core.summary.addRaw( + `- **${item.filename}** (${Math.round(item.content.length * 0.75)} bytes) - SHA: ${sha} - Original: ${originalFilename}\n` + ); + uploadedFiles.push(item.filename); + fileUrls.push( + `https://raw.githubusercontent.com/${owner}/${repo.repo}/${branchName}/staged/${item.filename}` + ); + } + + await core.summary.write(); +} else { + // Actually upload files to orphaned branch + try { + // Create or switch to orphaned branch + try { + execSync(`git checkout ${branchName}`, { stdio: "inherit" }); + core.info(`Switched to existing orphaned branch: ${branchName}`); + } catch (error) { + // Branch doesn't exist, create orphaned branch + execSync(`git checkout --orphan ${branchName}`, { stdio: "inherit" }); + execSync(`git rm -rf .`, { stdio: "inherit" }); + core.info(`Created new orphaned branch: ${branchName}`); + } + + // Upload each file + for (const item of orphanedBranchItems) { + const { 
filename, original_filename, sha } = item; + + if (!filename) { + core.warning(`Skipping invalid item: ${JSON.stringify(item)}`); + continue; + } + + // Find the file in the artifact files directory + const filesDir = + process.env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR || + `${process.env.GITHUB_AW_SAFE_OUTPUTS_DIR || "/tmp/gh-aw/safe-outputs"}/files`; + const sourceFile = `${filesDir}/${filename}`; + + if (!fs.existsSync(sourceFile)) { + core.setFailed(`File not found in artifact: ${sourceFile}`); + return; + } + + // Read the file and validate SHA + const fileBuffer = fs.readFileSync(sourceFile); + const crypto = require("crypto"); + const computedHash = crypto.createHash("sha256"); + computedHash.update(fileBuffer); + const computedSha = computedHash.digest("hex"); + + const fileSha = sha || "unknown"; + if (fileSha !== "unknown" && fileSha !== computedSha) { + core.setFailed( + `SHA validation failed for ${filename}. Expected: ${fileSha}, Computed: ${computedSha}` + ); + return; + } + + // Use the SHA-based filename directly (it already includes the extension) + const safeFilename = filename.replace(/[^a-zA-Z0-9._-]/g, "_"); + + // Copy file to working directory for git operations + fs.copyFileSync(sourceFile, safeFilename); + const originalName = original_filename || filename; + core.info( + `Created file: ${safeFilename} (${fileBuffer.length} bytes) - SHA: ${fileSha} - Original: ${originalName}` + ); + + // Add to git + execSync(`git add ${safeFilename}`, { stdio: "inherit" }); + + uploadedFiles.push(safeFilename); + } + + // Commit files + const fileList = uploadedFiles + .map(filename => { + const item = orphanedBranchItems.find( + i => i.filename.replace(/[^a-zA-Z0-9._-]/g, "_") === filename + ); + const originalName = item?.original_filename || filename; + const sha = item?.sha || "unknown"; + return `${filename} (${originalName}, SHA: ${sha.substring(0, 8)})`; + }) + .join(", "); + const commitMessage = `Upload ${uploadedFiles.length} file(s) to orphaned 
branch\n\nFiles: ${fileList}`; + execSync(`git commit -m "${commitMessage}"`, { stdio: "inherit" }); + + // Push to remote + execSync(`git push origin ${branchName}`, { stdio: "inherit" }); + + // Get the commit SHA + commitSha = execSync(`git rev-parse HEAD`, { + encoding: "utf8", + }).trim(); + core.info(`Pushed to orphaned branch with commit: ${commitSha}`); + + // Generate GitHub raw URLs using branch name + for (const filename of uploadedFiles) { + const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo.repo}/${branchName}/${filename}`; + fileUrls.push(rawUrl); + core.info(`File URL: ${rawUrl}`); + } + + // Add summary + core.summary.addHeading("Files Uploaded to Orphaned Branch", 2); + core.summary.addRaw( + `Successfully uploaded ${uploadedFiles.length} file(s) to orphaned branch \`${branchName}\`\n\n` + ); + core.summary.addRaw(`**Commit:** \`${commitSha}\`\n\n`); + core.summary.addRaw("**Files:**\n"); + + for (let i = 0; i < uploadedFiles.length; i++) { + const filename = uploadedFiles[i]; + const item = orphanedBranchItems.find( + item => item.filename.replace(/[^a-zA-Z0-9._-]/g, "_") === filename + ); + const originalName = item?.original_filename || filename; + const sha = item?.sha || "unknown"; + core.summary.addRaw( + `- [${filename}](${fileUrls[i]}) - Original: ${originalName} - SHA: ${sha.substring(0, 8)}\n` + ); + } + + await core.summary.write(); + } catch (error) { + core.setFailed( + `Failed to upload files to orphaned branch: ${error instanceof Error ? 
error.message : String(error)}` + ); + return; + } +} + +// Set outputs +core.setOutput("uploaded_files", JSON.stringify(uploadedFiles)); +core.setOutput("file_urls", JSON.stringify(fileUrls)); +if (commitSha) { + core.setOutput("commit_sha", commitSha); +} + +core.info( + `Successfully processed ${uploadedFiles.length} file(s) for orphaned branch upload` +); diff --git a/pkg/workflow/js/safe_outputs_mcp_server.cjs b/pkg/workflow/js/safe_outputs_mcp_server.cjs index 926cd3ab22..d44495f95c 100644 --- a/pkg/workflow/js/safe_outputs_mcp_server.cjs +++ b/pkg/workflow/js/safe_outputs_mcp_server.cjs @@ -6,6 +6,18 @@ const safeOutputsConfig = JSON.parse(configEnv); const outputFile = process.env.GITHUB_AW_SAFE_OUTPUTS; if (!outputFile) throw new Error("GITHUB_AW_SAFE_OUTPUTS not set, no output file"); + +// Validate required directory environment variables +const safeOutputsDir = process.env.GITHUB_AW_SAFE_OUTPUTS_DIR; +if (!safeOutputsDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_DIR not set, no safe outputs directory" + ); +const filesDir = process.env.GITHUB_AW_SAFE_OUTPUTS_FILES_DIR; +if (!filesDir) + throw new Error( + "GITHUB_AW_SAFE_OUTPUTS_FILES_DIR not set, no files directory" + ); const SERVER_INFO = { name: "safe-outputs-mcp-server", version: "1.0.0" }; const debug = msg => process.stderr.write(`[${SERVER_INFO.name}] ${msg}\n`); function writeMessage(obj) { @@ -324,6 +336,116 @@ const TOOLS = Object.fromEntries( additionalProperties: false, }, }, + { + name: "push-to-orphaned-branch", + description: + "Upload a file to an orphaned branch and get a GitHub raw URL", + inputSchema: { + type: "object", + required: ["filename"], + properties: { + filename: { + type: "string", + description: + "Name of the file to upload. 
Screenshots and images can be uploaded using this safe output.", + }, + }, + additionalProperties: false, + }, + handler: args => { + const fs = require("fs"); + const path = require("path"); + const crypto = require("crypto"); + + const { filename } = args; + if (!filename) { + throw new Error("filename is required"); + } + + // Check if file exists + if (!fs.existsSync(filename)) { + throw new Error(`File not found: ${filename}`); + } + + // Read file and encode as base64 + const fileContent = fs.readFileSync(filename); + + // Check file size (10MB limit) + const fileSizeBytes = fileContent.length; + const maxSizeBytes = 10 * 1024 * 1024; // 10MB + if (fileSizeBytes > maxSizeBytes) { + throw new Error( + `File size ${Math.round(fileSizeBytes / 1024 / 1024)}MB exceeds 10MB limit` + ); + } + + // Compute SHA256 hash of the file content + const hash = crypto.createHash("sha256"); + hash.update(fileContent); + const fileSha = hash.digest("hex"); + + // Get file extension from original filename + const originalExtension = path.extname(filename); + + // Validate file extension is reasonable (up to 5 alphanumeric characters) + if (originalExtension) { + const extWithoutDot = originalExtension.slice(1); // Remove the leading dot + if ( + extWithoutDot.length > 5 || + !/^[a-zA-Z0-9]+$/.test(extWithoutDot) + ) { + throw new Error( + `File extension '${originalExtension}' is not allowed. 
Extension must be up to 5 alphanumeric characters.` + ); + } + } + + const shaFilename = fileSha + originalExtension; + + // Copy file to safe outputs files directory with SHA-based filename + const targetFile = path.join(filesDir, shaFilename); + + // Ensure directory exists + fs.mkdirSync(filesDir, { recursive: true }); + + // Copy the file + fs.copyFileSync(filename, targetFile); + + // Create the output entry without base64 content (file is now copied to safe outputs dir) + const entry = { + type: "push-to-orphaned-branch", + filename: shaFilename, + original_filename: path.basename(filename), + sha: fileSha, + }; + + appendSafeOutput(entry); + + // Get branch configuration if available + const branchConfig = + safeOutputsConfig["push-to-orphaned-branch"]?.branch; + const branchName = branchConfig || "assets/{workflow-name}"; + + // Get repository information from environment or use placeholders + const owner = process.env.GITHUB_REPOSITORY_OWNER || "{owner}"; + const repo = process.env.GITHUB_REPOSITORY + ? process.env.GITHUB_REPOSITORY.split("/")[1] + : "{repo}"; + + // Create template URL (will be resolved during GitHub Actions execution) + const templateUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branchName}/${shaFilename}`; + + // Return response with SHA information and expected URL + return { + content: [ + { + type: "text", + text: `File uploaded successfully. 
SHA: ${fileSha}, Original filename: ${path.basename(filename)}, Expected URL: ${templateUrl}`, + }, + ], + }; + }, + }, { name: "missing-tool", description: diff --git a/pkg/workflow/js/setup_agent_output.cjs b/pkg/workflow/js/setup_agent_output.cjs index dfe4492e54..2407db6dfb 100644 --- a/pkg/workflow/js/setup_agent_output.cjs +++ b/pkg/workflow/js/setup_agent_output.cjs @@ -2,22 +2,28 @@ function main() { const fs = require("fs"); const crypto = require("crypto"); - // Generate a random filename for the output file - const randomId = crypto.randomBytes(8).toString("hex"); - const outputFile = `/tmp/aw_output_${randomId}.txt`; + // Create the safe outputs directory structure + const safeOutputsDir = "/tmp/gh-aw/safe-outputs"; + const filesDir = `${safeOutputsDir}/files`; + const outputFile = `${safeOutputsDir}/safe_outputs.jsonl`; - // Ensure the /tmp directory exists - fs.mkdirSync("/tmp", { recursive: true }); + // Ensure the safe outputs directory structure exists + fs.mkdirSync(safeOutputsDir, { recursive: true }); + fs.mkdirSync(filesDir, { recursive: true }); // We don't create the file, as the name is sufficiently random // and some engines (Claude) fails first Write to the file // if it exists and has not been read. 
- // Set the environment variable for subsequent steps + // Set the environment variables for subsequent steps core.exportVariable("GITHUB_AW_SAFE_OUTPUTS", outputFile); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_DIR", safeOutputsDir); + core.exportVariable("GITHUB_AW_SAFE_OUTPUTS_FILES_DIR", filesDir); // Also set as step output for reference core.setOutput("output_file", outputFile); + core.setOutput("output_dir", safeOutputsDir); + core.setOutput("files_dir", filesDir); } main(); diff --git a/pkg/workflow/js/types/safe-outputs.d.ts b/pkg/workflow/js/types/safe-outputs.d.ts index 257cacc336..cdcd78e02c 100644 --- a/pkg/workflow/js/types/safe-outputs.d.ts +++ b/pkg/workflow/js/types/safe-outputs.d.ts @@ -119,6 +119,17 @@ interface UpdateIssueItem extends BaseSafeOutputItem { issue_number?: number | string; } +/** + * JSONL item for pushing to an orphaned branch + */ +interface PushToOrphanedBranchItem extends BaseSafeOutputItem { + type: "push-to-orphaned-branch"; + /** Name of the file to upload. Screenshots and images can be uploaded using this safe output. 
*/ + filename: string; + /** Base64 encoded file content */ + content: string; +} + /** * JSONL item for pushing to a PR branch */ @@ -156,6 +167,7 @@ type SafeOutputItem = | AddIssueLabelItem | UpdateIssueItem | PushToPrBranchItem + | PushToOrphanedBranchItem | MissingToolItem; @@ -173,6 +185,7 @@ export { AddIssueLabelItem, UpdateIssueItem, PushToPrBranchItem, + PushToOrphanedBranchItem, MissingToolItem, SafeOutputItem, }; diff --git a/pkg/workflow/output_push_to_orphaned_branch.go b/pkg/workflow/output_push_to_orphaned_branch.go new file mode 100644 index 0000000000..4d4d094c53 --- /dev/null +++ b/pkg/workflow/output_push_to_orphaned_branch.go @@ -0,0 +1,96 @@ +package workflow + +import ( + "fmt" +) + +// buildCreateOutputPushToOrphanedBranchJob creates the push_to_orphaned_branch job +func (c *Compiler) buildCreateOutputPushToOrphanedBranchJob(data *WorkflowData, mainJobName string) (*Job, error) { + if data.SafeOutputs == nil || data.SafeOutputs.PushToOrphanedBranch == nil { + return nil, fmt.Errorf("safe-outputs.push-to-orphaned-branch configuration is required") + } + + var steps []string + + // Step 1: Checkout repository + steps = append(steps, " - name: Checkout repository\n") + steps = append(steps, " uses: actions/checkout@v5\n") + steps = append(steps, " with:\n") + steps = append(steps, " fetch-depth: 0\n") + + // Step 2: Configure Git credentials + steps = append(steps, c.generateGitConfigurationSteps()...) 
+ + // Step 3: Push to Orphaned Branch + steps = append(steps, " - name: Push to Orphaned Branch\n") + steps = append(steps, " id: push_to_orphaned_branch\n") + steps = append(steps, " uses: actions/github-script@v7\n") + + // Add environment variables + steps = append(steps, " env:\n") + // Add GH_TOKEN for authentication + steps = append(steps, " GH_TOKEN: ${{ github.token }}\n") + // Pass the agent output content from the main job + steps = append(steps, fmt.Sprintf(" GITHUB_AW_AGENT_OUTPUT: ${{ needs.%s.outputs.output }}\n", mainJobName)) + // Pass the max count configuration + maxCount := 1 + if data.SafeOutputs.PushToOrphanedBranch.Max > 0 { + maxCount = data.SafeOutputs.PushToOrphanedBranch.Max + } + steps = append(steps, fmt.Sprintf(" GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT: %d\n", maxCount)) + + // Pass the branch configuration + branchName := fmt.Sprintf("assets/%s", data.Name) + if data.SafeOutputs.PushToOrphanedBranch.Branch != "" { + branchName = data.SafeOutputs.PushToOrphanedBranch.Branch + } + steps = append(steps, fmt.Sprintf(" GITHUB_AW_ORPHANED_BRANCH_NAME: %s\n", branchName)) + + // Pass the staged flag if it's set to true + if data.SafeOutputs.Staged != nil && *data.SafeOutputs.Staged { + steps = append(steps, " GITHUB_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") + } + + // Add custom environment variables from safe-outputs.env + c.addCustomSafeOutputEnvVars(&steps, data) + + steps = append(steps, " with:\n") + // Add github-token if specified + c.addSafeOutputGitHubToken(&steps, data) + steps = append(steps, " script: |\n") + + // Add each line of the script with proper indentation + formattedScript := FormatJavaScriptForYAML(pushToOrphanedBranchScript) + steps = append(steps, formattedScript...) 
+ + // Create outputs for the job + outputs := map[string]string{ + "uploaded_files": "${{ steps.push_to_orphaned_branch.outputs.uploaded_files }}", + "file_urls": "${{ steps.push_to_orphaned_branch.outputs.file_urls }}", + "commit_sha": "${{ steps.push_to_orphaned_branch.outputs.commit_sha }}", + } + + // This job can run in any context since it only uploads files to orphaned branches + jobCondition := "always()" + + // If this is a command workflow, add the command trigger condition + if data.Command != "" { + // Build the command trigger condition + commandCondition := buildCommandOnlyCondition(data.Command) + commandConditionStr := commandCondition.Render() + jobCondition = commandConditionStr + } + + job := &Job{ + Name: "push_to_orphaned_branch", + If: jobCondition, + RunsOn: "runs-on: ubuntu-latest", + Permissions: "permissions:\n contents: write\n actions: read", + TimeoutMinutes: 10, // 10-minute timeout as required + Steps: steps, + Outputs: outputs, + Needs: []string{mainJobName}, // Depend on the main workflow job + } + + return job, nil +} diff --git a/pkg/workflow/output_push_to_orphaned_branch_test.go b/pkg/workflow/output_push_to_orphaned_branch_test.go new file mode 100644 index 0000000000..2c9926fc55 --- /dev/null +++ b/pkg/workflow/output_push_to_orphaned_branch_test.go @@ -0,0 +1,185 @@ +package workflow + +import ( + "strings" + "testing" +) + +func TestBuildCreateOutputPushToOrphanedBranchJob(t *testing.T) { + compiler := NewCompiler(false, "", "1.0.0") + + t.Run("basic_configuration", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + SafeOutputs: &SafeOutputsConfig{ + PushToOrphanedBranch: &PushToOrphanedBranchConfig{ + Max: 3, + }, + }, + } + + job, err := compiler.buildCreateOutputPushToOrphanedBranchJob(workflowData, "main_job") + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + + if job.Name != "push_to_orphaned_branch" { + t.Errorf("Expected job name 'push_to_orphaned_branch', got: %s", 
job.Name) + } + + if job.If != "always()" { + t.Errorf("Expected job condition 'always()', got: %s", job.If) + } + + if !strings.Contains(job.Permissions, "contents: write") { + t.Errorf("Expected job to have contents: write permission") + } + + if job.TimeoutMinutes != 10 { + t.Errorf("Expected timeout of 10 minutes, got: %d", job.TimeoutMinutes) + } + + // Check for default branch name in environment variables + stepsStr := strings.Join(job.Steps, "") + if !strings.Contains(stepsStr, "GITHUB_AW_ORPHANED_BRANCH_NAME: assets/test-workflow") { + t.Errorf("Expected default branch name 'assets/test-workflow' in steps") + } + + // Check that the main job is a dependency + found := false + for _, need := range job.Needs { + if need == "main_job" { + t.Logf("Found expected dependency: %s", need) + found = true + break + } + } + if !found { + t.Errorf("Expected 'main_job' to be in needs, got: %v", job.Needs) + } + + // Check for expected outputs + if _, ok := job.Outputs["uploaded_files"]; !ok { + t.Errorf("Expected 'uploaded_files' output to be present") + } + if _, ok := job.Outputs["file_urls"]; !ok { + t.Errorf("Expected 'file_urls' output to be present") + } + + // Check that steps contain expected elements + stepsString := strings.Join(job.Steps, "") + if !strings.Contains(stepsString, "Checkout repository") { + t.Errorf("Expected checkout step") + } + if !strings.Contains(stepsString, "Push to Orphaned Branch") { + t.Errorf("Expected push to orphaned branch step") + } + if !strings.Contains(stepsString, "GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT: 3") { + t.Errorf("Expected max count environment variable to be set") + } + }) + + t.Run("default_max_count", func(t *testing.T) { + workflowData := &WorkflowData{ + SafeOutputs: &SafeOutputsConfig{ + PushToOrphanedBranch: &PushToOrphanedBranchConfig{}, + }, + } + + job, err := compiler.buildCreateOutputPushToOrphanedBranchJob(workflowData, "main_job") + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + + 
stepsStr := strings.Join(job.Steps, "") + if !strings.Contains(stepsStr, "GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT: 1") { + t.Errorf("Expected default max count of 1") + } + }) + + t.Run("command_workflow_condition", func(t *testing.T) { + workflowData := &WorkflowData{ + Command: "upload-files", + SafeOutputs: &SafeOutputsConfig{ + PushToOrphanedBranch: &PushToOrphanedBranchConfig{}, + }, + } + + job, err := compiler.buildCreateOutputPushToOrphanedBranchJob(workflowData, "main_job") + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + + // Should have command trigger condition + if !strings.Contains(job.If, "upload-files") { + t.Errorf("Expected command condition in job.If, got: %s", job.If) + } + }) + + t.Run("missing_configuration", func(t *testing.T) { + workflowData := &WorkflowData{ + SafeOutputs: &SafeOutputsConfig{}, + } + + _, err := compiler.buildCreateOutputPushToOrphanedBranchJob(workflowData, "main_job") + if err == nil { + t.Fatalf("Expected error for missing configuration") + } + + if !strings.Contains(err.Error(), "safe-outputs.push-to-orphaned-branch configuration is required") { + t.Errorf("Expected specific error message, got: %v", err) + } + }) +} + +func TestBuildCreateOutputPushToOrphanedBranchJobWithCustomBranch(t *testing.T) { + compiler := NewCompiler(false, "", "1.0.0") + + t.Run("custom_branch_configuration", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + SafeOutputs: &SafeOutputsConfig{ + PushToOrphanedBranch: &PushToOrphanedBranchConfig{ + Max: 2, + Branch: "custom-uploads", + }, + }, + } + + job, err := compiler.buildCreateOutputPushToOrphanedBranchJob(workflowData, "main_job") + if err != nil { + t.Fatalf("Expected no error, got: %v", err) + } + + // Check for custom branch name in environment variables + stepsStr := strings.Join(job.Steps, "") + if !strings.Contains(stepsStr, "GITHUB_AW_ORPHANED_BRANCH_NAME: custom-uploads") { + t.Errorf("Expected custom branch name 'custom-uploads' in 
steps, got: %s", stepsStr) + } + + // Check that max count is correctly set + if !strings.Contains(stepsStr, "GITHUB_AW_ORPHANED_BRANCH_MAX_COUNT: 2") { + t.Errorf("Expected max count 2 in environment variables") + } + }) +} + +func TestHasSafeOutputsEnabledWithOrphanedBranch(t *testing.T) { + t.Run("enabled_with_orphaned_branch", func(t *testing.T) { + config := &SafeOutputsConfig{ + PushToOrphanedBranch: &PushToOrphanedBranchConfig{}, + } + + if !HasSafeOutputsEnabled(config) { + t.Errorf("Expected safe outputs to be enabled with orphaned branch config") + } + }) + + t.Run("disabled_without_orphaned_branch", func(t *testing.T) { + config := &SafeOutputsConfig{} + + if HasSafeOutputsEnabled(config) { + t.Errorf("Expected safe outputs to be disabled without any config") + } + }) +} diff --git a/pkg/workflow/safe_outputs.go b/pkg/workflow/safe_outputs.go index bf717ee15b..4a59dd534a 100644 --- a/pkg/workflow/safe_outputs.go +++ b/pkg/workflow/safe_outputs.go @@ -11,5 +11,6 @@ func HasSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool { safeOutputs.AddIssueLabels != nil || safeOutputs.UpdateIssues != nil || safeOutputs.PushToPullRequestBranch != nil || + safeOutputs.PushToOrphanedBranch != nil || safeOutputs.MissingTool != nil } diff --git a/schemas/agent-output.json b/schemas/agent-output.json index 81f34135e5..721eb54b07 100644 --- a/schemas/agent-output.json +++ b/schemas/agent-output.json @@ -33,6 +33,7 @@ {"$ref": "#/$defs/AddIssueLabelOutput"}, {"$ref": "#/$defs/UpdateIssueOutput"}, {"$ref": "#/$defs/PushToPullRequestBranchOutput"}, + {"$ref": "#/$defs/PushToOrphanedBranchOutput"}, {"$ref": "#/$defs/CreatePullRequestReviewCommentOutput"}, {"$ref": "#/$defs/CreateDiscussionOutput"}, {"$ref": "#/$defs/MissingToolOutput"}, @@ -193,6 +194,33 @@ "required": ["type"], "additionalProperties": false }, + "PushToOrphanedBranchOutput": { + "title": "Push to Orphaned Branch Output", + "description": "Output for uploading a file to an orphaned branch and getting a 
GitHub raw URL", + "type": "object", + "properties": { + "type": { + "const": "push-to-orphaned-branch" + }, + "filename": { + "type": "string", + "description": "SHA-based filename with original extension (e.g., sha256hash.png). Screenshots and images can be uploaded using this safe output.", + "minLength": 1 + }, + "original_filename": { + "type": "string", + "description": "Original filename as provided by the user", + "minLength": 1 + }, + "sha": { + "type": "string", + "description": "SHA256 hash of the file content", + "minLength": 1 + } + }, + "required": ["type", "filename", "original_filename", "sha"], + "additionalProperties": false + }, "CreatePullRequestReviewCommentOutput": { "title": "Create Pull Request Review Comment Output", "description": "Output for creating a review comment on a specific line of code",