diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 6d1fb5e7ad..bf6f894de9 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -53,7 +53,9 @@
# bash:
# - "*"
# edit:
-# cache-memory:
+# repo-memory:
+# branch-name: memory/firewall-reports
+# description: "Firewall analysis history and aggregated data"
# imports:
# - shared/mcp/gh-aw.md
# - shared/reporting.md
@@ -74,15 +76,19 @@
# conclusion["conclusion"]
# create_discussion["create_discussion"]
# detection["detection"]
+# push_repo_memory["push_repo_memory"]
# upload_assets["upload_assets"]
# activation --> agent
# agent --> conclusion
# activation --> conclusion
# create_discussion --> conclusion
# upload_assets --> conclusion
+# push_repo_memory --> conclusion
# agent --> create_discussion
# detection --> create_discussion
# agent --> detection
+# agent --> push_repo_memory
+# detection --> push_repo_memory
# agent --> upload_assets
# detection --> upload_assets
# ```
@@ -515,16 +521,16 @@
#
# ## Instructions
#
-# ### Step 0: Check Cache for Recent Analysis
+# ### Step 0: Check Repo Memory for Recent Analysis
#
# **EFFICIENCY FIRST**: Before starting the full analysis:
#
-# 1. Check `/tmp/gh-aw/cache-memory/firewall-reports/` for the most recent report
+# 1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for the most recent report
# 2. If a report exists from the last 24 hours:
# - Read the cached run IDs that were analyzed
# - Determine if any new workflow runs have occurred since then
# - If no new runs, update the existing report with current timestamp and exit early
-# 3. Cache the following for the next run:
+# 3. Store the following in repo memory for the next run:
# - Last analysis timestamp
# - List of run IDs analyzed
# - Aggregated blocked domains data
@@ -909,6 +915,35 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ # Repo memory git-based storage configuration from frontmatter processed below
+ - name: Clone repo-memory branch (default)
+ env:
+ GH_TOKEN: ${{ github.token }}
+ BRANCH_NAME: memory/firewall-reports
+ run: |
+ set +e # Don't fail if branch doesn't exist
+ git clone --depth 1 --single-branch --branch "memory/firewall-reports" "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git" "/tmp/gh-aw/repo-memory-default" 2>/dev/null
+ CLONE_EXIT_CODE=$?
+ set -e
+
+ if [ $CLONE_EXIT_CODE -ne 0 ]; then
+ echo "Branch memory/firewall-reports does not exist, creating orphan branch"
+ mkdir -p "/tmp/gh-aw/repo-memory-default"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git init
+ git checkout --orphan "$BRANCH_NAME"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git remote add origin "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git"
+ else
+ echo "Successfully cloned memory/firewall-reports branch"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ fi
+
+ mkdir -p "/tmp/gh-aw/repo-memory-default/memory/default"
+ echo "Repo memory directory ready at /tmp/gh-aw/repo-memory-default/memory/default"
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -2825,16 +2860,16 @@ jobs:
## Instructions
- ### Step 0: Check Cache for Recent Analysis
+ ### Step 0: Check Repo Memory for Recent Analysis
**EFFICIENCY FIRST**: Before starting the full analysis:
- 1. Check `/tmp/gh-aw/cache-memory/firewall-reports/` for the most recent report
+ 1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for the most recent report
2. If a report exists from the last 24 hours:
- Read the cached run IDs that were analyzed
- Determine if any new workflow runs have occurred since then
- If no new runs, update the existing report with current timestamp and exit early
- 3. Cache the following for the next run:
+ 3. Store the following in repo memory for the next run:
- Last analysis timestamp
- List of run IDs analyzed
- Aggregated blocked domains data
@@ -2891,13 +2926,13 @@ jobs:
```javascript
// From the audit tool result, access:
result.firewall_analysis.denied_domains // Array of denied domain names
- result.firewall_analysis.allowed_domains // Array of allowed domain names
PROMPT_EOF
- name: Append prompt (part 2)
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
run: |
cat << 'PROMPT_EOF' | envsubst >> "$GH_AW_PROMPT"
+ result.firewall_analysis.allowed_domains // Array of allowed domain names
result.firewall_analysis.total_requests // Total number of network requests
result.firewall_analysis.denied_requests // Number of denied requests
```
@@ -3080,6 +3115,35 @@ jobs:
- `/tmp/gh-aw/cache-memory/history.log` - activity history and logs
- `/tmp/gh-aw/cache-memory/state/` - organized state files in subdirectories
+ Feel free to create, read, update, and organize files in this folder as needed for your tasks.
+ PROMPT_EOF
+ - name: Append repo memory instructions to prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ run: |
+ cat << 'PROMPT_EOF' | envsubst >> "$GH_AW_PROMPT"
+
+ ---
+
+ ## Repo Memory Available
+
+ You have access to a persistent repo memory folder at `/tmp/gh-aw/repo-memory-default/memory/default/` where you can read and write files that are stored in a git branch. Purpose: firewall analysis history and aggregated data.
+
+ - **Read/Write Access**: You can freely read from and write to any files in this folder
+ - **Git Branch Storage**: Files are stored in the `memory/firewall-reports` branch of the current repository
+ - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes
+ - **Merge Strategy**: In case of conflicts, your changes (current version) win
+ - **Persistence**: Files persist across workflow runs via git branch storage
+
+ **Constraints:**
+ - **Max File Size**: 10240 bytes (10 KB) per file
+ - **Max File Count**: 100 files per commit
+
+ Examples of what you can store:
+ - `/tmp/gh-aw/repo-memory-default/memory/default/notes.md` - general notes and observations
+ - `/tmp/gh-aw/repo-memory-default/memory/default/state.json` - structured state data
+ - `/tmp/gh-aw/repo-memory-default/memory/default/history.log` - activity history log (keep files at the top level; subdirectory contents are not pushed)
+
Feel free to create, read, update, and organize files in this folder as needed for your tasks.
PROMPT_EOF
- name: Append safe outputs instructions to prompt
@@ -5923,6 +5987,15 @@ jobs:
name: agent-stdio.log
path: /tmp/gh-aw/agent-stdio.log
if-no-files-found: warn
+ # Upload repo memory as artifacts for push job
+ - name: Upload repo-memory artifact (default)
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ retention-days: 1
+ if-no-files-found: ignore
- name: Upload safe outputs assets
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
@@ -6172,6 +6245,7 @@ jobs:
- activation
- create_discussion
- upload_assets
+ - push_repo_memory
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7585,6 +7659,199 @@ jobs:
path: /tmp/gh-aw/threat-detection/detection.log
if-no-files-found: ignore
+ push_repo_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ with:
+ persist-credentials: false
+ sparse-checkout: .
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download repo-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
+ continue-on-error: true
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ - name: Push repo-memory changes (default)
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_RUN_ID: ${{ github.run_id }}
+ ARTIFACT_DIR: /tmp/gh-aw/repo-memory-default
+ MEMORY_ID: default
+ TARGET_REPO: ${{ github.repository }}
+ BRANCH_NAME: memory/firewall-reports
+ MAX_FILE_SIZE: 10240
+ MAX_FILE_COUNT: 100
+ with:
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const { execSync } = require("child_process");
+ async function main() {
+ const artifactDir = process.env.ARTIFACT_DIR;
+ const memoryId = process.env.MEMORY_ID;
+ const targetRepo = process.env.TARGET_REPO;
+ const branchName = process.env.BRANCH_NAME;
+ const maxFileSize = parseInt(process.env.MAX_FILE_SIZE || "10240", 10);
+ const maxFileCount = parseInt(process.env.MAX_FILE_COUNT || "100", 10);
+ const fileGlobFilter = process.env.FILE_GLOB_FILTER || "";
+ const ghToken = process.env.GH_TOKEN;
+ const githubRunId = process.env.GITHUB_RUN_ID || "unknown";
+ if (!artifactDir || !memoryId || !targetRepo || !branchName || !ghToken) {
+ core.setFailed("Missing required environment variables: ARTIFACT_DIR, MEMORY_ID, TARGET_REPO, BRANCH_NAME, GH_TOKEN");
+ return;
+ }
+ const sourceMemoryPath = path.join(artifactDir, "memory", memoryId);
+ if (!fs.existsSync(sourceMemoryPath)) {
+ core.info(`Memory directory not found in artifact: ${sourceMemoryPath}`);
+ return;
+ }
+ const workspaceDir = process.env.GITHUB_WORKSPACE || process.cwd();
+ core.info(`Working in repository: ${workspaceDir}`);
+ core.info(`Disabling sparse checkout...`);
+ try {
+ execSync("git sparse-checkout disable", { stdio: "pipe" });
+ } catch (error) {
+ core.info("Sparse checkout was not enabled or already disabled");
+ }
+ core.info(`Checking out branch: ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ try {
+ execSync(`git fetch "${repoUrl}" "${branchName}:${branchName}"`, { stdio: "pipe" });
+ execSync(`git checkout "${branchName}"`, { stdio: "inherit" });
+ core.info(`Checked out existing branch: ${branchName}`);
+ } catch (fetchError) {
+ core.info(`Branch ${branchName} does not exist, creating orphan branch...`);
+ execSync(`git checkout --orphan "${branchName}"`, { stdio: "inherit" });
+ execSync("git rm -rf . || true", { stdio: "pipe" });
+ core.info(`Created orphan branch: ${branchName}`);
+ }
+ } catch (error) {
+ core.setFailed(`Failed to checkout branch: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ const destMemoryPath = path.join(workspaceDir, "memory", memoryId);
+ fs.mkdirSync(destMemoryPath, { recursive: true });
+ core.info(`Destination directory: ${destMemoryPath}`);
+ let filesToCopy = [];
+ try {
+ const files = fs.readdirSync(sourceMemoryPath, { withFileTypes: true });
+ for (const file of files) {
+ if (!file.isFile()) {
+ continue;
+ }
+ const fileName = file.name;
+ const sourceFilePath = path.join(sourceMemoryPath, fileName);
+ const stats = fs.statSync(sourceFilePath);
+ if (fileGlobFilter) {
+ const patterns = fileGlobFilter.split(/\s+/).map(pattern => {
+ const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, "[^/]*");
+ return new RegExp(`^${regexPattern}$`);
+ });
+ if (!patterns.some(pattern => pattern.test(fileName))) {
+ core.error(`File does not match allowed patterns: ${fileName}`);
+ core.error(`Allowed patterns: ${fileGlobFilter}`);
+ core.setFailed("File pattern validation failed");
+ return;
+ }
+ }
+ if (stats.size > maxFileSize) {
+ core.error(`File exceeds size limit: ${fileName} (${stats.size} bytes > ${maxFileSize} bytes)`);
+ core.setFailed("File size validation failed");
+ return;
+ }
+ filesToCopy.push({ name: fileName, source: sourceFilePath, size: stats.size });
+ }
+ } catch (error) {
+ core.setFailed(`Failed to read artifact directory: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (filesToCopy.length > maxFileCount) {
+ core.setFailed(`Too many files (${filesToCopy.length} > ${maxFileCount})`);
+ return;
+ }
+ if (filesToCopy.length === 0) {
+ core.info("No files to copy from artifact");
+ return;
+ }
+ core.info(`Copying ${filesToCopy.length} validated file(s)...`);
+ for (const file of filesToCopy) {
+ const destFilePath = path.join(destMemoryPath, file.name);
+ try {
+ fs.copyFileSync(file.source, destFilePath);
+ core.info(`Copied: ${file.name} (${file.size} bytes)`);
+ } catch (error) {
+ core.setFailed(`Failed to copy file ${file.name}: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ let hasChanges = false;
+ try {
+ const status = execSync("git status --porcelain", { encoding: "utf8" });
+ hasChanges = status.trim().length > 0;
+ } catch (error) {
+ core.setFailed(`Failed to check git status: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (!hasChanges) {
+ core.info("No changes detected after copying files");
+ return;
+ }
+ core.info("Changes detected, committing and pushing...");
+ try {
+ execSync("git add .", { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to stage changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ try {
+ execSync(`git commit -m "Update repo memory from workflow run ${githubRunId}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to commit changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.info(`Pulling latest changes from ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git pull --no-rebase -X ours "${repoUrl}" "${branchName}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.warning(`Pull failed (this may be expected): ${error instanceof Error ? error.message : String(error)}`);
+ }
+ core.info(`Pushing changes to ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git push "${repoUrl}" HEAD:"${branchName}"`, { stdio: "inherit" });
+ core.info(`Successfully pushed changes to ${branchName} branch`);
+ } catch (error) {
+ core.setFailed(`Failed to push changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ main().catch(error => {
+ core.setFailed(`Unexpected error: ${error instanceof Error ? error.message : String(error)}`);
+ });
+
upload_assets:
needs:
- agent
diff --git a/.github/workflows/daily-firewall-report.md b/.github/workflows/daily-firewall-report.md
index cd91f86e3e..4b4cdb657d 100644
--- a/.github/workflows/daily-firewall-report.md
+++ b/.github/workflows/daily-firewall-report.md
@@ -30,7 +30,9 @@ tools:
bash:
- "*"
edit:
- cache-memory:
+ repo-memory:
+ branch-name: memory/firewall-reports
+ description: "Firewall analysis history and aggregated data"
imports:
- shared/mcp/gh-aw.md
- shared/reporting.md
@@ -150,16 +152,16 @@ Generate a comprehensive daily report of all rejected domains across all agentic
## Instructions
-### Step 0: Check Cache for Recent Analysis
+### Step 0: Check Repo Memory for Recent Analysis
**EFFICIENCY FIRST**: Before starting the full analysis:
-1. Check `/tmp/gh-aw/cache-memory/firewall-reports/` for the most recent report
+1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for the most recent report
2. If a report exists from the last 24 hours:
- Read the cached run IDs that were analyzed
- Determine if any new workflow runs have occurred since then
- If no new runs, update the existing report with current timestamp and exit early
-3. Cache the following for the next run:
+3. Store the following in repo memory for the next run:
- Last analysis timestamp
- List of run IDs analyzed
- Aggregated blocked domains data
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 015ab9dc37..607bb4d156 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -57,7 +57,11 @@
# close-older-discussions: true
#
# tools:
-# cache-memory:
+# repo-memory:
+# branch-name: memory/deep-report
+# description: "Long-term insights, patterns, and trend data"
+# file-glob: ["*.md"]
+# max-file-size: 1048576 # 1MB
# github:
# toolsets:
# - all
@@ -87,15 +91,19 @@
# conclusion["conclusion"]
# create_discussion["create_discussion"]
# detection["detection"]
+# push_repo_memory["push_repo_memory"]
# upload_assets["upload_assets"]
# activation --> agent
# agent --> conclusion
# activation --> conclusion
# create_discussion --> conclusion
# upload_assets --> conclusion
+# push_repo_memory --> conclusion
# agent --> create_discussion
# detection --> create_discussion
# agent --> detection
+# agent --> push_repo_memory
+# detection --> push_repo_memory
# agent --> upload_assets
# detection --> upload_assets
# ```
@@ -447,16 +455,16 @@
#
# ## Intelligence Collection Process
#
-# ### Step 0: Check Cache Memory
+# ### Step 0: Check Repo Memory
#
# **EFFICIENCY FIRST**: Before starting full analysis:
#
-# 1. Check `/tmp/gh-aw/cache-memory/deep-report/` for previous insights
-# 2. Load any existing:
-# - `last_analysis_timestamp.txt` - When the last full analysis was run
-# - `known_patterns.json` - Previously identified patterns
-# - `trend_data.json` - Historical trend data
-# - `flagged_items.json` - Items flagged for continued monitoring
+# 1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for previous insights
+# 2. Load any existing markdown files (only markdown files are allowed in repo-memory):
+# - `last_analysis_timestamp.md` - When the last full analysis was run
+# - `known_patterns.md` - Previously identified patterns
+# - `trend_data.md` - Historical trend data
+# - `flagged_items.md` - Items flagged for continued monitoring
#
# 3. If the last analysis was less than 20 hours ago, focus only on new data since then
#
@@ -497,13 +505,15 @@
# 3. Find patterns that span multiple report types
# 4. Track how identified patterns evolve over time
#
-# ### Step 4: Store Insights in Cache
+# ### Step 4: Store Insights in Repo Memory
#
-# Save your findings to `/tmp/gh-aw/cache-memory/deep-report/`:
-# - Update `known_patterns.json` with any new patterns discovered
-# - Update `trend_data.json` with current metrics
-# - Update `flagged_items.json` with items needing attention
-# - Save `last_analysis_timestamp.txt` with current timestamp
+# Save your findings to `/tmp/gh-aw/repo-memory-default/memory/default/` as markdown files:
+# - Update `known_patterns.md` with any new patterns discovered
+# - Update `trend_data.md` with current metrics
+# - Update `flagged_items.md` with items needing attention
+# - Save `last_analysis_timestamp.md` with current timestamp
+#
+# **Note:** Only markdown (.md) files are allowed in the repo-memory folder. Use markdown tables, lists, and formatting to structure your data.
#
# ## Report Structure
#
@@ -560,7 +570,7 @@
# - Discussion references with links
# - Workflow run references with links
# - Time range of data analyzed
-# - Cache data used from previous analyses
+# - Repo-memory data used from previous analyses (stored in memory/deep-report branch)
#
# ## Output Guidelines
#
@@ -789,6 +799,35 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ # Repo memory git-based storage configuration from frontmatter processed below
+ - name: Clone repo-memory branch (default)
+ env:
+ GH_TOKEN: ${{ github.token }}
+ BRANCH_NAME: memory/deep-report
+ run: |
+ set +e # Don't fail if branch doesn't exist
+ git clone --depth 1 --single-branch --branch "memory/deep-report" "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git" "/tmp/gh-aw/repo-memory-default" 2>/dev/null
+ CLONE_EXIT_CODE=$?
+ set -e
+
+ if [ $CLONE_EXIT_CODE -ne 0 ]; then
+ echo "Branch memory/deep-report does not exist, creating orphan branch"
+ mkdir -p "/tmp/gh-aw/repo-memory-default"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git init
+ git checkout --orphan "$BRANCH_NAME"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git remote add origin "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git"
+ else
+ echo "Successfully cloned memory/deep-report branch"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ fi
+
+ mkdir -p "/tmp/gh-aw/repo-memory-default/memory/default"
+ echo "Repo memory directory ready at /tmp/gh-aw/repo-memory-default/memory/default"
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -2593,16 +2632,16 @@ jobs:
## Intelligence Collection Process
- ### Step 0: Check Cache Memory
+ ### Step 0: Check Repo Memory
**EFFICIENCY FIRST**: Before starting full analysis:
- 1. Check `/tmp/gh-aw/cache-memory/deep-report/` for previous insights
- 2. Load any existing:
- - `last_analysis_timestamp.txt` - When the last full analysis was run
- - `known_patterns.json` - Previously identified patterns
- - `trend_data.json` - Historical trend data
- - `flagged_items.json` - Items flagged for continued monitoring
+ 1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for previous insights
+ 2. Load any existing markdown files (only markdown files are allowed in repo-memory):
+ - `last_analysis_timestamp.md` - When the last full analysis was run
+ - `known_patterns.md` - Previously identified patterns
+ - `trend_data.md` - Historical trend data
+ - `flagged_items.md` - Items flagged for continued monitoring
3. If the last analysis was less than 20 hours ago, focus only on new data since then
@@ -2643,13 +2682,15 @@ jobs:
3. Find patterns that span multiple report types
4. Track how identified patterns evolve over time
- ### Step 4: Store Insights in Cache
+ ### Step 4: Store Insights in Repo Memory
- Save your findings to `/tmp/gh-aw/cache-memory/deep-report/`:
- - Update `known_patterns.json` with any new patterns discovered
- - Update `trend_data.json` with current metrics
- - Update `flagged_items.json` with items needing attention
- - Save `last_analysis_timestamp.txt` with current timestamp
+ Save your findings to `/tmp/gh-aw/repo-memory-default/memory/default/` as markdown files:
+ - Update `known_patterns.md` with any new patterns discovered
+ - Update `trend_data.md` with current metrics
+ - Update `flagged_items.md` with items needing attention
+ - Save `last_analysis_timestamp.md` with current timestamp
+
+ **Note:** Only markdown (.md) files are allowed in the repo-memory folder. Use markdown tables, lists, and formatting to structure your data.
## Report Structure
@@ -2706,7 +2747,7 @@ jobs:
- Discussion references with links
- Workflow run references with links
- Time range of data analyzed
- - Cache data used from previous analyses
+ - Repo-memory data used from previous analyses (stored in memory/deep-report branch)
## Output Guidelines
@@ -2799,6 +2840,36 @@ jobs:
- `/tmp/gh-aw/cache-memory/history.log` - activity history and logs
- `/tmp/gh-aw/cache-memory/state/` - organized state files in subdirectories
+ Feel free to create, read, update, and organize files in this folder as needed for your tasks.
+ PROMPT_EOF
+ - name: Append repo memory instructions to prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ run: |
+ cat << 'PROMPT_EOF' | envsubst >> "$GH_AW_PROMPT"
+
+ ---
+
+ ## Repo Memory Available
+
+ You have access to a persistent repo memory folder at `/tmp/gh-aw/repo-memory-default/memory/default/` where you can read and write files that are stored in a git branch. Purpose: long-term insights, patterns, and trend data.
+
+ - **Read/Write Access**: You can freely read from and write to any files in this folder
+ - **Git Branch Storage**: Files are stored in the `memory/deep-report` branch of the current repository
+ - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes
+ - **Merge Strategy**: In case of conflicts, your changes (current version) win
+ - **Persistence**: Files persist across workflow runs via git branch storage
+
+ **Constraints:**
+ - **Allowed Files**: Only files matching patterns: *.md
+ - **Max File Size**: 1048576 bytes (1.00 MB) per file
+ - **Max File Count**: 100 files per commit
+
+ Examples of what you can store:
+ - `/tmp/gh-aw/repo-memory-default/memory/default/notes.md` - general notes and observations
+ - `/tmp/gh-aw/repo-memory-default/memory/default/state.md` - structured state data in markdown form
+ - `/tmp/gh-aw/repo-memory-default/memory/default/history.md` - append-only history log (keep files at the top level; subdirectory contents are not pushed)
+
Feel free to create, read, update, and organize files in this folder as needed for your tasks.
PROMPT_EOF
- name: Append safe outputs instructions to prompt
@@ -5130,6 +5201,15 @@ jobs:
name: agent-stdio.log
path: /tmp/gh-aw/agent-stdio.log
if-no-files-found: warn
+ # Upload repo memory as artifacts for push job
+ - name: Upload repo-memory artifact (default)
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ retention-days: 1
+ if-no-files-found: ignore
- name: Upload safe outputs assets
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
@@ -5379,6 +5459,7 @@ jobs:
- activation
- create_discussion
- upload_assets
+ - push_repo_memory
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6780,6 +6861,200 @@ jobs:
path: /tmp/gh-aw/threat-detection/detection.log
if-no-files-found: ignore
+ push_repo_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ with:
+ persist-credentials: false
+ sparse-checkout: .
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download repo-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
+ continue-on-error: true
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ - name: Push repo-memory changes (default)
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_RUN_ID: ${{ github.run_id }}
+ ARTIFACT_DIR: /tmp/gh-aw/repo-memory-default
+ MEMORY_ID: default
+ TARGET_REPO: ${{ github.repository }}
+ BRANCH_NAME: memory/deep-report
+ MAX_FILE_SIZE: 1048576
+ MAX_FILE_COUNT: 100
+ FILE_GLOB_FILTER: "*.md"
+ with:
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const { execSync } = require("child_process");
+ async function main() {
+ const artifactDir = process.env.ARTIFACT_DIR;
+ const memoryId = process.env.MEMORY_ID;
+ const targetRepo = process.env.TARGET_REPO;
+ const branchName = process.env.BRANCH_NAME;
+ const maxFileSize = parseInt(process.env.MAX_FILE_SIZE || "10240", 10);
+ const maxFileCount = parseInt(process.env.MAX_FILE_COUNT || "100", 10);
+ const fileGlobFilter = process.env.FILE_GLOB_FILTER || "";
+ const ghToken = process.env.GH_TOKEN;
+ const githubRunId = process.env.GITHUB_RUN_ID || "unknown";
+ if (!artifactDir || !memoryId || !targetRepo || !branchName || !ghToken) {
+ core.setFailed("Missing required environment variables: ARTIFACT_DIR, MEMORY_ID, TARGET_REPO, BRANCH_NAME, GH_TOKEN");
+ return;
+ }
+ const sourceMemoryPath = path.join(artifactDir, "memory", memoryId);
+ if (!fs.existsSync(sourceMemoryPath)) {
+ core.info(`Memory directory not found in artifact: ${sourceMemoryPath}`);
+ return;
+ }
+ const workspaceDir = process.env.GITHUB_WORKSPACE || process.cwd();
+ core.info(`Working in repository: ${workspaceDir}`);
+ core.info(`Disabling sparse checkout...`);
+ try {
+ execSync("git sparse-checkout disable", { stdio: "pipe" });
+ } catch (error) {
+ core.info("Sparse checkout was not enabled or already disabled");
+ }
+ core.info(`Checking out branch: ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ try {
+ execSync(`git fetch "${repoUrl}" "${branchName}:${branchName}"`, { stdio: "pipe" });
+ execSync(`git checkout "${branchName}"`, { stdio: "inherit" });
+ core.info(`Checked out existing branch: ${branchName}`);
+ } catch (fetchError) {
+ core.info(`Branch ${branchName} does not exist, creating orphan branch...`);
+ execSync(`git checkout --orphan "${branchName}"`, { stdio: "inherit" });
+ execSync("git rm -rf . || true", { stdio: "pipe" });
+ core.info(`Created orphan branch: ${branchName}`);
+ }
+ } catch (error) {
+ core.setFailed(`Failed to checkout branch: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ const destMemoryPath = path.join(workspaceDir, "memory", memoryId);
+ fs.mkdirSync(destMemoryPath, { recursive: true });
+ core.info(`Destination directory: ${destMemoryPath}`);
+ let filesToCopy = [];
+ try {
+ const files = fs.readdirSync(sourceMemoryPath, { withFileTypes: true });
+ for (const file of files) {
+ if (!file.isFile()) {
+ continue;
+ }
+ const fileName = file.name;
+ const sourceFilePath = path.join(sourceMemoryPath, fileName);
+ const stats = fs.statSync(sourceFilePath);
+ if (fileGlobFilter) {
+ const patterns = fileGlobFilter.split(/\s+/).map(pattern => {
+ const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, "[^/]*");
+ return new RegExp(`^${regexPattern}$`);
+ });
+ if (!patterns.some(pattern => pattern.test(fileName))) {
+ core.error(`File does not match allowed patterns: ${fileName}`);
+ core.error(`Allowed patterns: ${fileGlobFilter}`);
+ core.setFailed("File pattern validation failed");
+ return;
+ }
+ }
+ if (stats.size > maxFileSize) {
+ core.error(`File exceeds size limit: ${fileName} (${stats.size} bytes > ${maxFileSize} bytes)`);
+ core.setFailed("File size validation failed");
+ return;
+ }
+ filesToCopy.push({ name: fileName, source: sourceFilePath, size: stats.size });
+ }
+ } catch (error) {
+ core.setFailed(`Failed to read artifact directory: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (filesToCopy.length > maxFileCount) {
+ core.setFailed(`Too many files (${filesToCopy.length} > ${maxFileCount})`);
+ return;
+ }
+ if (filesToCopy.length === 0) {
+ core.info("No files to copy from artifact");
+ return;
+ }
+ core.info(`Copying ${filesToCopy.length} validated file(s)...`);
+ for (const file of filesToCopy) {
+ const destFilePath = path.join(destMemoryPath, file.name);
+ try {
+ fs.copyFileSync(file.source, destFilePath);
+ core.info(`Copied: ${file.name} (${file.size} bytes)`);
+ } catch (error) {
+ core.setFailed(`Failed to copy file ${file.name}: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ let hasChanges = false;
+ try {
+ const status = execSync("git status --porcelain", { encoding: "utf8" });
+ hasChanges = status.trim().length > 0;
+ } catch (error) {
+ core.setFailed(`Failed to check git status: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (!hasChanges) {
+ core.info("No changes detected after copying files");
+ return;
+ }
+ core.info("Changes detected, committing and pushing...");
+ try {
+ execSync("git add .", { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to stage changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ try {
+ execSync(`git commit -m "Update repo memory from workflow run ${githubRunId}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to commit changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.info(`Pulling latest changes from ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git pull --no-rebase -X ours "${repoUrl}" "${branchName}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.warning(`Pull failed (this may be expected): ${error instanceof Error ? error.message : String(error)}`);
+ }
+ core.info(`Pushing changes to ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git push "${repoUrl}" HEAD:"${branchName}"`, { stdio: "inherit" });
+ core.info(`Successfully pushed changes to ${branchName} branch`);
+ } catch (error) {
+ core.setFailed(`Failed to push changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ main().catch(error => {
+ core.setFailed(`Unexpected error: ${error instanceof Error ? error.message : String(error)}`);
+ });
+
upload_assets:
needs:
- agent
diff --git a/.github/workflows/deep-report.md b/.github/workflows/deep-report.md
index a709cfe5e2..c9d74eaa65 100644
--- a/.github/workflows/deep-report.md
+++ b/.github/workflows/deep-report.md
@@ -34,7 +34,11 @@ safe-outputs:
close-older-discussions: true
tools:
- cache-memory:
+ repo-memory:
+ branch-name: memory/deep-report
+ description: "Long-term insights, patterns, and trend data"
+ file-glob: ["*.md"]
+ max-file-size: 1048576 # 1MB
github:
toolsets:
- all
@@ -130,16 +134,16 @@ jq '[.[].author.login] | unique' /tmp/gh-aw/weekly-issues-data/issues.json
## Intelligence Collection Process
-### Step 0: Check Cache Memory
+### Step 0: Check Repo Memory
**EFFICIENCY FIRST**: Before starting full analysis:
-1. Check `/tmp/gh-aw/cache-memory/deep-report/` for previous insights
-2. Load any existing:
- - `last_analysis_timestamp.txt` - When the last full analysis was run
- - `known_patterns.json` - Previously identified patterns
- - `trend_data.json` - Historical trend data
- - `flagged_items.json` - Items flagged for continued monitoring
+1. Check `/tmp/gh-aw/repo-memory-default/memory/default/` for previous insights
+2. Load any existing markdown files (only markdown files are allowed in repo-memory):
+ - `last_analysis_timestamp.md` - When the last full analysis was run
+ - `known_patterns.md` - Previously identified patterns
+ - `trend_data.md` - Historical trend data
+ - `flagged_items.md` - Items flagged for continued monitoring
3. If the last analysis was less than 20 hours ago, focus only on new data since then
@@ -180,13 +184,15 @@ Connect the dots between different data sources:
3. Find patterns that span multiple report types
4. Track how identified patterns evolve over time
-### Step 4: Store Insights in Cache
+### Step 4: Store Insights in Repo Memory
-Save your findings to `/tmp/gh-aw/cache-memory/deep-report/`:
-- Update `known_patterns.json` with any new patterns discovered
-- Update `trend_data.json` with current metrics
-- Update `flagged_items.json` with items needing attention
-- Save `last_analysis_timestamp.txt` with current timestamp
+Save your findings to `/tmp/gh-aw/repo-memory-default/memory/default/` as markdown files:
+- Update `known_patterns.md` with any new patterns discovered
+- Update `trend_data.md` with current metrics
+- Update `flagged_items.md` with items needing attention
+- Save `last_analysis_timestamp.md` with current timestamp
+
+**Note:** Only markdown (.md) files are allowed in the repo-memory folder. Use markdown tables, lists, and formatting to structure your data.
## Report Structure
@@ -243,7 +249,7 @@ List all reports and data sources analyzed:
- Discussion references with links
- Workflow run references with links
- Time range of data analyzed
-- Cache data used from previous analyses
+- Repo-memory data used from previous analyses (stored in memory/deep-report branch)
## Output Guidelines
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index 522216081a..4765da5887 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -18,14 +18,14 @@
# gh aw compile
# For more information: https://github.com/githubnext/gh-aw/blob/main/.github/aw/github-agentic-workflows.md
#
-# List the last 3 issues using gh CLI
+# Create a poem about GitHub and save it to repo-memory
#
# Original Frontmatter:
# ```yaml
# on:
# workflow_dispatch:
# name: Dev
-# description: List the last 3 issues using gh CLI
+# description: Create a poem about GitHub and save it to repo-memory
# timeout-minutes: 5
# strict: false
# engine: claude
@@ -33,6 +33,9 @@
# contents: read
# issues: read
# tools:
+# repo-memory:
+# branch-name: memory/poems
+# description: "Poem collection"
# github: false
# imports:
# - shared/gh.md
@@ -47,30 +50,47 @@
# graph LR
# activation["activation"]
# agent["agent"]
+# push_repo_memory["push_repo_memory"]
# activation --> agent
+# agent --> push_repo_memory
# ```
#
# Original Prompt:
# ```markdown
-# # List Last 3 Issues
+# # Create a Poem and Save to Repo Memory
#
-# List the last 3 issues in this repository using the gh CLI tool.
+# Create a creative poem about GitHub and agentic workflows, then save it to the repo-memory.
#
# ## Task
#
-# 1. **Use gh CLI**: Use the `gh` tool to list the last 3 issues in this repository.
+# 1. **Create a Poem**: Write a creative, fun poem about GitHub, automation, and agentic workflows.
+# - The poem should be 8-12 lines
+# - Include references to GitHub features like Issues, Pull Requests, Actions, etc.
+# - Make it engaging and technical but fun
#
-# Example invocation:
-# ```
-# gh with args: "issue list --limit 3 --repo ${{ github.repository }}"
-# ```
+# 2. **Save to Repo Memory**: Save the poem to `/tmp/gh-aw/repo-memory-default/memory/default/poem_{{ github.run_number }}.md`
+# - Use the run number in the filename to make it unique
+# - Include a header with the date and run information
+# - The file will be automatically committed and pushed to the `memory/poems` branch
#
-# 2. **Display results**: Show the output from the gh CLI command.
+# 3. **List Previous Poems**: If there are other poem files in the repo memory, list them to show the history.
+#
+# ## Example Poem Structure
+#
+# ```markdown
+# # Poem #{{ github.run_number }}
+# Date: {{ current date }}
+# Run ID: ${{ github.run_id }}
+#
+# [Your poem here]
+# ```
# ```
#
# Pinned GitHub Actions:
# - actions/checkout@v5 (93cb6efe18208431cddfb8368fd83d5badbf9bfd)
# https://github.com/actions/checkout/commit/93cb6efe18208431cddfb8368fd83d5badbf9bfd
+# - actions/download-artifact@v6 (018cc2cf5baa6db3ef3c5f8a56943fffe632ef53)
+# https://github.com/actions/download-artifact/commit/018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
# - actions/github-script@v8 (ed597411d8f924073f98dfc5c65a23a2325f34cd)
# https://github.com/actions/github-script/commit/ed597411d8f924073f98dfc5c65a23a2325f34cd
# - actions/setup-node@v6 (395ad3262231945c25e8478fd5baf05154b1d79f)
@@ -205,6 +225,35 @@ jobs:
run: |
mkdir -p /tmp/gh-aw/agent
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
+ # Repo memory git-based storage configuration from frontmatter processed below
+ - name: Clone repo-memory branch (default)
+ env:
+ GH_TOKEN: ${{ github.token }}
+ BRANCH_NAME: memory/poems
+ run: |
+ set +e # Don't fail if branch doesn't exist
+ git clone --depth 1 --single-branch --branch "memory/poems" "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git" "/tmp/gh-aw/repo-memory-default" 2>/dev/null
+ CLONE_EXIT_CODE=$?
+ set -e
+
+ if [ $CLONE_EXIT_CODE -ne 0 ]; then
+ echo "Branch memory/poems does not exist, creating orphan branch"
+ mkdir -p "/tmp/gh-aw/repo-memory-default"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git init
+ git checkout --orphan "$BRANCH_NAME"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git remote add origin "https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git"
+ else
+ echo "Successfully cloned memory/poems branch"
+ cd "/tmp/gh-aw/repo-memory-default"
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ fi
+
+ mkdir -p "/tmp/gh-aw/repo-memory-default/memory/default"
+ echo "Repo memory directory ready at /tmp/gh-aw/repo-memory-default/memory/default"
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -1195,27 +1244,40 @@ jobs:
- name: Create prompt
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
+ GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
run: |
PROMPT_DIR="$(dirname "$GH_AW_PROMPT")"
mkdir -p "$PROMPT_DIR"
cat << 'PROMPT_EOF' | envsubst > "$GH_AW_PROMPT"
- # List Last 3 Issues
+ # Create a Poem and Save to Repo Memory
- List the last 3 issues in this repository using the gh CLI tool.
+ Create a creative poem about GitHub and agentic workflows, then save it to the repo-memory.
## Task
- 1. **Use gh CLI**: Use the `gh` tool to list the last 3 issues in this repository.
-
- Example invocation:
- ```
- gh with args: "issue list --limit 3 --repo ${GH_AW_GITHUB_REPOSITORY}"
- ```
+ 1. **Create a Poem**: Write a creative, fun poem about GitHub, automation, and agentic workflows.
+ - The poem should be 8-12 lines
+ - Include references to GitHub features like Issues, Pull Requests, Actions, etc.
+ - Make it engaging and technical but fun
+
+ 2. **Save to Repo Memory**: Save the poem to `/tmp/gh-aw/repo-memory-default/memory/default/poem_{{ github.run_number }}.md`
+ - Use the run number in the filename to make it unique
+ - Include a header with the date and run information
+ - The file will be automatically committed and pushed to the `memory/poems` branch
+
+ 3. **List Previous Poems**: If there are other poem files in the repo memory, list them to show the history.
+
+ ## Example Poem Structure
+
+ ```markdown
+ # Poem #{{ github.run_number }}
+ Date: {{ current date }}
+ Run ID: ${GH_AW_GITHUB_RUN_ID}
- 2. **Display results**: Show the output from the gh CLI command.
+ [Your poem here]
+ ```
PROMPT_EOF
- name: Append XPIA security instructions to prompt
@@ -1250,12 +1312,41 @@ jobs:
When you need to create temporary files or directories during your work, always use the /tmp/gh-aw/agent/ directory that has been pre-created for you. Do NOT use the root /tmp/ directory directly.
+ PROMPT_EOF
+ - name: Append repo memory instructions to prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ run: |
+ cat << 'PROMPT_EOF' | envsubst >> "$GH_AW_PROMPT"
+
+ ---
+
+ ## Repo Memory Available
+
+ You have access to a persistent repo memory folder at `/tmp/gh-aw/repo-memory-default/memory/default/` where you can read and write files that are stored in a git branch. Memory description: Poem collection
+
+ - **Read/Write Access**: You can freely read from and write to any files in this folder
+ - **Git Branch Storage**: Files are stored in the `memory/poems` branch of the current repository
+ - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes
+ - **Merge Strategy**: In case of conflicts, your changes (current version) win
+ - **Persistence**: Files persist across workflow runs via git branch storage
+
+ **Constraints:**
+ - **Max File Size**: 10240 bytes (10 KB) per file
+ - **Max File Count**: 100 files per commit
+
+ Examples of what you can store:
+ - `/tmp/gh-aw/repo-memory-default/memory/default/notes.md` - general notes and observations
+ - `/tmp/gh-aw/repo-memory-default/memory/default/state.json` - structured state data
+ - `/tmp/gh-aw/repo-memory-default/memory/default/history/` - organized history files in subdirectories
+
+ Feel free to create, read, update, and organize files in this folder as needed for your tasks.
PROMPT_EOF
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
+ GH_AW_GITHUB_RUN_ID: ${{ github.run_id }}
with:
script: |
const fs = require("fs");
@@ -2377,6 +2468,15 @@ jobs:
name: agent-stdio.log
path: /tmp/gh-aw/agent-stdio.log
if-no-files-found: warn
+ # Upload repo memory as artifacts for push job
+ - name: Upload repo-memory artifact (default)
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ retention-days: 1
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
@@ -2613,3 +2713,194 @@ jobs:
main();
}
+ push_repo_memory:
+ needs: agent
+ if: always()
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ with:
+ persist-credentials: false
+ sparse-checkout: .
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download repo-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
+ continue-on-error: true
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory-default
+ - name: Push repo-memory changes (default)
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_RUN_ID: ${{ github.run_id }}
+ ARTIFACT_DIR: /tmp/gh-aw/repo-memory-default
+ MEMORY_ID: default
+ TARGET_REPO: ${{ github.repository }}
+ BRANCH_NAME: memory/poems
+ MAX_FILE_SIZE: 10240
+ MAX_FILE_COUNT: 100
+ with:
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const { execSync } = require("child_process");
+ async function main() {
+ const artifactDir = process.env.ARTIFACT_DIR;
+ const memoryId = process.env.MEMORY_ID;
+ const targetRepo = process.env.TARGET_REPO;
+ const branchName = process.env.BRANCH_NAME;
+ const maxFileSize = parseInt(process.env.MAX_FILE_SIZE || "10240", 10);
+ const maxFileCount = parseInt(process.env.MAX_FILE_COUNT || "100", 10);
+ const fileGlobFilter = process.env.FILE_GLOB_FILTER || "";
+ const ghToken = process.env.GH_TOKEN;
+ const githubRunId = process.env.GITHUB_RUN_ID || "unknown";
+ if (!artifactDir || !memoryId || !targetRepo || !branchName || !ghToken) {
+ core.setFailed("Missing required environment variables: ARTIFACT_DIR, MEMORY_ID, TARGET_REPO, BRANCH_NAME, GH_TOKEN");
+ return;
+ }
+ const sourceMemoryPath = path.join(artifactDir, "memory", memoryId);
+ if (!fs.existsSync(sourceMemoryPath)) {
+ core.info(`Memory directory not found in artifact: ${sourceMemoryPath}`);
+ return;
+ }
+ const workspaceDir = process.env.GITHUB_WORKSPACE || process.cwd();
+ core.info(`Working in repository: ${workspaceDir}`);
+ core.info(`Disabling sparse checkout...`);
+ try {
+ execSync("git sparse-checkout disable", { stdio: "pipe" });
+ } catch (error) {
+ core.info("Sparse checkout was not enabled or already disabled");
+ }
+ core.info(`Checking out branch: ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ try {
+ execSync(`git fetch "${repoUrl}" "${branchName}:${branchName}"`, { stdio: "pipe" });
+ execSync(`git checkout "${branchName}"`, { stdio: "inherit" });
+ core.info(`Checked out existing branch: ${branchName}`);
+ } catch (fetchError) {
+ core.info(`Branch ${branchName} does not exist, creating orphan branch...`);
+ execSync(`git checkout --orphan "${branchName}"`, { stdio: "inherit" });
+ execSync("git rm -rf . || true", { stdio: "pipe" });
+ core.info(`Created orphan branch: ${branchName}`);
+ }
+ } catch (error) {
+ core.setFailed(`Failed to checkout branch: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ const destMemoryPath = path.join(workspaceDir, "memory", memoryId);
+ fs.mkdirSync(destMemoryPath, { recursive: true });
+ core.info(`Destination directory: ${destMemoryPath}`);
+ let filesToCopy = [];
+ try {
+ const files = fs.readdirSync(sourceMemoryPath, { withFileTypes: true });
+ for (const file of files) {
+ if (!file.isFile()) {
+ continue;
+ }
+ const fileName = file.name;
+ const sourceFilePath = path.join(sourceMemoryPath, fileName);
+ const stats = fs.statSync(sourceFilePath);
+ if (fileGlobFilter) {
+ const patterns = fileGlobFilter.split(/\s+/).map(pattern => {
+ const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, "[^/]*");
+ return new RegExp(`^${regexPattern}$`);
+ });
+ if (!patterns.some(pattern => pattern.test(fileName))) {
+ core.error(`File does not match allowed patterns: ${fileName}`);
+ core.error(`Allowed patterns: ${fileGlobFilter}`);
+ core.setFailed("File pattern validation failed");
+ return;
+ }
+ }
+ if (stats.size > maxFileSize) {
+ core.error(`File exceeds size limit: ${fileName} (${stats.size} bytes > ${maxFileSize} bytes)`);
+ core.setFailed("File size validation failed");
+ return;
+ }
+ filesToCopy.push({ name: fileName, source: sourceFilePath, size: stats.size });
+ }
+ } catch (error) {
+ core.setFailed(`Failed to read artifact directory: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (filesToCopy.length > maxFileCount) {
+ core.setFailed(`Too many files (${filesToCopy.length} > ${maxFileCount})`);
+ return;
+ }
+ if (filesToCopy.length === 0) {
+ core.info("No files to copy from artifact");
+ return;
+ }
+ core.info(`Copying ${filesToCopy.length} validated file(s)...`);
+ for (const file of filesToCopy) {
+ const destFilePath = path.join(destMemoryPath, file.name);
+ try {
+ fs.copyFileSync(file.source, destFilePath);
+ core.info(`Copied: ${file.name} (${file.size} bytes)`);
+ } catch (error) {
+ core.setFailed(`Failed to copy file ${file.name}: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ let hasChanges = false;
+ try {
+ const status = execSync("git status --porcelain", { encoding: "utf8" });
+ hasChanges = status.trim().length > 0;
+ } catch (error) {
+ core.setFailed(`Failed to check git status: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ if (!hasChanges) {
+ core.info("No changes detected after copying files");
+ return;
+ }
+ core.info("Changes detected, committing and pushing...");
+ try {
+ execSync("git add .", { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to stage changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ try {
+ execSync(`git commit -m "Update repo memory from workflow run ${githubRunId}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to commit changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.info(`Pulling latest changes from ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git pull --no-rebase -X ours "${repoUrl}" "${branchName}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.warning(`Pull failed (this may be expected): ${error instanceof Error ? error.message : String(error)}`);
+ }
+ core.info(`Pushing changes to ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git push "${repoUrl}" HEAD:"${branchName}"`, { stdio: "inherit" });
+ core.info(`Successfully pushed changes to ${branchName} branch`);
+ } catch (error) {
+ core.setFailed(`Failed to push changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+ main().catch(error => {
+ core.setFailed(`Unexpected error: ${error instanceof Error ? error.message : String(error)}`);
+ });
+
diff --git a/.github/workflows/dev.md b/.github/workflows/dev.md
index f41801d452..8bb1d670d8 100644
--- a/.github/workflows/dev.md
+++ b/.github/workflows/dev.md
@@ -2,7 +2,7 @@
on:
workflow_dispatch:
name: Dev
-description: List the last 3 issues using gh CLI
+description: Create a poem about GitHub and save it to repo-memory
timeout-minutes: 5
strict: false
engine: claude
@@ -10,21 +10,37 @@ permissions:
contents: read
issues: read
tools:
+ repo-memory:
+ branch-name: memory/poems
+ description: "Poem collection"
github: false
imports:
- shared/gh.md
---
-# List Last 3 Issues
+# Create a Poem and Save to Repo Memory
-List the last 3 issues in this repository using the gh CLI tool.
+Create a creative poem about GitHub and agentic workflows, then save it to the repo-memory.
## Task
-1. **Use gh CLI**: Use the `gh` tool to list the last 3 issues in this repository.
-
- Example invocation:
- ```
- gh with args: "issue list --limit 3 --repo ${{ github.repository }}"
- ```
+1. **Create a Poem**: Write a creative, fun poem about GitHub, automation, and agentic workflows.
+ - The poem should be 8-12 lines
+ - Include references to GitHub features like Issues, Pull Requests, Actions, etc.
+ - Make it engaging and technical but fun
-2. **Display results**: Show the output from the gh CLI command.
\ No newline at end of file
+2. **Save to Repo Memory**: Save the poem to `/tmp/gh-aw/repo-memory-default/memory/default/poem_{{ github.run_number }}.md`
+ - Use the run number in the filename to make it unique
+ - Include a header with the date and run information
+ - The file will be automatically committed and pushed to the `memory/poems` branch
+
+3. **List Previous Poems**: If there are other poem files in the repo memory, list them to show the history.
+
+## Example Poem Structure
+
+```markdown
+# Poem #{{ github.run_number }}
+Date: {{ current date }}
+Run ID: ${{ github.run_id }}
+
+[Your poem here]
+```
\ No newline at end of file
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index c9e8731682..cd0c7c7e89 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -1469,6 +1469,57 @@ tools:
# (optional)
version: null
+ # Repo memory configuration for git-based persistent storage
+ # (optional)
+ # This field supports multiple formats (oneOf):
+
+ # Option 1: Enable repo-memory with default settings
+ repo-memory: true
+
+ # Option 2: Enable repo-memory with default settings (same as true)
+ repo-memory: null
+
+ # Option 3: Repo-memory configuration object
+ repo-memory:
+ # Target repository for memory storage (default: current repository). Format:
+ # owner/repo
+ # (optional)
+ target-repo: "example-value"
+
+ # Git branch name for memory storage (default: memory/default)
+ # (optional)
+ branch-name: "example-value"
+
+ # (optional)
+ # This field supports multiple formats (oneOf):
+
+ # Option 1: Single file glob pattern for allowed files
+ file-glob: "example-value"
+
+ # Option 2: Array of file glob patterns for allowed files
+ file-glob: []
+ # Array items: string
+
+ # Maximum size per file in bytes (default: 10240 = 10KB)
+ # (optional)
+ max-file-size: 1
+
+ # Maximum file count per commit (default: 100)
+ # (optional)
+ max-file-count: 1
+
+ # Optional description for the memory that will be shown in the agent prompt
+ # (optional)
+ description: "Description of the workflow"
+
+ # Create orphaned branch if it doesn't exist (default: true)
+ # (optional)
+ create-orphan: true
+
+ # Option 4: Array of repo-memory configurations for multiple memory locations
+ repo-memory: []
+ # Array items: object
+
# Command name for the workflow
# (optional)
command: "example-value"
diff --git a/pkg/cli/workflows/test-copilot-repo-memory.md b/pkg/cli/workflows/test-copilot-repo-memory.md
new file mode 100644
index 0000000000..e1505e60b2
--- /dev/null
+++ b/pkg/cli/workflows/test-copilot-repo-memory.md
@@ -0,0 +1,62 @@
+---
+engine: copilot
+on:
+ workflow_dispatch:
+ inputs:
+ task:
+ description: 'Task to remember'
+ required: true
+ default: 'Store this information for later'
+
+tools:
+ repo-memory:
+ branch-name: memory/test-agent
+ description: "Test repo-memory persistence"
+ max-file-size: 524288 # 512KB
+ max-file-count: 10
+ github:
+ allowed: [get_repository]
+
+timeout-minutes: 5
+---
+
+# Test Copilot with Repo Memory Git-Based Storage
+
+You are a test agent that demonstrates the repo-memory functionality with Copilot engine using git-based persistent storage.
+
+## Task
+
+Your job is to:
+
+1. **Store a test task** in the repo-memory folder using file operations
+2. **Retrieve any previous tasks** that you've stored in previous runs
+3. **Report on the memory contents** including both current and historical tasks
+4. **Use GitHub tools** to get basic repository information
+
+## Instructions
+
+1. First, check what files exist in `/tmp/gh-aw/repo-memory-default/memory/default/` from previous runs
+2. Store a new test task: "Test task for run ${{ github.run_number }}" in a file in the memory folder
+3. List all files and contents you now have in the memory folder
+4. Get basic information about this repository using the GitHub tool
+5. Provide a summary of:
+ - What you found from before (if anything)
+ - What you just stored
+ - Basic repository information
+
+## Expected Behavior
+
+- **First run**: Should show empty memory folder (or new orphan branch created), then store the new task
+- **Subsequent runs**: Should show previously stored files from git branch, then add the new one
+- **File persistence**: Files persist across workflow runs via git branch storage
+- **Version control**: All changes are committed to the `memory/test-agent` branch
+- **Automatic push**: Changes are automatically committed and pushed after workflow completion
+- **Conflict resolution**: Current version wins in case of merge conflicts
+
+This workflow tests that the repo-memory configuration properly:
+- Clones the git branch at workflow start (creates orphan branch if needed)
+- Provides simple file access at `/tmp/gh-aw/repo-memory-default/memory/default/`
+- Persists data between runs using git branch storage
+- Commits and pushes changes automatically at workflow end
+- Works with Copilot engine and file operations
+- Integrates with other tools like GitHub
diff --git a/pkg/parser/schemas/included_file_schema.json b/pkg/parser/schemas/included_file_schema.json
index 52f82e9efe..00e78b3852 100644
--- a/pkg/parser/schemas/included_file_schema.json
+++ b/pkg/parser/schemas/included_file_schema.json
@@ -23,7 +23,17 @@
"description": "Whether input is required"
},
"default": {
- "oneOf": [{ "type": "string" }, { "type": "number" }, { "type": "boolean" }],
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ],
"description": "Default value for the input"
},
"type": {
@@ -225,6 +235,167 @@
"additionalProperties": true
}
]
+ },
+ "repo-memory": {
+ "description": "Repo memory configuration for git-based persistent storage",
+ "oneOf": [
+ {
+ "type": "boolean",
+ "description": "Enable repo-memory with default settings"
+ },
+ {
+ "type": "null",
+ "description": "Enable repo-memory with default settings (same as true)"
+ },
+ {
+ "type": "object",
+ "description": "Repo-memory configuration object",
+ "properties": {
+ "target-repo": {
+ "type": "string",
+ "description": "Target repository for memory storage (default: current repository). Format: owner/repo"
+ },
+ "branch-name": {
+ "type": "string",
+ "description": "Git branch name for memory storage (default: memory/default)"
+ },
+ "file-glob": {
+ "oneOf": [
+ {
+ "type": "string",
+ "description": "Single file glob pattern for allowed files"
+ },
+ {
+ "type": "array",
+ "description": "Array of file glob patterns for allowed files",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "max-file-size": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 104857600,
+ "description": "Maximum size per file in bytes (default: 10240 = 10KB)"
+ },
+ "max-file-count": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 1000,
+ "description": "Maximum file count per commit (default: 100)"
+ },
+ "description": {
+ "type": "string",
+ "description": "Optional description for the memory that will be shown in the agent prompt"
+ },
+ "create-orphan": {
+ "type": "boolean",
+ "description": "Create orphaned branch if it doesn't exist (default: true)"
+ }
+ },
+ "additionalProperties": false,
+ "examples": [
+ {
+ "branch-name": "memory/session-state"
+ },
+ {
+ "target-repo": "myorg/memory-repo",
+ "branch-name": "memory/agent-notes",
+ "max-file-size": 524288
+ }
+ ]
+ },
+ {
+ "type": "array",
+ "description": "Array of repo-memory configurations for multiple memory locations",
+ "items": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Memory identifier (required for array notation, default: 'default')"
+ },
+ "target-repo": {
+ "type": "string",
+ "description": "Target repository for memory storage (default: current repository). Format: owner/repo"
+ },
+ "branch-name": {
+ "type": "string",
+ "description": "Git branch name for memory storage (default: memory/{id})"
+ },
+ "file-glob": {
+ "oneOf": [
+ {
+ "type": "string",
+ "description": "Single file glob pattern for allowed files"
+ },
+ {
+ "type": "array",
+ "description": "Array of file glob patterns for allowed files",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "max-file-size": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 104857600,
+ "description": "Maximum size per file in bytes (default: 10240 = 10KB)"
+ },
+ "max-file-count": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 1000,
+ "description": "Maximum file count per commit (default: 100)"
+ },
+ "description": {
+ "type": "string",
+ "description": "Optional description for this memory that will be shown in the agent prompt"
+ },
+ "create-orphan": {
+ "type": "boolean",
+ "description": "Create orphaned branch if it doesn't exist (default: true)"
+ }
+ },
+ "additionalProperties": false
+ },
+ "minItems": 1,
+ "examples": [
+ [
+ {
+ "id": "default",
+ "branch-name": "memory/default"
+ },
+ {
+ "id": "session",
+ "branch-name": "memory/session"
+ }
+ ]
+ ]
+ }
+ ],
+ "examples": [
+ true,
+ null,
+ {
+ "branch-name": "memory/agent-state"
+ },
+ [
+ {
+ "id": "default",
+ "branch-name": "memory/default"
+ },
+ {
+ "id": "logs",
+ "branch-name": "memory/logs",
+ "max-file-size": 524288
+ }
+ ]
+ ]
}
},
"additionalProperties": {
@@ -676,7 +847,14 @@
}
},
"then": {
- "anyOf": [{ "required": ["command"] }, { "required": ["container"] }]
+ "anyOf": [
+ {
+ "required": ["command"]
+ },
+ {
+ "required": ["container"]
+ }
+ ]
}
}
]
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 0c3a0406bd..db409ec3a8 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -52,7 +52,17 @@
"type": "object",
"description": "Input values to pass to the imported workflow. Keys are input names declared in the imported workflow's inputs section, values can be strings or expressions.",
"additionalProperties": {
- "oneOf": [{ "type": "string" }, { "type": "number" }, { "type": "boolean" }]
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
}
}
}
@@ -64,7 +74,14 @@
["shared/mcp/gh-aw.md", "shared/jqschema.md", "shared/reporting.md"],
["../instructions/documentation.instructions.md"],
[".github/agents/my-agent.md"],
- [{ "path": "shared/discussions-data-fetch.md", "inputs": { "count": 50 } }]
+ [
+ {
+ "path": "shared/discussions-data-fetch.md",
+ "inputs": {
+ "count": 50
+ }
+ }
+ ]
]
},
"on": {
@@ -2634,6 +2651,167 @@
"additionalProperties": false
}
]
+ },
+ "repo-memory": {
+ "description": "Repo memory configuration for git-based persistent storage",
+ "oneOf": [
+ {
+ "type": "boolean",
+ "description": "Enable repo-memory with default settings"
+ },
+ {
+ "type": "null",
+ "description": "Enable repo-memory with default settings (same as true)"
+ },
+ {
+ "type": "object",
+ "description": "Repo-memory configuration object",
+ "properties": {
+ "target-repo": {
+ "type": "string",
+ "description": "Target repository for memory storage (default: current repository). Format: owner/repo"
+ },
+ "branch-name": {
+ "type": "string",
+ "description": "Git branch name for memory storage (default: memory/default)"
+ },
+ "file-glob": {
+ "oneOf": [
+ {
+ "type": "string",
+ "description": "Single file glob pattern for allowed files"
+ },
+ {
+ "type": "array",
+ "description": "Array of file glob patterns for allowed files",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "max-file-size": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 104857600,
+ "description": "Maximum size per file in bytes (default: 10240 = 10KB)"
+ },
+ "max-file-count": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 1000,
+ "description": "Maximum file count per commit (default: 100)"
+ },
+ "description": {
+ "type": "string",
+ "description": "Optional description for the memory that will be shown in the agent prompt"
+ },
+ "create-orphan": {
+ "type": "boolean",
+ "description": "Create orphaned branch if it doesn't exist (default: true)"
+ }
+ },
+ "additionalProperties": false,
+ "examples": [
+ {
+ "branch-name": "memory/session-state"
+ },
+ {
+ "target-repo": "myorg/memory-repo",
+ "branch-name": "memory/agent-notes",
+ "max-file-size": 524288
+ }
+ ]
+ },
+ {
+ "type": "array",
+ "description": "Array of repo-memory configurations for multiple memory locations",
+ "items": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Memory identifier for array notation (defaults to 'default' when omitted)"
+ },
+ "target-repo": {
+ "type": "string",
+ "description": "Target repository for memory storage (default: current repository). Format: owner/repo"
+ },
+ "branch-name": {
+ "type": "string",
+ "description": "Git branch name for memory storage (default: memory/{id})"
+ },
+ "file-glob": {
+ "oneOf": [
+ {
+ "type": "string",
+ "description": "Single file glob pattern for allowed files"
+ },
+ {
+ "type": "array",
+ "description": "Array of file glob patterns for allowed files",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "max-file-size": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 104857600,
+ "description": "Maximum size per file in bytes (default: 10240 = 10KB)"
+ },
+ "max-file-count": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 1000,
+ "description": "Maximum file count per commit (default: 100)"
+ },
+ "description": {
+ "type": "string",
+ "description": "Optional description for this memory that will be shown in the agent prompt"
+ },
+ "create-orphan": {
+ "type": "boolean",
+ "description": "Create orphaned branch if it doesn't exist (default: true)"
+ }
+ },
+ "additionalProperties": false
+ },
+ "minItems": 1,
+ "examples": [
+ [
+ {
+ "id": "default",
+ "branch-name": "memory/default"
+ },
+ {
+ "id": "session",
+ "branch-name": "memory/session"
+ }
+ ]
+ ]
+ }
+ ],
+ "examples": [
+ true,
+ null,
+ {
+ "branch-name": "memory/agent-state"
+ },
+ [
+ {
+ "id": "default",
+ "branch-name": "memory/default"
+ },
+ {
+ "id": "logs",
+ "branch-name": "memory/logs",
+ "max-file-size": 524288
+ }
+ ]
+ ]
}
},
"additionalProperties": {
@@ -4364,15 +4542,42 @@
"oneOf": [
{
"required": ["script"],
- "not": { "anyOf": [{ "required": ["run"] }, { "required": ["py"] }] }
+ "not": {
+ "anyOf": [
+ {
+ "required": ["run"]
+ },
+ {
+ "required": ["py"]
+ }
+ ]
+ }
},
{
"required": ["run"],
- "not": { "anyOf": [{ "required": ["script"] }, { "required": ["py"] }] }
+ "not": {
+ "anyOf": [
+ {
+ "required": ["script"]
+ },
+ {
+ "required": ["py"]
+ }
+ ]
+ }
},
{
"required": ["py"],
- "not": { "anyOf": [{ "required": ["script"] }, { "required": ["run"] }] }
+ "not": {
+ "anyOf": [
+ {
+ "required": ["script"]
+ },
+ {
+ "required": ["run"]
+ }
+ ]
+ }
}
]
}
@@ -4770,7 +4975,14 @@
}
},
"then": {
- "anyOf": [{ "required": ["command"] }, { "required": ["container"] }]
+ "anyOf": [
+ {
+ "required": ["command"]
+ },
+ {
+ "required": ["container"]
+ }
+ ]
}
}
]
diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go
index 6eaf0254c3..81e104bd74 100644
--- a/pkg/workflow/compiler.go
+++ b/pkg/workflow/compiler.go
@@ -242,6 +242,7 @@ type WorkflowData struct {
SafeInputs *SafeInputsConfig // safe-inputs configuration for custom MCP tools
Roles []string // permission levels required to trigger workflow
CacheMemoryConfig *CacheMemoryConfig // parsed cache-memory configuration
+ RepoMemoryConfig *RepoMemoryConfig // parsed repo-memory configuration
SafetyPrompt bool // whether to include XPIA safety prompt (default true)
Runtimes map[string]any // runtime version overrides from frontmatter
ToolsTimeout int // timeout in seconds for tool/MCP operations (0 = use engine default)
@@ -1247,6 +1248,17 @@ func (c *Compiler) ParseWorkflowFile(markdownPath string) (*WorkflowData, error)
}
workflowData.CacheMemoryConfig = cacheMemoryConfig
+ // Extract repo-memory config and check for errors
+ toolsConfig, err := ParseToolsConfig(tools)
+ if err != nil {
+ return nil, err
+ }
+ repoMemoryConfig, err := c.extractRepoMemoryConfig(toolsConfig)
+ if err != nil {
+ return nil, err
+ }
+ workflowData.RepoMemoryConfig = repoMemoryConfig
+
// Process stop-after configuration from the on: section
err = c.processStopAfterConfiguration(result.Frontmatter, workflowData, markdownPath)
if err != nil {
diff --git a/pkg/workflow/compiler_jobs.go b/pkg/workflow/compiler_jobs.go
index 647852c07f..f23370fa5c 100644
--- a/pkg/workflow/compiler_jobs.go
+++ b/pkg/workflow/compiler_jobs.go
@@ -195,6 +195,38 @@ func (c *Compiler) buildJobs(data *WorkflowData, markdownPath string) error {
return fmt.Errorf("failed to build custom jobs: %w", err)
}
+ // Build push_repo_memory job if repo-memory is configured
+ // This job downloads repo-memory artifacts and pushes changes to git branches
+ // It runs after agent job completes (even if it fails) and has contents: write permission
+ var pushRepoMemoryJobName string
+ if data.RepoMemoryConfig != nil && len(data.RepoMemoryConfig.Memories) > 0 {
+ compilerJobsLog.Print("Building push_repo_memory job")
+ pushRepoMemoryJob, err := c.buildPushRepoMemoryJob(data, threatDetectionEnabledForSafeJobs)
+ if err != nil {
+ return fmt.Errorf("failed to build push_repo_memory job: %w", err)
+ }
+ if pushRepoMemoryJob != nil {
+ // Add detection dependency if threat detection is enabled
+ if threatDetectionEnabledForSafeJobs {
+ pushRepoMemoryJob.Needs = append(pushRepoMemoryJob.Needs, constants.DetectionJobName)
+ compilerJobsLog.Print("Added detection dependency to push_repo_memory job")
+ }
+ if err := c.jobManager.AddJob(pushRepoMemoryJob); err != nil {
+ return fmt.Errorf("failed to add push_repo_memory job: %w", err)
+ }
+ pushRepoMemoryJobName = pushRepoMemoryJob.Name
+ compilerJobsLog.Printf("Successfully added push_repo_memory job: %s", pushRepoMemoryJobName)
+ }
+ }
+
+ // Update conclusion job to depend on push_repo_memory if it exists
+ if pushRepoMemoryJobName != "" {
+ if conclusionJob, exists := c.jobManager.GetJob("conclusion"); exists {
+ conclusionJob.Needs = append(conclusionJob.Needs, pushRepoMemoryJobName)
+ compilerJobsLog.Printf("Added push_repo_memory dependency to conclusion job")
+ }
+ }
+
compilerJobsLog.Print("Successfully built all jobs for workflow")
return nil
}
@@ -640,13 +672,19 @@ func (c *Compiler) buildSafeOutputsJobs(data *WorkflowData, jobName, markdownPat
// Note: noop processing is now handled inside the conclusion job, not as a separate job
// Build conclusion job if add-comment is configured OR if command trigger is configured with reactions
- // This job runs last, after all safe output jobs, to update the activation comment on failure
+ // This job runs last, after all safe output jobs (and push_repo_memory if configured), to update the activation comment on failure
// The buildConclusionJob function itself will decide whether to create the job based on the configuration
conclusionJob, err := c.buildConclusionJob(data, jobName, safeOutputJobNames)
if err != nil {
return fmt.Errorf("failed to build conclusion job: %w", err)
}
if conclusionJob != nil {
+ // If push_repo_memory job exists, conclusion should depend on it
+ // Check if the job was already created (it's created in buildJobs)
+ if _, exists := c.jobManager.GetJob("push_repo_memory"); exists {
+ conclusionJob.Needs = append(conclusionJob.Needs, "push_repo_memory")
+ compilerJobsLog.Printf("Added push_repo_memory dependency to conclusion job")
+ }
if err := c.jobManager.AddJob(conclusionJob); err != nil {
return fmt.Errorf("failed to add conclusion job: %w", err)
}
diff --git a/pkg/workflow/compiler_yaml.go b/pkg/workflow/compiler_yaml.go
index 4beb1167cf..8224ffcd61 100644
--- a/pkg/workflow/compiler_yaml.go
+++ b/pkg/workflow/compiler_yaml.go
@@ -319,6 +319,9 @@ func (c *Compiler) generateMainJobSteps(yaml *strings.Builder, data *WorkflowDat
// Add cache-memory steps if cache-memory configuration is present
generateCacheMemorySteps(yaml, data)
+ // Add repo-memory clone steps if repo-memory configuration is present
+ generateRepoMemorySteps(yaml, data)
+
// Configure git credentials for agentic workflows
gitConfigSteps := c.generateGitConfigurationSteps()
for _, line := range gitConfigSteps {
@@ -433,6 +436,9 @@ func (c *Compiler) generateMainJobSteps(yaml *strings.Builder, data *WorkflowDat
}
}
+ // Add repo-memory artifact upload to save state for push job
+ generateRepoMemoryArtifactUpload(yaml, data)
+
// upload assets if upload-asset is configured
if data.SafeOutputs != nil && data.SafeOutputs.UploadAssets != nil {
c.generateUploadAssets(yaml)
@@ -798,6 +804,9 @@ func (c *Compiler) generatePrompt(yaml *strings.Builder, data *WorkflowData) {
// Add cache memory prompt as separate step if enabled
c.generateCacheMemoryPromptStep(yaml, data.CacheMemoryConfig)
+ // Add repo memory prompt as separate step if enabled
+ c.generateRepoMemoryPromptStep(yaml, data.RepoMemoryConfig)
+
// Add safe outputs instructions to prompt when safe-outputs are configured
// This tells agents to use the safeoutputs MCP server instead of gh CLI
c.generateSafeOutputsPromptStep(yaml, HasSafeOutputsEnabled(data.SafeOutputs))
diff --git a/pkg/workflow/js.go b/pkg/workflow/js.go
index b7a20d49a6..f9b3de55b1 100644
--- a/pkg/workflow/js.go
+++ b/pkg/workflow/js.go
@@ -147,6 +147,9 @@ var generateFooterScript string
//go:embed js/get_tracker_id.cjs
var getTrackerIDScript string
+//go:embed js/push_repo_memory.cjs
+var pushRepoMemoryScript string
+
//go:embed js/messages.cjs
var messagesScript string
diff --git a/pkg/workflow/js/push_repo_memory.cjs b/pkg/workflow/js/push_repo_memory.cjs
new file mode 100644
index 0000000000..0d738404fc
--- /dev/null
+++ b/pkg/workflow/js/push_repo_memory.cjs
@@ -0,0 +1,214 @@
+// @ts-check
+// Runs in the push_repo_memory job: validates and pushes memory artifact files to a git branch.
+
+const fs = require("fs");
+const path = require("path");
+const { execSync } = require("child_process");
+
+/**
+ * Push repo-memory changes to git branch
+ * Environment variables:
+ * ARTIFACT_DIR: Path to the downloaded artifact directory containing memory files
+ * MEMORY_ID: Memory identifier (used for subdirectory path)
+ * TARGET_REPO: Target repository (owner/name)
+ * BRANCH_NAME: Branch name to push to
+ * MAX_FILE_SIZE: Maximum file size in bytes
+ * MAX_FILE_COUNT: Maximum number of files per commit
+ * FILE_GLOB_FILTER: Optional space-separated list of file patterns (e.g., "*.md *.txt")
+ * GH_TOKEN: GitHub token for authentication
+ * GITHUB_RUN_ID: Workflow run ID for commit messages
+ */
+
+async function main() {
+ const artifactDir = process.env.ARTIFACT_DIR;
+ const memoryId = process.env.MEMORY_ID;
+ const targetRepo = process.env.TARGET_REPO;
+ const branchName = process.env.BRANCH_NAME;
+ const maxFileSize = parseInt(process.env.MAX_FILE_SIZE || "10240", 10);
+ const maxFileCount = parseInt(process.env.MAX_FILE_COUNT || "100", 10);
+ const fileGlobFilter = process.env.FILE_GLOB_FILTER || "";
+ const ghToken = process.env.GH_TOKEN;
+ const githubRunId = process.env.GITHUB_RUN_ID || "unknown";
+
+ // Validate required environment variables
+ if (!artifactDir || !memoryId || !targetRepo || !branchName || !ghToken) {
+ core.setFailed("Missing required environment variables: ARTIFACT_DIR, MEMORY_ID, TARGET_REPO, BRANCH_NAME, GH_TOKEN");
+ return;
+ }
+
+ // Source directory with memory files (artifact location)
+ const sourceMemoryPath = path.join(artifactDir, "memory", memoryId);
+
+ // Check if artifact memory directory exists
+ if (!fs.existsSync(sourceMemoryPath)) {
+ core.info(`Memory directory not found in artifact: ${sourceMemoryPath}`);
+ return;
+ }
+
+ // We're already in the checked out repository (from checkout step)
+ const workspaceDir = process.env.GITHUB_WORKSPACE || process.cwd();
+ core.info(`Working in repository: ${workspaceDir}`);
+
+ // Disable sparse checkout to work with full branch content
+ // This is necessary because checkout was configured with sparse-checkout
+ core.info(`Disabling sparse checkout...`);
+ try {
+ execSync("git sparse-checkout disable", { stdio: "pipe" });
+ } catch (error) {
+ // Ignore if sparse checkout wasn't enabled
+ core.info("Sparse checkout was not enabled or already disabled");
+ }
+
+ // Checkout or create the memory branch
+ core.info(`Checking out branch: ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+
+ // Try to fetch the branch
+ try {
+ execSync(`git fetch "${repoUrl}" "${branchName}:${branchName}"`, { stdio: "pipe" });
+ execSync(`git checkout "${branchName}"`, { stdio: "inherit" });
+ core.info(`Checked out existing branch: ${branchName}`);
+ } catch (fetchError) {
+ // Branch doesn't exist, create orphan branch
+ core.info(`Branch ${branchName} does not exist, creating orphan branch...`);
+ execSync(`git checkout --orphan "${branchName}"`, { stdio: "inherit" });
+ execSync("git rm -rf . || true", { stdio: "pipe" });
+ core.info(`Created orphan branch: ${branchName}`);
+ }
+ } catch (error) {
+ core.setFailed(`Failed to checkout branch: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+
+ // Create destination directory in repo
+ const destMemoryPath = path.join(workspaceDir, "memory", memoryId);
+ fs.mkdirSync(destMemoryPath, { recursive: true });
+ core.info(`Destination directory: ${destMemoryPath}`);
+
+ // Read files from artifact directory and validate before copying
+ let filesToCopy = [];
+ try {
+ const files = fs.readdirSync(sourceMemoryPath, { withFileTypes: true });
+
+ for (const file of files) {
+ if (!file.isFile()) {
+ continue; // Skip directories
+ }
+
+ const fileName = file.name;
+ const sourceFilePath = path.join(sourceMemoryPath, fileName);
+ const stats = fs.statSync(sourceFilePath);
+
+ // Validate file name patterns if filter is set
+ if (fileGlobFilter) {
+ const patterns = fileGlobFilter.split(/\s+/).map(pattern => {
+ const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, "[^/]*");
+ return new RegExp(`^${regexPattern}$`);
+ });
+
+ if (!patterns.some(pattern => pattern.test(fileName))) {
+ core.error(`File does not match allowed patterns: ${fileName}`);
+ core.error(`Allowed patterns: ${fileGlobFilter}`);
+ core.setFailed("File pattern validation failed");
+ return;
+ }
+ }
+
+ // Validate file size
+ if (stats.size > maxFileSize) {
+ core.error(`File exceeds size limit: ${fileName} (${stats.size} bytes > ${maxFileSize} bytes)`);
+ core.setFailed("File size validation failed");
+ return;
+ }
+
+ filesToCopy.push({ name: fileName, source: sourceFilePath, size: stats.size });
+ }
+ } catch (error) {
+ core.setFailed(`Failed to read artifact directory: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+
+ // Validate file count
+ if (filesToCopy.length > maxFileCount) {
+ core.setFailed(`Too many files (${filesToCopy.length} > ${maxFileCount})`);
+ return;
+ }
+
+ if (filesToCopy.length === 0) {
+ core.info("No files to copy from artifact");
+ return;
+ }
+
+ core.info(`Copying ${filesToCopy.length} validated file(s)...`);
+
+ // Copy files to destination
+ for (const file of filesToCopy) {
+ const destFilePath = path.join(destMemoryPath, file.name);
+ try {
+ fs.copyFileSync(file.source, destFilePath);
+ core.info(`Copied: ${file.name} (${file.size} bytes)`);
+ } catch (error) {
+ core.setFailed(`Failed to copy file ${file.name}: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ }
+
+ // Check if we have any changes to commit
+ let hasChanges = false;
+ try {
+ const status = execSync("git status --porcelain", { encoding: "utf8" });
+ hasChanges = status.trim().length > 0;
+ } catch (error) {
+ core.setFailed(`Failed to check git status: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+
+ if (!hasChanges) {
+ core.info("No changes detected after copying files");
+ return;
+ }
+
+ core.info("Changes detected, committing and pushing...");
+
+ // Stage all changes
+ try {
+ execSync("git add .", { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to stage changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+
+ // Commit changes
+ try {
+ execSync(`git commit -m "Update repo memory from workflow run ${githubRunId}"`, { stdio: "inherit" });
+ } catch (error) {
+ core.setFailed(`Failed to commit changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+
+ // Pull with merge strategy (ours wins on conflicts)
+ core.info(`Pulling latest changes from ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git pull --no-rebase -X ours "${repoUrl}" "${branchName}"`, { stdio: "inherit" });
+ } catch (error) {
+ // Pull might fail if branch doesn't exist yet or on conflicts - this is acceptable
+ core.warning(`Pull failed (this may be expected): ${error instanceof Error ? error.message : String(error)}`);
+ }
+
+ // Push changes
+ core.info(`Pushing changes to ${branchName}...`);
+ try {
+ const repoUrl = `https://x-access-token:${ghToken}@github.com/${targetRepo}.git`;
+ execSync(`git push "${repoUrl}" HEAD:"${branchName}"`, { stdio: "inherit" });
+ core.info(`Successfully pushed changes to ${branchName} branch`);
+ } catch (error) {
+ core.setFailed(`Failed to push changes: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+}
+
+main().catch(error => {
+ core.setFailed(`Unexpected error: ${error instanceof Error ? error.message : String(error)}`);
+});
diff --git a/pkg/workflow/mcp_config_validation.go b/pkg/workflow/mcp_config_validation.go
index 569aeccd45..45a703ceb4 100644
--- a/pkg/workflow/mcp_config_validation.go
+++ b/pkg/workflow/mcp_config_validation.go
@@ -65,6 +65,7 @@ func ValidateMCPConfigs(tools map[string]any) error {
"serena": true,
"agentic-workflows": true,
"cache-memory": true,
+ "repo-memory": true,
"bash": true,
"edit": true,
"web-fetch": true,
diff --git a/pkg/workflow/repo_memory.go b/pkg/workflow/repo_memory.go
new file mode 100644
index 0000000000..d49ca5768b
--- /dev/null
+++ b/pkg/workflow/repo_memory.go
@@ -0,0 +1,562 @@
+package workflow
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/logger"
+)
+
+var repoMemoryLog = logger.New("workflow:repo_memory")
+
+// RepoMemoryConfig holds configuration for repo-memory functionality
+type RepoMemoryConfig struct {
+ Memories []RepoMemoryEntry `yaml:"memories,omitempty"` // repo-memory configurations
+}
+
+// RepoMemoryEntry represents a single repo-memory configuration
+type RepoMemoryEntry struct {
+ ID string `yaml:"id"` // memory identifier (defaults to "default" when omitted)
+ TargetRepo string `yaml:"target-repo,omitempty"` // target repository (default: current repo)
+ BranchName string `yaml:"branch-name,omitempty"` // branch name (default: memory/{id})
+ FileGlob []string `yaml:"file-glob,omitempty"` // file glob patterns for allowed files
+ MaxFileSize int `yaml:"max-file-size,omitempty"` // maximum size per file in bytes (default: 10KB)
+ MaxFileCount int `yaml:"max-file-count,omitempty"` // maximum file count per commit (default: 100)
+ Description string `yaml:"description,omitempty"` // optional description for this memory
+ CreateOrphan bool `yaml:"create-orphan,omitempty"` // create orphaned branch if missing (default: true)
+}
+
+// RepoMemoryToolConfig represents the configuration for repo-memory in tools
+type RepoMemoryToolConfig struct {
+ // Can be boolean, object, or array - handled by this file
+ Raw any `yaml:"-"`
+}
+
+// generateDefaultBranchName generates a default branch name for a given memory ID
+func generateDefaultBranchName(memoryID string) string {
+ if memoryID == "default" {
+ return "memory/default"
+ }
+ return fmt.Sprintf("memory/%s", memoryID)
+}
+
+// extractRepoMemoryConfig extracts repo-memory configuration from tools section
+func (c *Compiler) extractRepoMemoryConfig(toolsConfig *ToolsConfig) (*RepoMemoryConfig, error) {
+ // Check if repo-memory tool is configured
+ if toolsConfig == nil || toolsConfig.RepoMemory == nil {
+ return nil, nil
+ }
+
+ repoMemoryLog.Print("Extracting repo-memory configuration from ToolsConfig")
+
+ config := &RepoMemoryConfig{}
+ repoMemoryValue := toolsConfig.RepoMemory.Raw
+
+ // Handle nil value (simple enable with defaults) - same as true
+ if repoMemoryValue == nil {
+ config.Memories = []RepoMemoryEntry{
+ {
+ ID: "default",
+ BranchName: generateDefaultBranchName("default"),
+ MaxFileSize: 10240, // 10KB
+ MaxFileCount: 100,
+ CreateOrphan: true,
+ },
+ }
+ return config, nil
+ }
+
+ // Handle boolean value (simple enable/disable)
+ if boolValue, ok := repoMemoryValue.(bool); ok {
+ if boolValue {
+ // Create a single default memory entry
+ config.Memories = []RepoMemoryEntry{
+ {
+ ID: "default",
+ BranchName: generateDefaultBranchName("default"),
+ MaxFileSize: 10240, // 10KB
+ MaxFileCount: 100,
+ CreateOrphan: true,
+ },
+ }
+ }
+ // If false, return empty config (empty array means disabled)
+ return config, nil
+ }
+
+ // Handle array of memory configurations
+ if memoryArray, ok := repoMemoryValue.([]any); ok {
+ repoMemoryLog.Printf("Processing memory array with %d entries", len(memoryArray))
+ config.Memories = make([]RepoMemoryEntry, 0, len(memoryArray))
+ for _, item := range memoryArray {
+ if memoryMap, ok := item.(map[string]any); ok {
+ entry := RepoMemoryEntry{
+ MaxFileSize: 10240, // 10KB default
+ MaxFileCount: 100, // 100 files default
+ CreateOrphan: true, // create orphan by default
+ }
+
+ // ID is required for array notation
+ if id, exists := memoryMap["id"]; exists {
+ if idStr, ok := id.(string); ok {
+ entry.ID = idStr
+ }
+ }
+ // Use "default" if no ID specified
+ if entry.ID == "" {
+ entry.ID = "default"
+ }
+
+ // Parse target-repo
+ if targetRepo, exists := memoryMap["target-repo"]; exists {
+ if repoStr, ok := targetRepo.(string); ok {
+ entry.TargetRepo = repoStr
+ }
+ }
+
+ // Parse branch-name
+ if branchName, exists := memoryMap["branch-name"]; exists {
+ if branchStr, ok := branchName.(string); ok {
+ entry.BranchName = branchStr
+ }
+ }
+ // Set default branch name if not specified
+ if entry.BranchName == "" {
+ entry.BranchName = generateDefaultBranchName(entry.ID)
+ }
+
+ // Parse file-glob
+ if fileGlob, exists := memoryMap["file-glob"]; exists {
+ if globArray, ok := fileGlob.([]any); ok {
+ entry.FileGlob = make([]string, 0, len(globArray))
+ for _, item := range globArray {
+ if str, ok := item.(string); ok {
+ entry.FileGlob = append(entry.FileGlob, str)
+ }
+ }
+ } else if globStr, ok := fileGlob.(string); ok {
+ // Allow single string to be treated as array of one
+ entry.FileGlob = []string{globStr}
+ }
+ }
+
+ // Parse max-file-size
+ if maxFileSize, exists := memoryMap["max-file-size"]; exists {
+ if sizeInt, ok := maxFileSize.(int); ok {
+ entry.MaxFileSize = sizeInt
+ } else if sizeFloat, ok := maxFileSize.(float64); ok {
+ entry.MaxFileSize = int(sizeFloat)
+ } else if sizeUint64, ok := maxFileSize.(uint64); ok {
+ entry.MaxFileSize = int(sizeUint64)
+ }
+ }
+
+ // Parse max-file-count
+ if maxFileCount, exists := memoryMap["max-file-count"]; exists {
+ if countInt, ok := maxFileCount.(int); ok {
+ entry.MaxFileCount = countInt
+ } else if countFloat, ok := maxFileCount.(float64); ok {
+ entry.MaxFileCount = int(countFloat)
+ } else if countUint64, ok := maxFileCount.(uint64); ok {
+ entry.MaxFileCount = int(countUint64)
+ }
+ }
+
+ // Parse description
+ if description, exists := memoryMap["description"]; exists {
+ if descStr, ok := description.(string); ok {
+ entry.Description = descStr
+ }
+ }
+
+ // Parse create-orphan
+ if createOrphan, exists := memoryMap["create-orphan"]; exists {
+ if orphanBool, ok := createOrphan.(bool); ok {
+ entry.CreateOrphan = orphanBool
+ }
+ }
+
+ config.Memories = append(config.Memories, entry)
+ }
+ }
+
+ // Check for duplicate memory IDs
+ if err := validateNoDuplicateMemoryIDs(config.Memories); err != nil {
+ return nil, err
+ }
+
+ return config, nil
+ }
+
+ // Handle object configuration (single memory, backward compatible)
+ // Convert to array with single entry
+ if configMap, ok := repoMemoryValue.(map[string]any); ok {
+ entry := RepoMemoryEntry{
+ ID: "default",
+ BranchName: generateDefaultBranchName("default"),
+ MaxFileSize: 10240, // 10KB default
+ MaxFileCount: 100, // 100 files default
+ CreateOrphan: true, // create orphan by default
+ }
+
+ // Parse target-repo
+ if targetRepo, exists := configMap["target-repo"]; exists {
+ if repoStr, ok := targetRepo.(string); ok {
+ entry.TargetRepo = repoStr
+ }
+ }
+
+ // Parse branch-name
+ if branchName, exists := configMap["branch-name"]; exists {
+ if branchStr, ok := branchName.(string); ok {
+ entry.BranchName = branchStr
+ }
+ }
+
+ // Parse file-glob
+ if fileGlob, exists := configMap["file-glob"]; exists {
+ if globArray, ok := fileGlob.([]any); ok {
+ entry.FileGlob = make([]string, 0, len(globArray))
+ for _, item := range globArray {
+ if str, ok := item.(string); ok {
+ entry.FileGlob = append(entry.FileGlob, str)
+ }
+ }
+ } else if globStr, ok := fileGlob.(string); ok {
+ // Allow single string to be treated as array of one
+ entry.FileGlob = []string{globStr}
+ }
+ }
+
+ // Parse max-file-size
+ if maxFileSize, exists := configMap["max-file-size"]; exists {
+ if sizeInt, ok := maxFileSize.(int); ok {
+ entry.MaxFileSize = sizeInt
+ } else if sizeFloat, ok := maxFileSize.(float64); ok {
+ entry.MaxFileSize = int(sizeFloat)
+ } else if sizeUint64, ok := maxFileSize.(uint64); ok {
+ entry.MaxFileSize = int(sizeUint64)
+ }
+ }
+
+ // Parse max-file-count
+ if maxFileCount, exists := configMap["max-file-count"]; exists {
+ if countInt, ok := maxFileCount.(int); ok {
+ entry.MaxFileCount = countInt
+ } else if countFloat, ok := maxFileCount.(float64); ok {
+ entry.MaxFileCount = int(countFloat)
+ } else if countUint64, ok := maxFileCount.(uint64); ok {
+ entry.MaxFileCount = int(countUint64)
+ }
+ }
+
+ // Parse description
+ if description, exists := configMap["description"]; exists {
+ if descStr, ok := description.(string); ok {
+ entry.Description = descStr
+ }
+ }
+
+ // Parse create-orphan
+ if createOrphan, exists := configMap["create-orphan"]; exists {
+ if orphanBool, ok := createOrphan.(bool); ok {
+ entry.CreateOrphan = orphanBool
+ }
+ }
+
+ config.Memories = []RepoMemoryEntry{entry}
+ return config, nil
+ }
+
+ return nil, nil
+}
+
+// validateNoDuplicateMemoryIDs checks for duplicate memory IDs and returns an error if found
+func validateNoDuplicateMemoryIDs(memories []RepoMemoryEntry) error {
+ seen := make(map[string]bool)
+ for _, memory := range memories {
+ if seen[memory.ID] {
+ return fmt.Errorf("duplicate memory ID found: '%s'. Each memory must have a unique ID", memory.ID)
+ }
+ seen[memory.ID] = true
+ }
+ return nil
+}
+
+// generateRepoMemoryArtifactUpload generates steps to upload repo-memory directories as artifacts
+// This runs at the end of the agent job (always condition) to save the state
+func generateRepoMemoryArtifactUpload(builder *strings.Builder, data *WorkflowData) {
+ if data.RepoMemoryConfig == nil || len(data.RepoMemoryConfig.Memories) == 0 {
+ return
+ }
+
+ repoMemoryLog.Printf("Generating repo-memory artifact upload steps for %d memories", len(data.RepoMemoryConfig.Memories))
+
+ builder.WriteString(" # Upload repo memory as artifacts for push job\n")
+
+ for _, memory := range data.RepoMemoryConfig.Memories {
+ // Determine the memory directory
+ memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s", memory.ID)
+
+ // Step: Upload repo-memory directory as artifact
+ builder.WriteString(fmt.Sprintf(" - name: Upload repo-memory artifact (%s)\n", memory.ID))
+ builder.WriteString(" if: always()\n")
+ builder.WriteString(fmt.Sprintf(" uses: %s\n", GetActionPin("actions/upload-artifact")))
+ builder.WriteString(" with:\n")
+ builder.WriteString(fmt.Sprintf(" name: repo-memory-%s\n", memory.ID))
+ builder.WriteString(fmt.Sprintf(" path: %s\n", memoryDir))
+ builder.WriteString(" retention-days: 1\n")
+ builder.WriteString(" if-no-files-found: ignore\n")
+ }
+}
+
+// generateRepoMemoryPushSteps generates steps to push changes back to the repo-memory branches
+// This runs at the end of the workflow (always condition) to persist any changes made
+func generateRepoMemoryPushSteps(builder *strings.Builder, data *WorkflowData) {
+ if data.RepoMemoryConfig == nil || len(data.RepoMemoryConfig.Memories) == 0 {
+ return
+ }
+
+ repoMemoryLog.Printf("Generating repo-memory push steps for %d memories", len(data.RepoMemoryConfig.Memories))
+
+ builder.WriteString(" # Push repo memory changes back to git branches\n")
+
+ for _, memory := range data.RepoMemoryConfig.Memories {
+ // Determine the target repository
+ targetRepo := memory.TargetRepo
+ if targetRepo == "" {
+ targetRepo = "${{ github.repository }}"
+ }
+
+ // Determine the memory directory
+ memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s", memory.ID)
+
+ // Step: Push changes to repo-memory branch
+ builder.WriteString(fmt.Sprintf(" - name: Push repo-memory changes (%s)\n", memory.ID))
+ builder.WriteString(" if: always()\n")
+ builder.WriteString(" env:\n")
+ builder.WriteString(" GH_TOKEN: ${{ github.token }}\n")
+ builder.WriteString(" run: |\n")
+ builder.WriteString(" set -e\n")
+ builder.WriteString(fmt.Sprintf(" cd \"%s\" || exit 0\n", memoryDir))
+ builder.WriteString(" \n")
+ builder.WriteString(" # Check if we have any changes to commit\n")
+ builder.WriteString(" if [ -n \"$(git status --porcelain)\" ]; then\n")
+ builder.WriteString(" echo \"Changes detected in repo memory, committing and pushing...\"\n")
+ builder.WriteString(" \n")
+
+ // Add file validation if constraints are specified
+ if len(memory.FileGlob) > 0 || memory.MaxFileSize > 0 || memory.MaxFileCount > 0 {
+ builder.WriteString(" # Validate files before committing\n")
+
+ if memory.MaxFileSize > 0 {
+ builder.WriteString(fmt.Sprintf(" # Check file sizes (max: %d bytes)\n", memory.MaxFileSize))
+ builder.WriteString(fmt.Sprintf(" if find . -type f -size +%dc | grep -q .; then\n", memory.MaxFileSize))
+ builder.WriteString(" echo \"Error: Files exceed maximum size limit\"\n")
+ builder.WriteString(fmt.Sprintf(" find . -type f -size +%dc -exec ls -lh {} \\;\n", memory.MaxFileSize))
+ builder.WriteString(" exit 1\n")
+ builder.WriteString(" fi\n")
+ builder.WriteString(" \n")
+ }
+
+ if memory.MaxFileCount > 0 {
+ builder.WriteString(fmt.Sprintf(" # Check file count (max: %d files)\n", memory.MaxFileCount))
+ builder.WriteString(" FILE_COUNT=$(git status --porcelain | wc -l)\n")
+ builder.WriteString(fmt.Sprintf(" if [ \"$FILE_COUNT\" -gt %d ]; then\n", memory.MaxFileCount))
+ builder.WriteString(fmt.Sprintf(" echo \"Error: Too many files to commit ($FILE_COUNT > %d)\"\n", memory.MaxFileCount))
+ builder.WriteString(" exit 1\n")
+ builder.WriteString(" fi\n")
+ builder.WriteString(" \n")
+ }
+ }
+
+ builder.WriteString(" # Add all changes\n")
+ builder.WriteString(" git add -A\n")
+ builder.WriteString(" \n")
+ builder.WriteString(" # Commit changes\n")
+ builder.WriteString(" git commit -m \"Update memory from workflow run ${{ github.run_id }}\"\n")
+ builder.WriteString(" \n")
+ builder.WriteString(" # Pull with ours merge strategy (our changes win in conflicts)\n")
+ builder.WriteString(" set +e\n")
+ builder.WriteString(fmt.Sprintf(" git pull --no-rebase -s recursive -X ours \"https://x-access-token:${GH_TOKEN}@github.com/%s.git\" \"%s\" 2>&1\n",
+ targetRepo, memory.BranchName))
+ builder.WriteString(" PULL_EXIT_CODE=$?\n")
+ builder.WriteString(" set -e\n")
+ builder.WriteString(" \n")
+ builder.WriteString(" # Push changes (force push if needed due to conflict resolution)\n")
+ builder.WriteString(fmt.Sprintf(" git push \"https://x-access-token:${GH_TOKEN}@github.com/%s.git\" \"HEAD:%s\"\n",
+ targetRepo, memory.BranchName))
+ builder.WriteString(" \n")
+ builder.WriteString(" echo \"Successfully pushed changes to repo memory\"\n")
+ builder.WriteString(" else\n")
+ builder.WriteString(" echo \"No changes in repo memory, skipping push\"\n")
+ builder.WriteString(" fi\n")
+ }
+}
+
// generateRepoMemorySteps generates git steps for the repo-memory configuration.
//
// For each configured memory it emits one "Clone repo-memory branch (<id>)"
// step into the agent job's YAML: the step shallow-clones the memory branch
// into /tmp/gh-aw/repo-memory-<id>; if the branch does not exist it either
// initializes an orphan branch (when create-orphan is enabled) or just creates
// an empty directory, and in all cases ensures the memory/<id> subdirectory
// exists for the agent to write into. No-op when no repo-memory is configured.
func generateRepoMemorySteps(builder *strings.Builder, data *WorkflowData) {
	if data.RepoMemoryConfig == nil || len(data.RepoMemoryConfig.Memories) == 0 {
		return
	}

	repoMemoryLog.Printf("Generating repo-memory steps for %d memories", len(data.RepoMemoryConfig.Memories))

	builder.WriteString(" # Repo memory git-based storage configuration from frontmatter processed below\n")

	for _, memory := range data.RepoMemoryConfig.Memories {
		// Determine the target repository (defaults to the workflow's own repository)
		targetRepo := memory.TargetRepo
		if targetRepo == "" {
			targetRepo = "${{ github.repository }}"
		}

		// Determine the memory directory
		memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s", memory.ID)

		// Step 1: Clone the repo-memory branch
		builder.WriteString(fmt.Sprintf(" - name: Clone repo-memory branch (%s)\n", memory.ID))
		builder.WriteString(" env:\n")
		builder.WriteString(" GH_TOKEN: ${{ github.token }}\n")
		builder.WriteString(fmt.Sprintf(" BRANCH_NAME: %s\n", memory.BranchName))
		builder.WriteString(" run: |\n")
		// A failed clone is the expected first-run case (branch absent), so the
		// script disables errexit around the clone and branches on the exit code.
		builder.WriteString(" set +e # Don't fail if branch doesn't exist\n")
		// The token is embedded in the clone URL so the fetch is authenticated
		// with the job's GH_TOKEN; stderr is suppressed to avoid noisy output
		// when the branch is simply missing.
		builder.WriteString(fmt.Sprintf(" git clone --depth 1 --single-branch --branch \"%s\" \"https://x-access-token:${GH_TOKEN}@github.com/%s.git\" \"%s\" 2>/dev/null\n",
			memory.BranchName, targetRepo, memoryDir))
		builder.WriteString(" CLONE_EXIT_CODE=$?\n")
		builder.WriteString(" set -e\n")
		builder.WriteString(" \n")
		builder.WriteString(" if [ $CLONE_EXIT_CODE -ne 0 ]; then\n")

		if memory.CreateOrphan {
			// Branch missing: initialize a fresh repo on an orphan branch so the
			// first push can create the memory branch from scratch.
			builder.WriteString(fmt.Sprintf(" echo \"Branch %s does not exist, creating orphan branch\"\n", memory.BranchName))
			builder.WriteString(fmt.Sprintf(" mkdir -p \"%s\"\n", memoryDir))
			builder.WriteString(fmt.Sprintf(" cd \"%s\"\n", memoryDir))
			builder.WriteString(" git init\n")
			builder.WriteString(" git checkout --orphan \"$BRANCH_NAME\"\n")
			builder.WriteString(" git config user.name \"github-actions[bot]\"\n")
			builder.WriteString(" git config user.email \"github-actions[bot]@users.noreply.github.com\"\n")
			builder.WriteString(fmt.Sprintf(" git remote add origin \"https://x-access-token:${GH_TOKEN}@github.com/%s.git\"\n", targetRepo))
		} else {
			// Branch missing and orphan creation disabled: leave a plain
			// directory so later steps can still read/write files locally.
			builder.WriteString(fmt.Sprintf(" echo \"Branch %s does not exist and create-orphan is false, skipping\"\n", memory.BranchName))
			builder.WriteString(fmt.Sprintf(" mkdir -p \"%s\"\n", memoryDir))
		}

		builder.WriteString(" else\n")
		// Clone succeeded: configure the bot identity used for later commits.
		builder.WriteString(fmt.Sprintf(" echo \"Successfully cloned %s branch\"\n", memory.BranchName))
		builder.WriteString(fmt.Sprintf(" cd \"%s\"\n", memoryDir))
		builder.WriteString(" git config user.name \"github-actions[bot]\"\n")
		builder.WriteString(" git config user.email \"github-actions[bot]@users.noreply.github.com\"\n")
		builder.WriteString(" fi\n")
		builder.WriteString(" \n")

		// Create the memory subdirectory the agent prompt points at
		builder.WriteString(fmt.Sprintf(" mkdir -p \"%s/memory/%s\"\n", memoryDir, memory.ID))
		builder.WriteString(fmt.Sprintf(" echo \"Repo memory directory ready at %s/memory/%s\"\n", memoryDir, memory.ID))
	}
}
+
// buildPushRepoMemoryJob creates a job that downloads repo-memory artifacts and pushes them to git branches.
// This job runs after the agent job completes (even if it fails) and requires contents: write permission.
// If threat detection is enabled, it only runs when no threats were detected.
// Returns (nil, nil) when no repo-memory configuration is present; the error
// result is currently always nil (presumably kept so the signature matches the
// other job builders — confirm against siblings).
func (c *Compiler) buildPushRepoMemoryJob(data *WorkflowData, threatDetectionEnabled bool) (*Job, error) {
	if data.RepoMemoryConfig == nil || len(data.RepoMemoryConfig.Memories) == 0 {
		return nil, nil
	}

	repoMemoryLog.Printf("Building push_repo_memory job for %d memories (threatDetectionEnabled=%v)", len(data.RepoMemoryConfig.Memories), threatDetectionEnabled)

	var steps []string

	// Add checkout step to configure git (without checking out files)
	// We use sparse-checkout to avoid downloading files since we'll checkout the memory branch
	var checkoutStep strings.Builder
	checkoutStep.WriteString(" - name: Checkout repository\n")
	checkoutStep.WriteString(fmt.Sprintf(" uses: %s\n", GetActionPin("actions/checkout")))
	checkoutStep.WriteString(" with:\n")
	checkoutStep.WriteString(" persist-credentials: false\n")
	checkoutStep.WriteString(" sparse-checkout: .\n")
	steps = append(steps, checkoutStep.String())

	// Add git configuration step
	gitConfigSteps := c.generateGitConfigurationSteps()
	steps = append(steps, gitConfigSteps...)

	// Build steps as complete YAML strings
	for _, memory := range data.RepoMemoryConfig.Memories {
		// Download artifact step. continue-on-error tolerates a missing
		// artifact (presumably when the agent job failed before uploading it —
		// confirm against the uploader).
		var step strings.Builder
		step.WriteString(fmt.Sprintf(" - name: Download repo-memory artifact (%s)\n", memory.ID))
		step.WriteString(fmt.Sprintf(" uses: %s\n", GetActionPin("actions/download-artifact")))
		step.WriteString(" continue-on-error: true\n")
		step.WriteString(" with:\n")
		step.WriteString(fmt.Sprintf(" name: repo-memory-%s\n", memory.ID))
		step.WriteString(fmt.Sprintf(" path: /tmp/gh-aw/repo-memory-%s\n", memory.ID))
		steps = append(steps, step.String())
	}

	// Add push steps for each memory
	for _, memory := range data.RepoMemoryConfig.Memories {
		targetRepo := memory.TargetRepo
		if targetRepo == "" {
			targetRepo = "${{ github.repository }}"
		}

		artifactDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s", memory.ID)

		// Build file glob filter string (space-separated patterns passed to the
		// script through the FILE_GLOB_FILTER env var)
		fileGlobFilter := ""
		if len(memory.FileGlob) > 0 {
			fileGlobFilter = strings.Join(memory.FileGlob, " ")
		}

		// Build step with github-script action; all per-memory parameters are
		// handed to the JavaScript via environment variables.
		var step strings.Builder
		step.WriteString(fmt.Sprintf(" - name: Push repo-memory changes (%s)\n", memory.ID))
		step.WriteString(" if: always()\n")
		step.WriteString(fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script")))
		step.WriteString(" env:\n")
		step.WriteString(" GH_TOKEN: ${{ github.token }}\n")
		step.WriteString(" GITHUB_RUN_ID: ${{ github.run_id }}\n")
		step.WriteString(fmt.Sprintf(" ARTIFACT_DIR: %s\n", artifactDir))
		step.WriteString(fmt.Sprintf(" MEMORY_ID: %s\n", memory.ID))
		step.WriteString(fmt.Sprintf(" TARGET_REPO: %s\n", targetRepo))
		step.WriteString(fmt.Sprintf(" BRANCH_NAME: %s\n", memory.BranchName))
		step.WriteString(fmt.Sprintf(" MAX_FILE_SIZE: %d\n", memory.MaxFileSize))
		step.WriteString(fmt.Sprintf(" MAX_FILE_COUNT: %d\n", memory.MaxFileCount))
		if fileGlobFilter != "" {
			// Quote the value to prevent YAML alias interpretation of patterns like *.md
			step.WriteString(fmt.Sprintf(" FILE_GLOB_FILTER: \"%s\"\n", fileGlobFilter))
		}
		step.WriteString(" with:\n")
		step.WriteString(" script: |\n")

		// Add the JavaScript script with proper indentation
		// (FormatJavaScriptForYAML is assumed to return newline-terminated
		// lines — TODO confirm; the loop writes them verbatim)
		formattedScript := FormatJavaScriptForYAML(pushRepoMemoryScript)
		for _, line := range formattedScript {
			step.WriteString(line)
		}

		steps = append(steps, step.String())
	}

	// Set job condition based on threat detection
	// If threat detection is enabled, only run if detection passed
	// Otherwise, always run (even if agent job failed)
	jobCondition := "always()"
	if threatDetectionEnabled {
		jobCondition = "always() && needs.detection.outputs.success == 'true'"
	}

	job := &Job{
		Name:        "push_repo_memory",
		DisplayName: "", // No display name - job ID is sufficient
		RunsOn:      "runs-on: ubuntu-latest",
		If:          jobCondition,
		Permissions: "permissions:\n contents: write",
		Needs:       []string{"agent"}, // Detection dependency added by caller if needed
		Steps:       steps,
	}

	return job, nil
}
diff --git a/pkg/workflow/repo_memory_integration_test.go b/pkg/workflow/repo_memory_integration_test.go
new file mode 100644
index 0000000000..b8b76f709e
--- /dev/null
+++ b/pkg/workflow/repo_memory_integration_test.go
@@ -0,0 +1,299 @@
+package workflow
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/githubnext/gh-aw/pkg/testutil"
+)
+
// TestRepoMemoryIntegrationSimple tests basic repo-memory workflow compilation
// using the minimal boolean form (repo-memory: true), which should produce a
// single memory with the implicit "default" ID.
func TestRepoMemoryIntegrationSimple(t *testing.T) {
	tmpDir := testutil.TempDir(t, "test-*")
	workflowPath := filepath.Join(tmpDir, "test-workflow.md")

	content := `---
name: Test Repo Memory
on: workflow_dispatch
engine: copilot
tools:
 repo-memory: true
---

# Test Workflow

This workflow uses repo memory.
`

	if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil {
		t.Fatalf("Failed to write workflow file: %v", err)
	}

	// Compile the markdown workflow; this writes a sibling .lock.yml file.
	compiler := NewCompiler(false, "", "test")
	if err := compiler.CompileWorkflow(workflowPath); err != nil {
		t.Fatalf("Failed to compile workflow: %v", err)
	}

	// Read the generated lock file
	lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
	lockContent, err := os.ReadFile(lockPath)
	if err != nil {
		t.Fatalf("Failed to read lock file: %v", err)
	}
	lockFile := string(lockContent)

	// Check for clone step
	if !strings.Contains(lockFile, "Clone repo-memory branch (default)") {
		t.Error("Expected clone step in compiled workflow")
	}

	// Check for push step
	if !strings.Contains(lockFile, "Push repo-memory changes (default)") {
		t.Error("Expected push step in compiled workflow")
	}

	// Check for prompt (singular header for the single "default" memory)
	if !strings.Contains(lockFile, "## Repo Memory Available") {
		t.Error("Expected repo memory prompt in compiled workflow")
	}

	// Check for memory directory path
	if !strings.Contains(lockFile, "/tmp/gh-aw/repo-memory-default") {
		t.Error("Expected memory directory path in compiled workflow")
	}
}
+
// TestRepoMemoryIntegrationCustomConfig tests repo-memory with custom configuration:
// a non-default target repository, branch name, size limit, and description, all
// of which must surface in the compiled lock file.
func TestRepoMemoryIntegrationCustomConfig(t *testing.T) {
	tmpDir := testutil.TempDir(t, "test-*")
	workflowPath := filepath.Join(tmpDir, "test-workflow.md")

	content := `---
name: Test Repo Memory Custom
on: workflow_dispatch
engine: copilot
tools:
 repo-memory:
 target-repo: myorg/memory-repo
 branch-name: memory/agent-state
 max-file-size: 524288
 description: Agent state storage
---

# Test Workflow

This workflow uses custom repo memory configuration.
`

	if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil {
		t.Fatalf("Failed to write workflow file: %v", err)
	}

	// Compile the markdown workflow; this writes a sibling .lock.yml file.
	compiler := NewCompiler(false, "", "test")
	if err := compiler.CompileWorkflow(workflowPath); err != nil {
		t.Fatalf("Failed to compile workflow: %v", err)
	}

	// Read the generated lock file
	lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
	lockContent, err := os.ReadFile(lockPath)
	if err != nil {
		t.Fatalf("Failed to read lock file: %v", err)
	}
	lockFile := string(lockContent)

	// Check for custom branch name
	if !strings.Contains(lockFile, "memory/agent-state") {
		t.Error("Expected custom branch name in compiled workflow")
	}

	// Check for custom target repo
	if !strings.Contains(lockFile, "myorg/memory-repo") {
		t.Error("Expected custom target repo in compiled workflow")
	}

	// Check for custom description in prompt
	if !strings.Contains(lockFile, "Agent state storage") {
		t.Error("Expected custom description in prompt")
	}
}
+
// TestRepoMemoryIntegrationMultiple tests multiple repo-memory configurations
// declared via array notation: each entry must get its own clone/push steps and
// directory, and the prompt must switch to the plural "Locations" heading.
func TestRepoMemoryIntegrationMultiple(t *testing.T) {
	tmpDir := testutil.TempDir(t, "test-*")
	workflowPath := filepath.Join(tmpDir, "test-workflow.md")

	content := `---
name: Test Multiple Repo Memories
on: workflow_dispatch
engine: copilot
tools:
 repo-memory:
 - id: session
 branch-name: memory/session
 description: Session data
 - id: logs
 branch-name: memory/logs
 max-file-size: 2097152
---

# Test Workflow

This workflow uses multiple repo memories.
`

	if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil {
		t.Fatalf("Failed to write workflow file: %v", err)
	}

	// Compile the markdown workflow; this writes a sibling .lock.yml file.
	compiler := NewCompiler(false, "", "test")
	if err := compiler.CompileWorkflow(workflowPath); err != nil {
		t.Fatalf("Failed to compile workflow: %v", err)
	}

	// Read the generated lock file
	lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
	lockContent, err := os.ReadFile(lockPath)
	if err != nil {
		t.Fatalf("Failed to read lock file: %v", err)
	}
	lockFile := string(lockContent)

	// Check for both memory clones
	if !strings.Contains(lockFile, "Clone repo-memory branch (session)") {
		t.Error("Expected clone step for session memory")
	}
	if !strings.Contains(lockFile, "Clone repo-memory branch (logs)") {
		t.Error("Expected clone step for logs memory")
	}

	// Check for both memory pushes
	if !strings.Contains(lockFile, "Push repo-memory changes (session)") {
		t.Error("Expected push step for session memory")
	}
	if !strings.Contains(lockFile, "Push repo-memory changes (logs)") {
		t.Error("Expected push step for logs memory")
	}

	// Check for both directories
	if !strings.Contains(lockFile, "/tmp/gh-aw/repo-memory-session") {
		t.Error("Expected session memory directory")
	}
	if !strings.Contains(lockFile, "/tmp/gh-aw/repo-memory-logs") {
		t.Error("Expected logs memory directory")
	}

	// Check for plural form in prompt
	if !strings.Contains(lockFile, "## Repo Memory Locations Available") {
		t.Error("Expected plural form in prompt for multiple memories")
	}
}
+
// TestRepoMemoryIntegrationFileValidation tests file size and count validation:
// limits configured in frontmatter must show up in the compiled push step and
// be described in the agent prompt's constraints section.
func TestRepoMemoryIntegrationFileValidation(t *testing.T) {
	tmpDir := testutil.TempDir(t, "test-*")
	workflowPath := filepath.Join(tmpDir, "test-workflow.md")

	content := `---
name: Test Repo Memory Validation
on: workflow_dispatch
engine: copilot
tools:
 repo-memory:
 max-file-size: 524288
 max-file-count: 50
---

# Test Workflow

This workflow has file validation.
`

	if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil {
		t.Fatalf("Failed to write workflow file: %v", err)
	}

	// Compile the markdown workflow; this writes a sibling .lock.yml file.
	compiler := NewCompiler(false, "", "test")
	if err := compiler.CompileWorkflow(workflowPath); err != nil {
		t.Fatalf("Failed to compile workflow: %v", err)
	}

	// Read the generated lock file
	lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
	lockContent, err := os.ReadFile(lockPath)
	if err != nil {
		t.Fatalf("Failed to read lock file: %v", err)
	}
	lockFile := string(lockContent)

	// Check for file size validation (either the runtime error text or the
	// script's config variable name is accepted, tolerating script refactors)
	if !strings.Contains(lockFile, "File exceeds size limit") && !strings.Contains(lockFile, "maxFileSize") {
		t.Error("Expected file size validation in push step")
	}

	// Check for file count validation (same two-alternative tolerance)
	if !strings.Contains(lockFile, "Too many files") && !strings.Contains(lockFile, "maxFileCount") {
		t.Error("Expected file count validation in push step")
	}

	// Check for git user configuration
	if !strings.Contains(lockFile, "github-actions[bot]") {
		t.Error("Expected git user configuration as github-actions[bot]")
	}

	// Check constraints in prompt
	if !strings.Contains(lockFile, "**Constraints:**") {
		t.Error("Expected constraints section in prompt")
	}
}
+
// TestRepoMemoryDisabled tests that repo-memory can be disabled with false:
// no clone/push steps and no repo-memory prompt text may appear in the lock file.
func TestRepoMemoryDisabled(t *testing.T) {
	tmpDir := testutil.TempDir(t, "test-*")
	workflowPath := filepath.Join(tmpDir, "test-workflow.md")

	content := `---
name: Test Repo Memory Disabled
on: workflow_dispatch
engine: copilot
tools:
 repo-memory: false
---

# Test Workflow

This workflow has repo-memory disabled.
`

	if err := os.WriteFile(workflowPath, []byte(content), 0644); err != nil {
		t.Fatalf("Failed to write workflow file: %v", err)
	}

	// Compile the markdown workflow; this writes a sibling .lock.yml file.
	compiler := NewCompiler(false, "", "test")
	if err := compiler.CompileWorkflow(workflowPath); err != nil {
		t.Fatalf("Failed to compile workflow: %v", err)
	}

	// Read the generated lock file
	lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
	lockContent, err := os.ReadFile(lockPath)
	if err != nil {
		t.Fatalf("Failed to read lock file: %v", err)
	}
	lockFile := string(lockContent)

	// Check that repo-memory steps are NOT present
	if strings.Contains(lockFile, "Clone repo-memory branch") {
		t.Error("Should not have clone step when repo-memory is disabled")
	}

	if strings.Contains(lockFile, "Push repo-memory changes") {
		t.Error("Should not have push step when repo-memory is disabled")
	}

	if strings.Contains(lockFile, "## Repo Memory") {
		t.Error("Should not have repo memory prompt when disabled")
	}
}
diff --git a/pkg/workflow/repo_memory_prompt.go b/pkg/workflow/repo_memory_prompt.go
new file mode 100644
index 0000000000..8a07d8dfc6
--- /dev/null
+++ b/pkg/workflow/repo_memory_prompt.go
@@ -0,0 +1,114 @@
+package workflow
+
+import (
+ "fmt"
+ "strings"
+)
+
+// generateRepoMemoryPromptStep generates a separate step for repo memory instructions
+// when repo-memory is enabled, informing the agent about git-based persistent storage capabilities
+func (c *Compiler) generateRepoMemoryPromptStep(yaml *strings.Builder, config *RepoMemoryConfig) {
+ if config == nil || len(config.Memories) == 0 {
+ return
+ }
+
+ appendPromptStepWithHeredoc(yaml,
+ "Append repo memory instructions to prompt",
+ func(y *strings.Builder) {
+ generateRepoMemoryPromptSection(y, config)
+ })
+}
+
// generateRepoMemoryPromptSection generates the repo memory notification section for prompts
// when repo-memory is enabled, informing the agent about git-based persistent storage capabilities.
// A single memory whose ID is the implicit "default" gets the singular
// "## Repo Memory Available" section; any other shape (multiple memories, or a
// single memory with a custom ID) gets the plural "## Repo Memory Locations
// Available" section listing every memory. No-op for a nil or empty config.
func generateRepoMemoryPromptSection(yaml *strings.Builder, config *RepoMemoryConfig) {
	if config == nil || len(config.Memories) == 0 {
		return
	}

	yaml.WriteString(" \n")
	yaml.WriteString(" ---\n")
	yaml.WriteString(" \n")

	// Check if there's only one memory with ID "default" to use singular form
	if len(config.Memories) == 1 && config.Memories[0].ID == "default" {
		yaml.WriteString(" ## Repo Memory Available\n")
		yaml.WriteString(" \n")
		memory := config.Memories[0]
		// Directory path includes a trailing slash so example paths below can
		// be formed by plain concatenation.
		memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s/memory/%s/", memory.ID, memory.ID)

		// Lead sentence, with the optional user-supplied description appended.
		if memory.Description != "" {
			yaml.WriteString(fmt.Sprintf(" You have access to a persistent repo memory folder at `%s` where you can read and write files that are stored in a git branch. %s\n", memoryDir, memory.Description))
		} else {
			yaml.WriteString(fmt.Sprintf(" You have access to a persistent repo memory folder at `%s` where you can read and write files that are stored in a git branch.\n", memoryDir))
		}
		yaml.WriteString(" \n")
		yaml.WriteString(" - **Read/Write Access**: You can freely read from and write to any files in this folder\n")
		yaml.WriteString(fmt.Sprintf(" - **Git Branch Storage**: Files are stored in the `%s` branch", memory.BranchName))
		if memory.TargetRepo != "" {
			yaml.WriteString(fmt.Sprintf(" of repository `%s`\n", memory.TargetRepo))
		} else {
			yaml.WriteString(" of the current repository\n")
		}
		yaml.WriteString(" - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes\n")
		yaml.WriteString(" - **Merge Strategy**: In case of conflicts, your changes (current version) win\n")
		yaml.WriteString(" - **Persistence**: Files persist across workflow runs via git branch storage\n")

		// Add file constraints if specified
		if len(memory.FileGlob) > 0 || memory.MaxFileSize > 0 || memory.MaxFileCount > 0 {
			yaml.WriteString(" \n")
			yaml.WriteString(" **Constraints:**\n")
			if len(memory.FileGlob) > 0 {
				yaml.WriteString(fmt.Sprintf(" - **Allowed Files**: Only files matching patterns: %s\n", strings.Join(memory.FileGlob, ", ")))
			}
			if memory.MaxFileSize > 0 {
				yaml.WriteString(fmt.Sprintf(" - **Max File Size**: %d bytes (%.2f MB) per file\n", memory.MaxFileSize, float64(memory.MaxFileSize)/1048576.0))
			}
			if memory.MaxFileCount > 0 {
				yaml.WriteString(fmt.Sprintf(" - **Max File Count**: %d files per commit\n", memory.MaxFileCount))
			}
		}

		yaml.WriteString(" \n")
		yaml.WriteString(" Examples of what you can store:\n")
		yaml.WriteString(fmt.Sprintf(" - `%snotes.md` - general notes and observations\n", memoryDir))
		yaml.WriteString(fmt.Sprintf(" - `%sstate.json` - structured state data\n", memoryDir))
		yaml.WriteString(fmt.Sprintf(" - `%shistory/` - organized history files in subdirectories\n", memoryDir))
		yaml.WriteString(" \n")
		yaml.WriteString(" Feel free to create, read, update, and organize files in this folder as needed for your tasks.\n")
	} else {
		// Multiple memories or non-default single memory
		yaml.WriteString(" ## Repo Memory Locations Available\n")
		yaml.WriteString(" \n")
		yaml.WriteString(" You have access to persistent repo memory folders where you can read and write files that are stored in git branches:\n")
		yaml.WriteString(" \n")
		for _, memory := range config.Memories {
			memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s/memory/%s/", memory.ID, memory.ID)
			yaml.WriteString(fmt.Sprintf(" - **%s**: `%s`", memory.ID, memoryDir))
			if memory.Description != "" {
				yaml.WriteString(fmt.Sprintf(" - %s", memory.Description))
			}
			yaml.WriteString(fmt.Sprintf(" (branch: `%s`", memory.BranchName))
			if memory.TargetRepo != "" {
				yaml.WriteString(fmt.Sprintf(" in `%s`", memory.TargetRepo))
			}
			yaml.WriteString(")\n")
		}
		yaml.WriteString(" \n")
		yaml.WriteString(" - **Read/Write Access**: You can freely read from and write to any files in these folders\n")
		yaml.WriteString(" - **Git Branch Storage**: Each memory is stored in its own git branch\n")
		yaml.WriteString(" - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes\n")
		yaml.WriteString(" - **Merge Strategy**: In case of conflicts, your changes (current version) win\n")
		yaml.WriteString(" - **Persistence**: Files persist across workflow runs via git branch storage\n")
		yaml.WriteString(" \n")
		yaml.WriteString(" Examples of what you can store:\n")
		for _, memory := range config.Memories {
			// NOTE(review): unlike the list above, this path is built without a
			// trailing slash; the "/" separator is supplied by the format
			// strings below, so the rendered output stays well-formed.
			memoryDir := fmt.Sprintf("/tmp/gh-aw/repo-memory-%s/memory/%s", memory.ID, memory.ID)
			yaml.WriteString(fmt.Sprintf(" - `%s/notes.md` - general notes and observations\n", memoryDir))
			yaml.WriteString(fmt.Sprintf(" - `%s/state.json` - structured state data\n", memoryDir))
			yaml.WriteString(fmt.Sprintf(" - `%s/history/` - organized history files\n", memoryDir))
		}
		yaml.WriteString(" \n")
		yaml.WriteString(" Feel free to create, read, update, and organize files in these folders as needed for your tasks.\n")
	}
}
diff --git a/pkg/workflow/repo_memory_test.go b/pkg/workflow/repo_memory_test.go
new file mode 100644
index 0000000000..bc55bcb43a
--- /dev/null
+++ b/pkg/workflow/repo_memory_test.go
@@ -0,0 +1,340 @@
+package workflow
+
+import (
+ "strings"
+ "testing"
+)
+
+// TestRepoMemoryConfigDefault tests basic repo-memory configuration with boolean true
+func TestRepoMemoryConfigDefault(t *testing.T) {
+ toolsMap := map[string]any{
+ "repo-memory": true,
+ }
+
+ toolsConfig, err := ParseToolsConfig(toolsMap)
+ if err != nil {
+ t.Fatalf("Failed to parse tools config: %v", err)
+ }
+
+ compiler := NewCompiler(false, "", "test")
+ config, err := compiler.extractRepoMemoryConfig(toolsConfig)
+ if err != nil {
+ t.Fatalf("Failed to extract repo-memory config: %v", err)
+ }
+
+ if config == nil {
+ t.Fatal("Expected non-nil config")
+ }
+
+ if len(config.Memories) != 1 {
+ t.Fatalf("Expected 1 memory, got %d", len(config.Memories))
+ }
+
+ memory := config.Memories[0]
+ if memory.ID != "default" {
+ t.Errorf("Expected ID 'default', got '%s'", memory.ID)
+ }
+
+ if memory.BranchName != "memory/default" {
+ t.Errorf("Expected branch name 'memory/default', got '%s'", memory.BranchName)
+ }
+
+ if memory.MaxFileSize != 10240 {
+ t.Errorf("Expected max file size 10240, got %d", memory.MaxFileSize)
+ }
+
+ if memory.MaxFileCount != 100 {
+ t.Errorf("Expected max file count 100, got %d", memory.MaxFileCount)
+ }
+
+ if !memory.CreateOrphan {
+ t.Error("Expected create-orphan to be true by default")
+ }
+}
+
+// TestRepoMemoryConfigObject tests repo-memory configuration with object notation
+func TestRepoMemoryConfigObject(t *testing.T) {
+ toolsMap := map[string]any{
+ "repo-memory": map[string]any{
+ "target-repo": "myorg/myrepo",
+ "branch-name": "memory/custom",
+ "max-file-size": 524288,
+ "description": "Custom memory store",
+ },
+ }
+
+ toolsConfig, err := ParseToolsConfig(toolsMap)
+ if err != nil {
+ t.Fatalf("Failed to parse tools config: %v", err)
+ }
+
+ compiler := NewCompiler(false, "", "test")
+ config, err := compiler.extractRepoMemoryConfig(toolsConfig)
+ if err != nil {
+ t.Fatalf("Failed to extract repo-memory config: %v", err)
+ }
+
+ if config == nil {
+ t.Fatal("Expected non-nil config")
+ }
+
+ if len(config.Memories) != 1 {
+ t.Fatalf("Expected 1 memory, got %d", len(config.Memories))
+ }
+
+ memory := config.Memories[0]
+ if memory.TargetRepo != "myorg/myrepo" {
+ t.Errorf("Expected target-repo 'myorg/myrepo', got '%s'", memory.TargetRepo)
+ }
+
+ if memory.BranchName != "memory/custom" {
+ t.Errorf("Expected branch name 'memory/custom', got '%s'", memory.BranchName)
+ }
+
+ if memory.MaxFileSize != 524288 {
+ t.Errorf("Expected max file size 524288, got %d", memory.MaxFileSize)
+ }
+
+ if memory.Description != "Custom memory store" {
+ t.Errorf("Expected description 'Custom memory store', got '%s'", memory.Description)
+ }
+}
+
+// TestRepoMemoryConfigArray tests repo-memory configuration with array notation
+func TestRepoMemoryConfigArray(t *testing.T) {
+ toolsMap := map[string]any{
+ "repo-memory": []any{
+ map[string]any{
+ "id": "session",
+ "branch-name": "memory/session",
+ },
+ map[string]any{
+ "id": "logs",
+ "branch-name": "memory/logs",
+ "max-file-size": 2097152,
+ },
+ },
+ }
+
+ toolsConfig, err := ParseToolsConfig(toolsMap)
+ if err != nil {
+ t.Fatalf("Failed to parse tools config: %v", err)
+ }
+
+ compiler := NewCompiler(false, "", "test")
+ config, err := compiler.extractRepoMemoryConfig(toolsConfig)
+ if err != nil {
+ t.Fatalf("Failed to extract repo-memory config: %v", err)
+ }
+
+ if config == nil {
+ t.Fatal("Expected non-nil config")
+ }
+
+ if len(config.Memories) != 2 {
+ t.Fatalf("Expected 2 memories, got %d", len(config.Memories))
+ }
+
+ // Check first memory
+ memory1 := config.Memories[0]
+ if memory1.ID != "session" {
+ t.Errorf("Expected ID 'session', got '%s'", memory1.ID)
+ }
+ if memory1.BranchName != "memory/session" {
+ t.Errorf("Expected branch name 'memory/session', got '%s'", memory1.BranchName)
+ }
+
+ // Check second memory
+ memory2 := config.Memories[1]
+ if memory2.ID != "logs" {
+ t.Errorf("Expected ID 'logs', got '%s'", memory2.ID)
+ }
+ if memory2.BranchName != "memory/logs" {
+ t.Errorf("Expected branch name 'memory/logs', got '%s'", memory2.BranchName)
+ }
+ if memory2.MaxFileSize != 2097152 {
+ t.Errorf("Expected max file size 2097152, got %d", memory2.MaxFileSize)
+ }
+}
+
+// TestRepoMemoryConfigDuplicateIDs tests that duplicate memory IDs are rejected
+func TestRepoMemoryConfigDuplicateIDs(t *testing.T) {
+ toolsMap := map[string]any{
+ "repo-memory": []any{
+ map[string]any{
+ "id": "session",
+ "branch-name": "memory/session",
+ },
+ map[string]any{
+ "id": "session",
+ "branch-name": "memory/session2",
+ },
+ },
+ }
+
+ toolsConfig, err := ParseToolsConfig(toolsMap)
+ if err != nil {
+ t.Fatalf("Failed to parse tools config: %v", err)
+ }
+
+ compiler := NewCompiler(false, "", "test")
+ _, err = compiler.extractRepoMemoryConfig(toolsConfig)
+ if err == nil {
+ t.Fatal("Expected error for duplicate memory IDs, got nil")
+ }
+
+ if !strings.Contains(err.Error(), "duplicate memory ID") {
+ t.Errorf("Expected error about duplicate memory ID, got: %v", err)
+ }
+}
+
+// TestRepoMemoryStepsGeneration tests that repo-memory steps are generated correctly
+func TestRepoMemoryStepsGeneration(t *testing.T) {
+ config := &RepoMemoryConfig{
+ Memories: []RepoMemoryEntry{
+ {
+ ID: "default",
+ BranchName: "memory/default",
+ MaxFileSize: 10240,
+ MaxFileCount: 100,
+ CreateOrphan: true,
+ },
+ },
+ }
+
+ data := &WorkflowData{
+ RepoMemoryConfig: config,
+ }
+
+ var builder strings.Builder
+ generateRepoMemorySteps(&builder, data)
+
+ output := builder.String()
+
+ // Check for clone step
+ if !strings.Contains(output, "Clone repo-memory branch (default)") {
+ t.Error("Expected clone step for repo-memory")
+ }
+
+ // Check for git commands
+ if !strings.Contains(output, "git clone") {
+ t.Error("Expected git clone command")
+ }
+
+ if !strings.Contains(output, "memory/default") {
+ t.Error("Expected memory/default branch reference")
+ }
+
+ // Check for orphan branch creation
+ if !strings.Contains(output, "git checkout --orphan") {
+ t.Error("Expected orphan branch creation")
+ }
+
+ // Check for memory directory creation
+ if !strings.Contains(output, "/tmp/gh-aw/repo-memory-default/memory/default") {
+ t.Error("Expected memory directory path")
+ }
+}
+
+// TestRepoMemoryPushStepsGeneration tests that push steps are generated correctly
+func TestRepoMemoryPushStepsGeneration(t *testing.T) {
+ config := &RepoMemoryConfig{
+ Memories: []RepoMemoryEntry{
+ {
+ ID: "default",
+ BranchName: "memory/default",
+ MaxFileSize: 10240,
+ MaxFileCount: 100,
+ },
+ },
+ }
+
+ data := &WorkflowData{
+ RepoMemoryConfig: config,
+ }
+
+ var builder strings.Builder
+ generateRepoMemoryPushSteps(&builder, data)
+
+ output := builder.String()
+
+ // Check for push step
+ if !strings.Contains(output, "Push repo-memory changes (default)") {
+ t.Error("Expected push step for repo-memory")
+ }
+
+ // Check for if: always()
+ if !strings.Contains(output, "if: always()") {
+ t.Error("Expected always() condition")
+ }
+
+ // Check for git commit
+ if !strings.Contains(output, "git commit") {
+ t.Error("Expected git commit command")
+ }
+
+ // Check for git push
+ if !strings.Contains(output, "git push") {
+ t.Error("Expected git push command")
+ }
+
+ // Check for merge strategy
+ if !strings.Contains(output, "-X ours") {
+ t.Error("Expected ours merge strategy")
+ }
+
+ // Check for validation
+ if !strings.Contains(output, "Check file sizes") {
+ t.Error("Expected file size validation")
+ }
+
+ if !strings.Contains(output, "Check file count") {
+ t.Error("Expected file count validation")
+ }
+}
+
+// TestRepoMemoryPromptGeneration tests that prompt section is generated correctly
+func TestRepoMemoryPromptGeneration(t *testing.T) {
+ config := &RepoMemoryConfig{
+ Memories: []RepoMemoryEntry{
+ {
+ ID: "default",
+ BranchName: "memory/default",
+ Description: "Persistent memory for agent state",
+ },
+ },
+ }
+
+ var builder strings.Builder
+ generateRepoMemoryPromptSection(&builder, config)
+
+ output := builder.String()
+
+ // Check for prompt header
+ if !strings.Contains(output, "## Repo Memory Available") {
+ t.Error("Expected repo memory header")
+ }
+
+ // Check for description
+ if !strings.Contains(output, "Persistent memory for agent state") {
+ t.Error("Expected custom description")
+ }
+
+ // Check for key information
+ if !strings.Contains(output, "Read/Write Access") {
+ t.Error("Expected read/write access information")
+ }
+
+ if !strings.Contains(output, "Git Branch Storage") {
+ t.Error("Expected git branch storage information")
+ }
+
+ if !strings.Contains(output, "Automatic Push") {
+ t.Error("Expected automatic push information")
+ }
+
+ // Check for examples
+ if !strings.Contains(output, "notes.md") {
+ t.Error("Expected example file")
+ }
+}
diff --git a/pkg/workflow/tools_types.go b/pkg/workflow/tools_types.go
index ead05b7b55..90b194015b 100644
--- a/pkg/workflow/tools_types.go
+++ b/pkg/workflow/tools_types.go
@@ -75,6 +75,7 @@ type ToolsConfig struct {
Serena *SerenaToolConfig `yaml:"serena,omitempty"`
AgenticWorkflows *AgenticWorkflowsToolConfig `yaml:"agentic-workflows,omitempty"`
CacheMemory *CacheMemoryToolConfig `yaml:"cache-memory,omitempty"`
+ RepoMemory *RepoMemoryToolConfig `yaml:"repo-memory,omitempty"`
SafetyPrompt *bool `yaml:"safety-prompt,omitempty"`
Timeout *int `yaml:"timeout,omitempty"`
StartupTimeout *int `yaml:"startup-timeout,omitempty"`
@@ -146,6 +147,9 @@ func (t *ToolsConfig) ToMap() map[string]any {
if t.CacheMemory != nil {
result["cache-memory"] = t.CacheMemory.Raw
}
+ if t.RepoMemory != nil {
+ result["repo-memory"] = t.RepoMemory.Raw
+ }
if t.SafetyPrompt != nil {
result["safety-prompt"] = *t.SafetyPrompt
}
@@ -294,6 +298,9 @@ func NewTools(toolsMap map[string]any) *Tools {
if val, exists := toolsMap["cache-memory"]; exists {
tools.CacheMemory = parseCacheMemoryTool(val)
}
+ if val, exists := toolsMap["repo-memory"]; exists {
+ tools.RepoMemory = parseRepoMemoryTool(val)
+ }
if val, exists := toolsMap["safety-prompt"]; exists {
tools.SafetyPrompt = parseSafetyPromptTool(val)
}
@@ -315,6 +322,7 @@ func NewTools(toolsMap map[string]any) *Tools {
"serena": true,
"agentic-workflows": true,
"cache-memory": true,
+ "repo-memory": true,
"safety-prompt": true,
"timeout": true,
"startup-timeout": true,
@@ -586,6 +594,12 @@ func parseCacheMemoryTool(val any) *CacheMemoryToolConfig {
return &CacheMemoryToolConfig{Raw: val}
}
+// parseRepoMemoryTool wraps the raw repo-memory tool configuration value.
+func parseRepoMemoryTool(val any) *RepoMemoryToolConfig {
+ // Accept any shape (boolean, object, or array) by storing the raw value as-is, mirroring parseCacheMemoryTool.
+ return &RepoMemoryToolConfig{Raw: val}
+}
+
// parseMCPGatewayTool converts raw mcp-gateway tool configuration
func parseMCPGatewayTool(val any) *MCPGatewayConfig {
if val == nil {
@@ -704,6 +718,8 @@ func (t *Tools) HasTool(name string) bool {
return t.AgenticWorkflows != nil
case "cache-memory":
return t.CacheMemory != nil
+ case "repo-memory":
+ return t.RepoMemory != nil
case "safety-prompt":
return t.SafetyPrompt != nil
case "timeout":
@@ -751,6 +767,9 @@ func (t *Tools) GetToolNames() []string {
if t.CacheMemory != nil {
names = append(names, "cache-memory")
}
+ if t.RepoMemory != nil {
+ names = append(names, "repo-memory")
+ }
if t.SafetyPrompt != nil {
names = append(names, "safety-prompt")
}