diff --git a/.changeset/patch-add-edit-wiki-safe-output.md b/.changeset/patch-add-edit-wiki-safe-output.md
new file mode 100644
index 00000000000..c64f23829bc
--- /dev/null
+++ b/.changeset/patch-add-edit-wiki-safe-output.md
@@ -0,0 +1,5 @@
+---
+"gh-aw": patch
+---
+
+Added the `edit-wiki` safe-output for pushing committed agent changes to repository wikis.
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index 60e2c19224e..2f555e9a38c 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"cc975ed0d58d799d184035327af359c4a3225a02a95981b5d20c04fae0f857bf","agent_id":"pi","agent_model":"copilot/claude-sonnet-4-20250514"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"d658597134da0d7e2e082417a6c4801759d1d3083432b2b4f75a31fe7982dc10","agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["COPILOT_GITHUB_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/cli-proxy:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.35"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.3"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
@@ -126,18 +126,18 @@ jobs:
- name: Generate agentic run info
id: generate_aw_info
env:
- GH_AW_INFO_ENGINE_ID: "pi"
- GH_AW_INFO_ENGINE_NAME: "Pi"
- GH_AW_INFO_MODEL: "copilot/claude-sonnet-4-20250514"
- GH_AW_INFO_VERSION: "0.72.1"
- GH_AW_INFO_AGENT_VERSION: "0.72.1"
+ GH_AW_INFO_ENGINE_ID: "copilot"
+ GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI"
+ GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'claude-sonnet-4.6' }}
+ GH_AW_INFO_VERSION: "1.0.40"
+ GH_AW_INFO_AGENT_VERSION: "1.0.40"
GH_AW_INFO_WORKFLOW_NAME: "Dev"
- GH_AW_INFO_EXPERIMENTAL: "true"
+ GH_AW_INFO_EXPERIMENTAL: "false"
GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true"
GH_AW_INFO_STAGED: "false"
GH_AW_INFO_ALLOWED_DOMAINS: '["defaults"]'
- GH_AW_INFO_FIREWALL_ENABLED: "false"
- GH_AW_INFO_AWF_VERSION: ""
+ GH_AW_INFO_FIREWALL_ENABLED: "true"
+ GH_AW_INFO_AWF_VERSION: "v0.25.35"
GH_AW_INFO_AWMG_VERSION: ""
GH_AW_INFO_FIREWALL_TYPE: "squid"
GH_AW_COMPILED_STRICT: "false"
@@ -163,7 +163,7 @@ jobs:
await main();
- name: Validate COPILOT_GITHUB_TOKEN secret
id: validate-secret
- run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh" COPILOT_GITHUB_TOKEN Pi https://github.github.com/gh-aw/reference/engines/#pi
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh" COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default
env:
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- name: Checkout .github and .agents folders
@@ -239,20 +239,20 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_820d0bab622491f3_EOF'
+ cat << 'GH_AW_PROMPT_c21ca251edb62f3c_EOF'
- GH_AW_PROMPT_820d0bab622491f3_EOF
+ GH_AW_PROMPT_c21ca251edb62f3c_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_820d0bab622491f3_EOF'
+ cat << 'GH_AW_PROMPT_c21ca251edb62f3c_EOF'
Tools: create_issue, missing_tool, missing_data, noop
- GH_AW_PROMPT_820d0bab622491f3_EOF
+ GH_AW_PROMPT_c21ca251edb62f3c_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_820d0bab622491f3_EOF'
+ cat << 'GH_AW_PROMPT_c21ca251edb62f3c_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -281,19 +281,19 @@ jobs:
{{/if}}
- GH_AW_PROMPT_820d0bab622491f3_EOF
+ GH_AW_PROMPT_c21ca251edb62f3c_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/cli_proxy_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_820d0bab622491f3_EOF'
+ cat << 'GH_AW_PROMPT_c21ca251edb62f3c_EOF'
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/dev.md}}
- GH_AW_PROMPT_820d0bab622491f3_EOF
+ GH_AW_PROMPT_c21ca251edb62f3c_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
- GH_AW_ENGINE_ID: "pi"
+ GH_AW_ENGINE_ID: "copilot"
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -376,10 +376,14 @@ jobs:
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_WORKFLOW_ID_SANITIZED: dev
outputs:
+ agentic_engine_timeout: ${{ steps.detect-copilot-errors.outputs.agentic_engine_timeout || 'false' }}
checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }}
effective_tokens: ${{ steps.parse-mcp-gateway.outputs.effective_tokens }}
has_patch: ${{ steps.collect_output.outputs.has_patch }}
+ inference_access_error: ${{ steps.detect-copilot-errors.outputs.inference_access_error || 'false' }}
+ mcp_policy_error: ${{ steps.detect-copilot-errors.outputs.mcp_policy_error || 'false' }}
model: ${{ needs.activation.outputs.model }}
+ model_not_supported_error: ${{ steps.detect-copilot-errors.outputs.model_not_supported_error || 'false' }}
output: ${{ steps.collect_output.outputs.output }}
output_types: ${{ steps.collect_output.outputs.output_types }}
setup-trace-id: ${{ steps.setup.outputs.trace-id }}
@@ -446,15 +450,12 @@ jobs:
setupGlobals(core, github, context, exec, io, getOctokit);
const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs');
await main();
- - name: Setup Node.js
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
- with:
- node-version: '24'
- package-manager-cache: false
+ - name: Install GitHub Copilot CLI
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.40
+ env:
+ GH_HOST: github.com
- name: Install AWF binary
run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.35
- - name: Install Pi CLI
- run: npm install --ignore-scripts -g @mariozechner/pi-coding-agent@0.72.1
- name: Determine automatic lockdown mode for GitHub MCP Server
id: determine-automatic-lockdown
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
@@ -483,9 +484,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_96fe11ece7634061_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_7b7dd3869d383420_EOF'
{"create_issue":{"expires":168,"max":1,"title_prefix":"[Daily Report] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_96fe11ece7634061_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_7b7dd3869d383420_EOF
- name: Generate Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -674,7 +675,7 @@ jobs:
export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288"
export DEBUG="*"
- export GH_AW_ENGINE="pi"
+ export GH_AW_ENGINE="copilot"
export GH_AW_MCP_CLI_SERVERS='["safeoutputs"]'
echo 'GH_AW_MCP_CLI_SERVERS=["safeoutputs"]' >> "$GITHUB_ENV"
MCP_GATEWAY_UID=$(id -u 2>/dev/null || echo '0')
@@ -682,6 +683,34 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.3'
+ mkdir -p /home/runner/.copilot
+ GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
+ cat << GH_AW_MCP_CONFIG_5a4f9817a0b4b4d0_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ {
+ "mcpServers": {
+ "safeoutputs": {
+ "type": "http",
+ "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT",
+ "headers": {
+ "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}"
+ },
+ "guard-policies": {
+ "write-sink": {
+ "accept": [
+ "*"
+ ]
+ }
+ }
+ }
+ },
+ "gateway": {
+ "port": $MCP_GATEWAY_PORT,
+ "domain": "${MCP_GATEWAY_DOMAIN}",
+ "apiKey": "${MCP_GATEWAY_API_KEY}",
+ "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
+ }
+ }
+ GH_AW_MCP_CONFIG_5a4f9817a0b4b4d0_EOF
- name: Mount MCP servers as CLIs
id: mount-mcp-clis
continue-on-error: true
@@ -711,35 +740,54 @@ jobs:
CLI_PROXY_IMAGE: 'ghcr.io/github/gh-aw-mcpg:v0.3.3'
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/start_cli_proxy.sh"
- - name: Execute Pi CLI
+ - name: Execute GitHub Copilot CLI
id: agentic_execution
+ # Copilot CLI tool arguments (sorted):
+ timeout-minutes: 30
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
+ GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
+ export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/agent-stdio.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.35/awf-config.schema.json","network":{"allowDomains":["api.githubcopilot.com","api.pi.ai","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","github.com","host.docker.internal","json-schema.org","json.schemastore.org","keyserver.ubuntu.com","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","ppa.launchpad.net","raw.githubusercontent.com","registry.npmjs.org","s.symcb.com","s.symcd.com","security.ubuntu.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com"]},"apiProxy":{"enabled":true},"container":{"imageTag":"0.25.35"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.35/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","github.com","host.docker.internal","json-schema.org","json.schemastore.org","keyserver.ubuntu.com","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","ppa.launchpad.net","raw.githubusercontent.com","registry.npmjs.org","s.symcb.com","s.symcd.com","security.ubuntu.com","telemetry.enterprise.githubcopilot.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com"]},"apiProxy":{"enabled":true},"container":{"imageTag":"0.25.35"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
# shellcheck disable=SC1003
sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GH_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull --difc-proxy-host host.docker.internal:18443 --difc-proxy-ca-cert /tmp/gh-aw/difc-proxy-tls/ca.crt \
- -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && mkdir -p /tmp/gh-aw/pi-agent-dir && echo eyJwcm92aWRlcnMiOnsiYXctZ2F0ZXdheSI6eyJhcGkiOiJvcGVuYWktY29tcGxldGlvbnMiLCJhcGlLZXkiOiJDT1BJTE9UX0dJVEhVQl9UT0tFTiIsImJhc2VVcmwiOiJodHRwOi8vaG9zdC5kb2NrZXIuaW50ZXJuYWw6MTAwMDIiLCJtb2RlbHMiOlt7ImlkIjoiY2xhdWRlLXNvbm5ldC00LTIwMjUwNTE0In1dfX19 | base64 -d > /tmp/gh-aw/pi-agent-dir/models.json && cat /tmp/gh-aw/aw-prompts/prompt.txt | pi --print --mode json --no-session --model aw-gateway/claude-sonnet-4-20250514 --extension "${RUNNER_TEMP}/gh-aw/actions/pi_provider.cjs" --extension "${RUNNER_TEMP}/gh-aw/actions/pi_steering_extension.cjs" 2>&1 | tee /tmp/gh-aw/pi-streaming.jsonl' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
+ -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || echo node)"; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
+ COPILOT_AGENT_RUNNER_TYPE: STANDALONE
+ COPILOT_API_KEY: dummy-byok-key-for-offline-mode
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ COPILOT_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'claude-sonnet-4.6' }}
+ GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_PHASE: agent
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
GH_AW_VERSION: dev
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN || github.token }}
+ GITHUB_API_URL: ${{ github.api_url }}
GITHUB_AW: true
+ GITHUB_COPILOT_INTEGRATION_ID: agentic-workflows
+ GITHUB_HEAD_REF: ${{ github.head_ref }}
+ GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ GITHUB_REF_NAME: ${{ github.ref_name }}
+ GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
GITHUB_WORKSPACE: ${{ github.workspace }}
GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com
GIT_AUTHOR_NAME: github-actions[bot]
GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com
GIT_COMMITTER_NAME: github-actions[bot]
- PI_CODING_AGENT_DIR: /tmp/gh-aw/pi-agent-dir
+ XDG_CONFIG_HOME: /home/runner
- name: Stop CLI Proxy
if: always()
continue-on-error: true
run: bash "${RUNNER_TEMP}/gh-aw/actions/stop_cli_proxy.sh"
+ - name: Detect Copilot errors
+ id: detect-copilot-errors
+ if: always()
+ continue-on-error: true
+ run: node "${RUNNER_TEMP}/gh-aw/actions/detect_copilot_errors.cjs"
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -753,6 +801,10 @@ jobs:
SERVER_URL_STRIPPED="${SERVER_URL#https://}"
git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
echo "Git configured with standard GitHub Actions identity"
+ - name: Copy Copilot session state files to logs
+ if: always()
+ continue-on-error: true
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/copy_copilot_session_state.sh"
- name: Stop MCP Gateway
if: always()
continue-on-error: true
@@ -793,7 +845,7 @@ jobs:
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }}
- GH_AW_ALLOWED_DOMAINS: "api.githubcopilot.com,api.pi.ai,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
+ GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
with:
@@ -806,12 +858,12 @@ jobs:
if: always()
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
- GH_AW_AGENT_OUTPUT: /tmp/gh-aw/pi-streaming.jsonl
+ GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
setupGlobals(core, github, context, exec, io, getOctokit);
- const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_pi_log.cjs');
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_copilot_log.cjs');
await main();
- name: Parse MCP Gateway logs for step summary
if: always()
@@ -872,7 +924,7 @@ jobs:
name: agent
path: |
/tmp/gh-aw/aw-prompts/prompt.txt
- /tmp/gh-aw/pi-streaming.jsonl
+ /tmp/gh-aw/sandbox/agent/logs/
/tmp/gh-aw/redacted-urls.log
/tmp/gh-aw/mcp-logs/
/tmp/gh-aw/agent_usage.json
@@ -1015,9 +1067,14 @@ jobs:
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_WORKFLOW_ID: "dev"
GH_AW_ACTION_FAILURE_ISSUE_EXPIRES_HOURS: "12"
- GH_AW_ENGINE_ID: "pi"
+ GH_AW_ENGINE_ID: "copilot"
GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }}
GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }}
+ GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }}
+ GH_AW_MCP_POLICY_ERROR: ${{ needs.agent.outputs.mcp_policy_error }}
+ GH_AW_AGENTIC_ENGINE_TIMEOUT: ${{ needs.agent.outputs.agentic_engine_timeout }}
+ GH_AW_MODEL_NOT_SUPPORTED_ERROR: ${{ needs.agent.outputs.model_not_supported_error }}
+ GH_AW_ENGINE_API_HOSTS: "api.enterprise.githubcopilot.com,api.githubcopilot.com,api.business.githubcopilot.com,api.individual.githubcopilot.com"
GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }}
GH_AW_STALE_LOCK_FILE_FAILED: ${{ needs.activation.outputs.stale_lock_file_failed }}
GH_AW_GROUP_REPORTS: "false"
@@ -1166,35 +1223,49 @@ jobs:
with:
node-version: '24'
package-manager-cache: false
+ - name: Install GitHub Copilot CLI
+ run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.40
+ env:
+ GH_HOST: github.com
- name: Install AWF binary
run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.35
- - name: Install Pi CLI
- run: npm install --ignore-scripts -g @mariozechner/pi-coding-agent@0.72.1
- - name: Execute Pi CLI
+ - name: Execute GitHub Copilot CLI
if: always() && steps.detection_guard.outputs.run_detection == 'true'
continue-on-error: true
id: detection_agentic_execution
+ # Copilot CLI tool arguments (sorted):
+ timeout-minutes: 20
run: |
set -o pipefail
touch /tmp/gh-aw/agent-step-summary.md
+ GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true)
+ export GH_AW_NODE_BIN
(umask 177 && touch /tmp/gh-aw/threat-detection/detection.log)
- printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.35/awf-config.schema.json","network":{"allowDomains":["api.githubcopilot.com","api.pi.ai","github.com","host.docker.internal","raw.githubusercontent.com","registry.npmjs.org"]},"apiProxy":{"enabled":true},"container":{"imageTag":"0.25.35"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
+ printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.35/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true},"container":{"imageTag":"0.25.35"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json
# shellcheck disable=SC1003
sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \
- -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && mkdir -p /tmp/gh-aw/pi-agent-dir && echo eyJwcm92aWRlcnMiOnsiYXctZ2F0ZXdheSI6eyJhcGkiOiJvcGVuYWktY29tcGxldGlvbnMiLCJhcGlLZXkiOiJDT1BJTE9UX0dJVEhVQl9UT0tFTiIsImJhc2VVcmwiOiJodHRwOi8vaG9zdC5kb2NrZXIuaW50ZXJuYWw6MTAwMDIiLCJtb2RlbHMiOlt7ImlkIjoiY2xhdWRlLXNvbm5ldC00LTIwMjUwNTE0In1dfX19 | base64 -d > /tmp/gh-aw/pi-agent-dir/models.json && cat /tmp/gh-aw/aw-prompts/prompt.txt | pi --print --mode json --no-session --model aw-gateway/claude-sonnet-4-20250514 --extension "${RUNNER_TEMP}/gh-aw/actions/pi_provider.cjs" --extension "${RUNNER_TEMP}/gh-aw/actions/pi_steering_extension.cjs" 2>&1 | tee /tmp/gh-aw/pi-streaming.jsonl' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
+ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || echo node)"; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log
env:
+ COPILOT_AGENT_RUNNER_TYPE: STANDALONE
+ COPILOT_API_KEY: dummy-byok-key-for-offline-mode
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ COPILOT_MODEL: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || 'claude-sonnet-4.6' }}
GH_AW_PHASE: detection
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_VERSION: dev
+ GITHUB_API_URL: ${{ github.api_url }}
GITHUB_AW: true
+ GITHUB_COPILOT_INTEGRATION_ID: agentic-workflows
+ GITHUB_HEAD_REF: ${{ github.head_ref }}
+ GITHUB_REF_NAME: ${{ github.ref_name }}
+ GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md
GITHUB_WORKSPACE: ${{ github.workspace }}
GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com
GIT_AUTHOR_NAME: github-actions[bot]
GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com
GIT_COMMITTER_NAME: github-actions[bot]
- PI_CODING_AGENT_DIR: /tmp/gh-aw/pi-agent-dir
+ XDG_CONFIG_HOME: /home/runner
- name: Upload threat detection log
if: always() && steps.detection_guard.outputs.run_detection == 'true'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
@@ -1291,8 +1362,9 @@ jobs:
GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }}
GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }}
GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }}
- GH_AW_ENGINE_ID: "pi"
- GH_AW_ENGINE_MODEL: "copilot/claude-sonnet-4-20250514"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
+ GH_AW_ENGINE_VERSION: "1.0.40"
GH_AW_WORKFLOW_ID: "dev"
GH_AW_WORKFLOW_NAME: "Dev"
outputs:
@@ -1350,7 +1422,7 @@ jobs:
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
- GH_AW_ALLOWED_DOMAINS: "api.githubcopilot.com,api.pi.ai,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
+ GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"expires\":168,\"max\":1,\"title_prefix\":\"[Daily Report] \"},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"report_incomplete\":{}}"
diff --git a/.github/workflows/dev.md b/.github/workflows/dev.md
index 1d080fbaa09..61b37dce1e1 100644
--- a/.github/workflows/dev.md
+++ b/.github/workflows/dev.md
@@ -8,9 +8,7 @@ name: Dev
description: Daily status report for gh-aw project
timeout-minutes: 30
strict: false
-engine:
- id: pi
- model: copilot/claude-sonnet-4-20250514
+engine: copilot
permissions:
contents: read
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index 20fe3214318..f6853857359 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -1,5 +1,5 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"5654399396a1259c6dd48c4a40a088432604138bc7c3df639ce30480fcb35914","agent_id":"codex"}
-# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions-ecosystem/action-add-labels","sha":"c96b68fec76a0987cd93957189e9abd0b9a72ff1","version":"v1.1.3"},{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.35"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.3"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-a
lpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"8617bc4f8a38bf55b6cce2fba58f2dafc8eedf642fbcd6f67517eee4380c1cc4","agent_id":"codex"}
+# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_CI_TRIGGER_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions-ecosystem/action-add-labels","sha":"c96b68fec76a0987cd93957189e9abd0b9a72ff1","version":"v1.1.3"},{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.35"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.35"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.3"},{"image":"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","
pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
# | |_| | __ _ ___ _ __ | |_ _ ___
@@ -36,6 +36,7 @@
#
# Secrets used:
# - CODEX_API_KEY
+# - GH_AW_CI_TRIGGER_TOKEN
# - GH_AW_GITHUB_MCP_SERVER_TOKEN
# - GH_AW_GITHUB_TOKEN
# - GH_AW_OTEL_ENDPOINT
@@ -257,25 +258,25 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_7e886d939ae96a38_EOF'
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_7e886d939ae96a38_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/playwright_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_7e886d939ae96a38_EOF'
Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop, add_smoked_label
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_7e886d939ae96a38_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_comment_memory.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_7e886d939ae96a38_EOF'
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_7e886d939ae96a38_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_7e886d939ae96a38_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -307,9 +308,9 @@ jobs:
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_7e886d939ae96a38_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF'
+ cat << 'GH_AW_PROMPT_7e886d939ae96a38_EOF'
## Serena Code Analysis
@@ -350,7 +351,7 @@ jobs:
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/smoke-codex.md}}
- GH_AW_PROMPT_6bd5d9ef0cfb17a5_EOF
+ GH_AW_PROMPT_7e886d939ae96a38_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
@@ -597,9 +598,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_990c1eb48269b369_EOF'
- {"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"add_smoked_label":true,"comment_memory":{"max":1,"memory_id":"default"},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"report_incomplete":{},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_990c1eb48269b369_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_a3651798fc7a531f_EOF'
+ {"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"add_smoked_label":true,"comment_memory":{"max":1,"memory_id":"default"},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"edit_wiki":{"if_no_changes":"warn","max_patch_size":1024},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"report_incomplete":{},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_a3651798fc7a531f_EOF
- name: Generate Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -927,7 +928,7 @@ jobs:
- name: Write MCP Scripts Config
run: |
mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-scripts/logs"
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_ea62428d6b567c2d_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_6836ba84d60d1463_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -957,8 +958,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_ea62428d6b567c2d_EOF
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_9e9be9620ff39e25_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_6836ba84d60d1463_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_62c64cf425021ce4_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -972,12 +973,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_9e9be9620ff39e25_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_62c64cf425021ce4_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs"
- name: Write MCP Scripts Tool Files
run: |
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_e06f151e3fec9952_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_c49b15be455bb31a_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -989,7 +990,7 @@ jobs:
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_e06f151e3fec9952_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_c49b15be455bb31a_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh"
- name: Generate MCP Scripts Server Config
@@ -1062,7 +1063,7 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e CODEX_HOME -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.3'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_732c588478440710_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_dc0f5c421375b6db_EOF
[history]
persistence = "none"
@@ -1114,11 +1115,11 @@ jobs:
[mcp_servers.serena."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_732c588478440710_EOF
+ GH_AW_MCP_CONFIG_dc0f5c421375b6db_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_732c588478440710_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_dc0f5c421375b6db_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -1204,11 +1205,11 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_732c588478440710_EOF
+ GH_AW_MCP_CONFIG_dc0f5c421375b6db_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_db31288df1eb158b_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_a27ff31dd57422aa_EOF
model_provider = "openai-proxy"
@@ -1220,7 +1221,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "GH_AW_ASSETS_ALLOWED_EXTS", "GH_AW_ASSETS_BRANCH", "GH_AW_ASSETS_MAX_SIZE_KB", "GH_AW_SAFE_OUTPUTS", "GITHUB_PERSONAL_ACCESS_TOKEN", "GITHUB_REPOSITORY", "GITHUB_SERVER_URL", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_db31288df1eb158b_EOF
+ GH_AW_CODEX_SHELL_POLICY_a27ff31dd57422aa_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -1478,7 +1479,7 @@ jobs:
needs.activation.outputs.stale_lock_file_failed == 'true')
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
discussions: write
issues: write
pull-requests: write
@@ -1809,18 +1810,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.3'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_2170c55e41a705e7_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_9a805a75a952a510_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_2170c55e41a705e7_EOF
+ GH_AW_MCP_CONFIG_9a805a75a952a510_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_afb5da94a83dd438_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_e19470f042069fdb_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1831,11 +1832,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_afb5da94a83dd438_EOF
+ GH_AW_MCP_CONFIG_e19470f042069fdb_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_fc7fd06995230739_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_355e0852a9942342_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1845,7 +1846,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_fc7fd06995230739_EOF
+ GH_AW_CODEX_SHELL_POLICY_355e0852a9942342_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -1972,7 +1973,7 @@ jobs:
if: (!cancelled()) && needs.agent.result != 'skipped' && needs.detection.result == 'success'
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
discussions: write
issues: write
pull-requests: write
@@ -2032,6 +2033,12 @@ jobs:
mkdir -p /tmp/gh-aw/
find "/tmp/gh-aw/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
+ - name: Download patch artifact
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
- name: Configure GH_HOST for enterprise compatibility
id: ghes-host-config
shell: bash
@@ -2050,7 +2057,8 @@ jobs:
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
GH_AW_SAFE_OUTPUT_ACTIONS: "{\"add_smoked_label\":\"add_smoked_label\"}"
- GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-codex\"]},\"comment_memory\":{\"max\":1,\"memory_id\":\"default\"},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-codex\",\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"hide_comment\":{\"max\":5},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"report_incomplete\":{},\"unassign_from_user\":{\"allowed\":[\"githubactionagent\"],\"max\":1}}"
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-codex\"]},\"comment_memory\":{\"max\":1,\"memory_id\":\"default\"},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-codex\",\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"edit_wiki\":{\"if_no_changes\":\"warn\",\"max_patch_size\":1024},\"hide_comment\":{\"max\":5},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"report_incomplete\":{},\"unassign_from_user\":{\"allowed\":[\"githubactionagent\"],\"max\":1}}"
+ GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }}
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md
index 57da2faf942..2f5c0ebda60 100644
--- a/.github/workflows/smoke-codex.md
+++ b/.github/workflows/smoke-codex.md
@@ -61,6 +61,7 @@ safe-outputs:
allowed: [githubactionagent]
max: 1
hide-comment:
+ edit-wiki:
messages:
footer: "> 🔮 *The oracle has spoken through [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}"
run-started: "🔮 The ancient spirits stir... [{workflow_name}]({run_url}) awakens to divine this {event_type}..."
@@ -102,13 +103,20 @@ checkout:
- Check if `/tmp/gh-aw/cache-memory/smoke-codex-history.json` exists; if it does, read it and note the previous run's results (run ID, timestamp, status)
- Write current run results to `/tmp/gh-aw/cache-memory/smoke-codex-history.json` with content: `{"run_id": "${{ github.run_id }}", "timestamp": "", "status": "PASS or FAIL", "tests_passed": , "tests_failed": }` (create the parent directory if it doesn't exist)
- Use bash to verify the file was written successfully (use `cat` to read it back)
+10. **Edit Wiki Testing**: Create a new wiki page with a haiku and link it from the main wiki page using the `edit_wiki` safe-output tool:
+ - Clone the wiki repo to `/tmp/gh-aw/wiki` using: `git clone https://github.com/${{ github.repository }}.wiki.git /tmp/gh-aw/wiki` (if the wiki doesn't exist yet, skip this test and mark it ⚠️ Skipped)
+ - Configure git identity in the wiki clone: `git -C /tmp/gh-aw/wiki config user.email "github-actions[bot]@users.noreply.github.com"` and `git -C /tmp/gh-aw/wiki config user.name "github-actions[bot]"`
+ - Create a new wiki page `/tmp/gh-aw/wiki/Smoke-Test-Haiku.md` with content: a title "# Smoke Test Haiku", a blank line, and an original 3-line haiku about automated testing or continuous integration (write the haiku content in the file, NOT in the commit message)
+ - Update `/tmp/gh-aw/wiki/Home.md` (if it exists) to append a link to the new page: `- [Smoke Test Haiku](Smoke-Test-Haiku)` (if Home.md does not exist, create it with that link)
+ - Stage and commit the wiki changes: `git -C /tmp/gh-aw/wiki add . && git -C /tmp/gh-aw/wiki commit -m "Add smoke test haiku page [run ${{ github.run_id }}]"`
+ - Call the `edit_wiki` safe-output tool with `{ "message": "Add smoke test haiku page", "wiki_dir": "/tmp/gh-aw/wiki" }`
## Output
**ALWAYS create an issue** with a summary of the smoke test run:
- Title: "Smoke Test: Codex - ${{ github.run_id }}"
- Body should include:
- - Test results (✅ or ❌ for each test, including test #9 Cache Memory)
+ - Test results (✅ or ❌ for each test, including test #9 Cache Memory and test #10 Edit Wiki)
- Overall status: PASS or FAIL
- Run URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
- Timestamp
diff --git a/actions/setup/js/edit_wiki.cjs b/actions/setup/js/edit_wiki.cjs
new file mode 100644
index 00000000000..1158da3ffbf
--- /dev/null
+++ b/actions/setup/js/edit_wiki.cjs
@@ -0,0 +1,372 @@
+// @ts-check
+///
+
+/** @type {typeof import("fs")} */
+const fs = require("fs");
+const os = require("os");
+const nodePath = require("path");
+const { generateStagedPreview } = require("./staged_preview.cjs");
+const { isStagedMode } = require("./safe_output_helpers.cjs");
+const { getErrorMessage } = require("./error_helpers.cjs");
+const { resolveTargetRepoConfig, resolveAndValidateRepo } = require("./repo_helpers.cjs");
+const { getGitAuthEnv } = require("./git_helpers.cjs");
+const { createAuthenticatedGitHubClient } = require("./handler_auth.cjs");
+const { pushSignedCommits } = require("./push_signed_commits.cjs");
+
+/**
+ * @typedef {import('./types/handler-factory').HandlerFactoryFunction} HandlerFactoryFunction
+ */
+
+/** @type {string} Safe output type handled by this module */
+const HANDLER_TYPE = "edit_wiki";
+
+/**
+ * Main handler factory for edit_wiki
+ * Returns a message handler function that processes individual edit_wiki messages
+ * @type {HandlerFactoryFunction}
+ */
+async function main(config = {}) {
+ const ifNoChanges = config.if_no_changes || "warn";
+ const commitTitleSuffix = config.commit_title_suffix || "";
+ const maxSizeKb = config.max_patch_size ? parseInt(String(config.max_patch_size), 10) : 1024;
+ const maxCount = config.max || 0; // 0 means no limit
+
+ // Cross-repo support: resolve target repository from config
+ const { defaultTargetRepo, allowedRepos } = resolveTargetRepoConfig(config);
+
+ // Build git auth env once for all network operations in this handler.
+ const gitAuthEnv = getGitAuthEnv(config["github-token"]);
+
+ // Create authenticated GitHub client (same pattern as push_to_pull_request_branch).
+ // Used by pushSignedCommits for the signed-commit GraphQL path and fallback git push.
+ const githubClient = await createAuthenticatedGitHubClient(config);
+
+ // Check if we're in staged mode
+ const isStaged = isStagedMode(config);
+
+ core.info(`If no changes: ${ifNoChanges}`);
+ if (commitTitleSuffix) {
+ core.info(`Commit title suffix: ${commitTitleSuffix}`);
+ }
+ core.info(`Max patch size: ${maxSizeKb} KB`);
+ core.info(`Max count: ${maxCount || "unlimited"}`);
+ core.info(`Default target repo: ${defaultTargetRepo}`);
+ if (allowedRepos.size > 0) {
+ core.info(`Allowed repos: ${[...allowedRepos].join(", ")}`);
+ }
+
+ // Track how many items we've processed for max limit
+ let processedCount = 0;
+
+ /**
+ * Message handler function - processes individual edit_wiki messages
+ * @param {any} message - The edit_wiki message to process
+ * @param {import('./types/handler-factory').ResolvedTemporaryIds} resolvedTemporaryIds - Map of temporary IDs to resolved IDs
+ * @returns {Promise}
+ */
+ return async function handleEditWiki(message, resolvedTemporaryIds) {
+ // Check max count
+ if (maxCount > 0 && processedCount >= maxCount) {
+ core.info(`Skipping message - max count (${maxCount}) reached`);
+ return { success: false, error: `Max count (${maxCount}) reached`, skipped: true };
+ }
+
+ processedCount++;
+
+ // Determine the patch file path from the message
+ const patchFilePath = message.patch_path;
+ core.info(`Patch file path: ${patchFilePath || "(not set)"}`);
+
+ // Check if patch file exists and has valid content
+ if (!patchFilePath || !fs.existsSync(patchFilePath)) {
+ const msg = "No patch file found - cannot push wiki changes without a patch";
+
+ switch (ifNoChanges) {
+ case "error":
+ return { success: false, error: msg };
+ case "ignore":
+ return { success: false, error: msg, skipped: true };
+ case "warn":
+ default:
+ core.info(msg);
+ return { success: false, error: msg, skipped: true };
+ }
+ }
+
+ const patchContent = fs.readFileSync(patchFilePath, "utf8");
+
+ // Check for actual error conditions
+ if (patchContent.includes("Failed to generate patch")) {
+ const msg = "Patch file contains error message - cannot push wiki changes";
+ core.error("Patch file generation failed");
+ core.error(`Patch file location: ${patchFilePath}`);
+ return { success: false, error: msg };
+ }
+
+ const isEmpty = !patchContent || !patchContent.trim();
+
+ // Validate patch size
+ if (!isEmpty) {
+ const patchSizeBytes = Buffer.byteLength(patchContent, "utf8");
+ const patchSizeKb = Math.ceil(patchSizeBytes / 1024);
+
+ const diffSizeBytesRaw = message.diff_size;
+ const haveDiffSize = typeof diffSizeBytesRaw === "number" && diffSizeBytesRaw >= 0;
+
+ let sizeForCheckBytes;
+ let sizeLabel;
+ if (haveDiffSize) {
+ sizeForCheckBytes = diffSizeBytesRaw;
+ sizeLabel = "Incremental diff size";
+ } else {
+ sizeForCheckBytes = patchSizeBytes;
+ sizeLabel = "Patch size";
+ }
+ const sizeForCheckKb = Math.ceil(sizeForCheckBytes / 1024);
+
+ core.info(`Patch file size: ${patchSizeKb} KB`);
+ core.info(`${sizeLabel}: ${sizeForCheckKb} KB (maximum allowed: ${maxSizeKb} KB)`);
+
+ if (sizeForCheckKb > maxSizeKb) {
+ const msg = `${sizeLabel} (${sizeForCheckKb} KB) exceeds maximum allowed size (${maxSizeKb} KB)`;
+ return { success: false, error: msg };
+ }
+
+ core.info("Patch size validation passed");
+ }
+
+ if (isEmpty) {
+ const msg = "Patch file is empty - no changes to apply to wiki";
+
+ switch (ifNoChanges) {
+ case "error":
+ return { success: false, error: "No wiki changes to push - failing as configured by if-no-changes: error" };
+ case "ignore":
+ return { success: false, error: msg, skipped: true };
+ case "warn":
+ default:
+ core.info(msg);
+ return { success: false, error: msg, skipped: true };
+ }
+ }
+
+ // If in staged mode, emit staged preview
+ if (isStaged) {
+ await generateStagedPreview({
+ title: "Edit Wiki",
+ description: "The following wiki changes would be pushed if staged mode was disabled:",
+ items: [{ commit_message: message.message || message.commit_message || "(no message)" }],
+ renderItem: item => {
+ let content = `**Wiki:** ${defaultTargetRepo || process.env.GITHUB_REPOSITORY || "(current repo)"}\n\n`;
+ if (item.commit_message) {
+ content += `**Commit Message:** ${item.commit_message}\n\n`;
+ }
+ if (patchFilePath && fs.existsSync(patchFilePath)) {
+ const patchStats = fs.readFileSync(patchFilePath, "utf8");
+ if (patchStats.trim()) {
+ content += `**Changes:** Patch file exists with ${patchStats.split("\n").length} lines\n\n`;
+ content += `Show patch preview
\n\n\`\`\`diff\n${patchStats.slice(0, 2000)}${patchStats.length > 2000 ? "\n... (truncated)" : ""}\n\`\`\`\n\n \n\n`;
+ } else {
+ content += `**Changes:** No changes (empty patch)\n\n`;
+ }
+ }
+ return content;
+ },
+ });
+ return { success: true, staged: true };
+ }
+
+ // Resolve and validate target repository
+ const repoResult = resolveAndValidateRepo(message, defaultTargetRepo, allowedRepos, "edit wiki");
+ if (!repoResult.success) {
+ return { success: false, error: repoResult.error };
+ }
+ const itemRepo = repoResult.repo;
+ const repoParts = repoResult.repoParts;
+
+ core.info(`Target repository: ${itemRepo}`);
+
+ // Build the wiki git URL
+ const serverUrl = (process.env.GITHUB_SERVER_URL || "https://github.com").replace(/\/$/, "");
+ const wikiGitUrl = `${serverUrl}/${repoParts.owner}/${repoParts.repo}.wiki.git`;
+ core.info(`Wiki git URL: ${wikiGitUrl.replace(/:\/\/[^@]+@/, "://***@")}`);
+
+ // Clone wiki to a temp directory
+ const wikiCloneDir = fs.mkdtempSync(nodePath.join(os.tmpdir(), "gh-aw-wiki-"));
+ core.info(`Cloning wiki to: ${wikiCloneDir}`);
+
+ try {
+ // Clone the wiki repo using auth env vars for authentication
+ const cloneResult = await exec.getExecOutput("git", ["clone", wikiGitUrl, wikiCloneDir], {
+ env: { ...process.env, ...gitAuthEnv },
+ ignoreReturnCode: true,
+ });
+
+ if (cloneResult.exitCode !== 0) {
+ const stderr = (cloneResult.stderr || "").trim();
+ // Distinguish empty wiki (no commits yet) from a real error
+ if (stderr.includes("You appear to have cloned an empty repository") || stderr.includes("warning: You appear to have cloned an empty repository")) {
+ core.info("Wiki repository is empty - will initialize with first commit");
+ // Initialize a new git repo in the clone dir
+ await exec.exec("git", ["init", wikiCloneDir]);
+ await exec.exec("git", ["remote", "add", "origin", wikiGitUrl], { cwd: wikiCloneDir, env: { ...process.env, ...gitAuthEnv } });
+ } else {
+ return {
+ success: false,
+ error: `Failed to clone wiki repository: ${stderr || `git clone exited with code ${cloneResult.exitCode}`}`,
+ };
+ }
+ }
+
+ core.info("Wiki cloned successfully");
+
+ // Configure git identity in the wiki clone
+ await exec.exec("git", ["config", "user.email", "github-actions[bot]@users.noreply.github.com"], { cwd: wikiCloneDir });
+ await exec.exec("git", ["config", "user.name", "github-actions[bot]"], { cwd: wikiCloneDir });
+ await exec.exec("git", ["config", "am.keepcr", "true"], { cwd: wikiCloneDir });
+
+ // Determine the current branch name (wiki default is typically 'master')
+ let wikiBranch = "master";
+ try {
+ const branchResult = await exec.getExecOutput("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
+ cwd: wikiCloneDir,
+ ignoreReturnCode: true,
+ });
+ if (branchResult.exitCode === 0 && branchResult.stdout.trim() && branchResult.stdout.trim() !== "HEAD") {
+ wikiBranch = branchResult.stdout.trim();
+ }
+ } catch {
+ // Use default 'master'
+ }
+ core.info(`Wiki branch: ${wikiBranch}`);
+
+ // Apply patch to wiki clone
+ let patchFileToApply = patchFilePath;
+
+ if (commitTitleSuffix) {
+ core.info(`Appending commit title suffix: "${commitTitleSuffix}"`);
+ let patchFileContent = fs.readFileSync(patchFilePath, "utf8");
+ patchFileContent = patchFileContent.replace(/^Subject: (\[PATCH[^\]]*\] )?(.*)$/gm, (match, patchPrefix, title) => `Subject: ${patchPrefix || "[PATCH] "}${title}${commitTitleSuffix}`);
+ patchFileToApply = nodePath.join(wikiCloneDir, "..", `aw-wiki-modified-${Date.now()}.patch`);
+ fs.writeFileSync(patchFileToApply, patchFileContent, "utf8");
+ core.info(`Patch modified with commit title suffix`);
+ }
+
+ // Log first 100 lines of patch for debugging
+ const finalPatchContent = fs.readFileSync(patchFileToApply, "utf8");
+ const patchLines = finalPatchContent.split("\n");
+ const previewLineCount = Math.min(100, patchLines.length);
+ core.info(`Patch preview (first ${previewLineCount} of ${patchLines.length} lines):`);
+ for (let i = 0; i < previewLineCount; i++) {
+ core.info(patchLines[i]);
+ }
+
+ // Apply the patch with git am --3way
+ // --3way handles cases where the patch base may differ from the wiki state
+ const amResult = await exec.getExecOutput("git", ["am", "--3way", patchFileToApply], {
+ cwd: wikiCloneDir,
+ ignoreReturnCode: true,
+ });
+
+ if (amResult.exitCode !== 0) {
+ const amError = (amResult.stderr || amResult.stdout || "").trim();
+ core.error(`Failed to apply patch to wiki: ${amError}`);
+
+ // Log debug info
+ try {
+ const statusResult = await exec.getExecOutput("git", ["status"], { cwd: wikiCloneDir });
+ core.info(`Git status:\n${statusResult.stdout}`);
+ } catch {
+ // Non-fatal
+ }
+
+ // Abort git am in case of partial apply
+ try {
+ await exec.exec("git", ["am", "--abort"], { cwd: wikiCloneDir });
+ } catch {
+ // Ignore abort errors
+ }
+
+ return { success: false, error: `Failed to apply patch to wiki: ${amError || "git am failed"}` };
+ }
+
+ core.info("Patch applied to wiki successfully");
+
+ // Configure an authenticated remote URL before pushing so that git push
+ // works reliably from a fresh clone. This mirrors what actions/checkout does
+ // internally (https://x-access-token:TOKEN@...) and ensures the push succeeds
+ // even when GIT_CONFIG_* environment variables are not propagated correctly to
+ // child processes in the github-script execution environment.
+ const authToken = config["github-token"] || process.env.GITHUB_TOKEN;
+ if (authToken) {
+ core.setSecret(authToken);
+ const serverUrlHost = serverUrl.replace(/^https?:\/\//, "").replace(/\/$/, "");
+ const authWikiUrl = `https://x-access-token:${authToken}@${serverUrlHost}/${repoParts.owner}/${repoParts.repo}.wiki.git`;
+ // Use silent: true to suppress any command logging that could expose the token
+ // (core.setSecret also masks the value, but defense-in-depth)
+ const setUrlResult = await exec.getExecOutput("git", ["remote", "set-url", "origin", authWikiUrl], {
+ cwd: wikiCloneDir,
+ silent: true,
+ ignoreReturnCode: true,
+ });
+ if (setUrlResult.exitCode !== 0) {
+ core.warning(`Failed to set authenticated remote URL: ${(setUrlResult.stderr || "").trim()}`);
+ }
+ }
+
+ // Push commits using pushSignedCommits (the same helper used by push_to_pull_request_branch).
+ // Wiki repos are not accessible via the GitHub GraphQL createCommitOnBranch mutation,
+ // so the GraphQL path will fail gracefully and the fallback git push will be used.
+ core.info(`Pushing wiki changes to: ${wikiBranch}`);
+ let commitSha = "";
+ try {
+ const pushedSha = await pushSignedCommits({
+ githubClient,
+ owner: repoParts.owner,
+ repo: `${repoParts.repo}.wiki`,
+ branch: wikiBranch,
+ baseRef: `origin/${wikiBranch}`,
+ cwd: wikiCloneDir,
+ gitAuthEnv,
+ });
+ if (pushedSha) {
+ commitSha = pushedSha;
+ }
+ core.info("Wiki changes pushed successfully");
+ } catch (pushError) {
+ const pushErrorMessage = getErrorMessage(pushError);
+ core.error(`Failed to push wiki changes: ${pushErrorMessage}`);
+ return { success: false, error: `Failed to push wiki changes: ${pushErrorMessage}` };
+ }
+
+ // Get the commit SHA for the activation comment update if not set by pushSignedCommits
+ if (!commitSha) {
+ try {
+ const shaResult = await exec.getExecOutput("git", ["rev-parse", "HEAD"], { cwd: wikiCloneDir });
+ commitSha = shaResult.stdout.trim();
+ } catch {
+ // Non-fatal
+ }
+ }
+
+ const wikiUrl = `${serverUrl}/${repoParts.owner}/${repoParts.repo}/wiki`;
+ core.info(`Wiki URL: ${wikiUrl}`);
+
+ return {
+ success: true,
+ url: wikiUrl,
+ sha: commitSha,
+ };
+ } finally {
+ // Clean up wiki clone directory
+ try {
+ fs.rmSync(wikiCloneDir, { recursive: true, force: true });
+ core.info("Cleaned up wiki clone directory");
+ } catch (cleanupError) {
+ core.warning(`Failed to clean up wiki clone directory: ${getErrorMessage(cleanupError)}`);
+ }
+ }
+ };
+}
+
+module.exports = { main };
diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs
index e9fd0baae2f..737f2ac8cb1 100644
--- a/actions/setup/js/safe_output_handler_manager.cjs
+++ b/actions/setup/js/safe_output_handler_manager.cjs
@@ -51,6 +51,7 @@ const HANDLER_MAP = {
resolve_pull_request_review_thread: "./resolve_pr_review_thread.cjs",
create_pull_request: "./create_pull_request.cjs",
push_to_pull_request_branch: "./push_to_pull_request_branch.cjs",
+ edit_wiki: "./edit_wiki.cjs",
update_pull_request: "./update_pull_request.cjs",
merge_pull_request: "./merge_pull_request.cjs",
close_pull_request: "./close_pull_request.cjs",
diff --git a/actions/setup/js/safe_outputs_handlers.cjs b/actions/setup/js/safe_outputs_handlers.cjs
index 606024574e9..0f526d97f66 100644
--- a/actions/setup/js/safe_outputs_handlers.cjs
+++ b/actions/setup/js/safe_outputs_handlers.cjs
@@ -705,6 +705,116 @@ function createHandlers(server, appendSafeOutput, config = {}) {
};
};
+ /**
+ * Handler for edit_wiki tool
+ * Generates a git patch from commits made to a wiki clone directory.
+ * The agent must clone the wiki repo, commit changes, then call this tool.
+ * The patch is applied to the wiki repo by the safe_outputs job handler (edit_wiki.cjs).
+ */
+ const editWikiHandler = async args => {
+ const entry = { ...(args || {}), type: "edit_wiki" };
+ const wikiConfig = config.edit_wiki || {};
+
+ // Determine the wiki directory - from args, config, or standard default location.
+ // The agent is expected to clone the wiki to this directory and commit changes before
+ // calling this tool.
+ const wikiDir = (args && args.wiki_dir) || wikiConfig.wiki_dir || "/tmp/gh-aw/wiki";
+
+ if (!wikiDir || !fs.existsSync(wikiDir)) {
+ return {
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify({
+ result: "error",
+ error: `Wiki directory '${wikiDir}' does not exist. Clone the wiki repo first (e.g. git clone https://github.com/OWNER/REPO.wiki.git /tmp/gh-aw/wiki), commit your changes, then call this tool.`,
+ }),
+ },
+ ],
+ isError: true,
+ };
+ }
+
+ // Detect the current branch in the wiki clone (typically 'master' or 'main')
+ const wikiBranch = getCurrentBranch(wikiDir);
+ if (!wikiBranch) {
+ return {
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify({
+ result: "error",
+ error: `Could not detect current branch in wiki directory '${wikiDir}'. Ensure the directory is a valid git repository with committed changes.`,
+ }),
+ },
+ ],
+ isError: true,
+ };
+ }
+
+ server.debug(`Generating incremental patch for edit_wiki from dir=${wikiDir}, branch=${wikiBranch}`);
+
+ // Use incremental mode: include only the commits made on top of the origin/<branch> clone point.
+ // This captures exactly the commits the agent made on top of the cloned state.
+ const patchOptions = {
+ mode: "incremental",
+ cwd: wikiDir,
+ repoSlug: "wiki",
+ };
+ if (wikiConfig["github-token"]) {
+ patchOptions.token = wikiConfig["github-token"];
+ }
+
+ const patchResult = await generateGitPatch(wikiBranch, wikiBranch, patchOptions);
+
+ if (!patchResult.success) {
+ const errorMsg = patchResult.error || "Failed to generate patch for wiki changes";
+ server.debug(`Wiki patch generation failed: ${errorMsg}`);
+ return {
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify({
+ result: "error",
+ error: errorMsg,
+ details: "No commits were found in the wiki directory. Make sure you have committed your changes using git add and git commit before calling edit_wiki.",
+ }),
+ },
+ ],
+ isError: true,
+ };
+ }
+
+ server.debug(`Wiki patch generated successfully: ${patchResult.patchPath} (${patchResult.patchSize} bytes, ${patchResult.patchLines} lines)`);
+
+ entry.patch_path = patchResult.patchPath;
+
+ if (patchResult.baseCommit) {
+ entry.base_commit = patchResult.baseCommit;
+ }
+
+ if (typeof patchResult.diffSize === "number" && patchResult.diffSize >= 0) {
+ entry.diff_size = patchResult.diffSize;
+ }
+
+ appendSafeOutput(entry);
+ return {
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify({
+ result: "success",
+ patch: {
+ path: patchResult.patchPath,
+ size: patchResult.patchSize,
+ lines: patchResult.patchLines,
+ },
+ }),
+ },
+ ],
+ };
+ };
+
/**
* Handler for push_repo_memory tool
* Validates that memory files in the configured memory directory are within size limits.
@@ -1084,6 +1194,7 @@ function createHandlers(server, appendSafeOutput, config = {}) {
uploadArtifactHandler,
createPullRequestHandler,
pushToPullRequestBranchHandler,
+ editWikiHandler,
pushRepoMemoryHandler,
createProjectHandler,
addCommentHandler,
diff --git a/actions/setup/js/safe_outputs_tools.json b/actions/setup/js/safe_outputs_tools.json
index d333fbcd9a7..76c0708c516 100644
--- a/actions/setup/js/safe_outputs_tools.json
+++ b/actions/setup/js/safe_outputs_tools.json
@@ -883,6 +883,33 @@
"additionalProperties": false
}
},
+ {
+ "name": "edit_wiki",
+ "description": "Push committed changes to a repository wiki. Use this to add or update wiki pages by committing changes locally and then calling this tool. The changes are applied to the default branch of the wiki.",
+ "inputSchema": {
+ "type": "object",
+ "required": ["message"],
+ "properties": {
+ "message": {
+ "type": "string",
+ "description": "Commit message describing the wiki changes. Follow repository commit message conventions."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ },
+ "wiki_dir": {
+ "type": "string",
+ "description": "Path to the local wiki repo clone (e.g., /tmp/gh-aw/wiki). Defaults to /tmp/gh-aw/wiki. Clone the wiki repo with git clone before calling this tool."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "upload_asset",
"description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content.",
diff --git a/actions/setup/js/safe_outputs_tools_loader.cjs b/actions/setup/js/safe_outputs_tools_loader.cjs
index b13e940cec9..e888622819b 100644
--- a/actions/setup/js/safe_outputs_tools_loader.cjs
+++ b/actions/setup/js/safe_outputs_tools_loader.cjs
@@ -74,6 +74,7 @@ function attachHandlers(tools, handlers) {
const handlerMap = {
create_pull_request: handlers.createPullRequestHandler,
push_to_pull_request_branch: handlers.pushToPullRequestBranchHandler,
+ edit_wiki: handlers.editWikiHandler,
push_repo_memory: handlers.pushRepoMemoryHandler,
upload_asset: handlers.uploadAssetHandler,
upload_artifact: handlers.uploadArtifactHandler,
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index ec69d560e4f..24510225f90 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4760,7 +4760,7 @@
},
"safe-outputs": {
"type": "object",
- "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
+ "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, edit-wiki, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
"description": "Safe output processing configuration that automatically creates GitHub issues, comments, and pull requests from AI workflow output without requiring write permissions in the main job",
"examples": [
{
@@ -9043,6 +9043,78 @@
}
],
"description": "Enable AI agents to signal that a task could not be completed due to infrastructure or tool failures (e.g., MCP crash, missing auth, inaccessible repository). Activates failure handling even when the agent exits 0."
+ },
+ "edit-wiki": {
+ "oneOf": [
+ {
+ "type": "null",
+ "description": "Use default configuration (if-no-changes: 'warn')"
+ },
+ {
+ "type": "object",
+ "description": "Configuration for pushing committed changes to a repository wiki. Changes are applied to the default branch of the wiki.",
+ "properties": {
+ "max": {
+ "description": "Maximum number of wiki edit operations to perform (default: 1). Supports integer or GitHub Actions expression.",
+ "oneOf": [
+ {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 10,
+ "default": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^\\$\\{\\{.*\\}\\}$",
+ "description": "GitHub Actions expression that resolves to an integer at runtime"
+ }
+ ]
+ },
+ "repo": {
+ "type": "string",
+ "description": "Target repository for wiki edits in 'owner/repo' format. Defaults to the current repository. Must be in allowed-repos if allowed-repos is configured."
+ },
+ "allowed-repos": {
+ "description": "List of repositories in 'owner/repo' format that the wiki edit can target. When configured, allows specifying a 'repo' field in the tool call. Accepts an array of repository slugs or a GitHub Actions expression.",
+ "oneOf": [
+ {
+ "type": "array",
+ "description": "Array of repository slugs",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "string",
+ "pattern": "^\\$\\{\\{.*\\}\\}$",
+ "description": "GitHub Actions expression that resolves to a comma-separated list of repository slugs"
+ }
+ ]
+ },
+ "if-no-changes": {
+ "type": "string",
+ "enum": ["warn", "error", "ignore"],
+ "default": "warn",
+ "description": "Behavior when no changes are present in the patch: 'warn' (default, skip with warning), 'error' (fail the step), 'ignore' (skip silently)"
+ },
+ "commit-title-suffix": {
+ "type": "string",
+ "description": "Optional suffix to append to the generated commit title (e.g., ' [automated]')"
+ },
+ "github-token": {
+ "type": "string",
+ "description": "GitHub token for wiki push operations. Supports GitHub Actions secret expressions (e.g. '${{ secrets.MY_TOKEN }}'). Defaults to the safe-outputs github-token or GITHUB_TOKEN."
+ },
+ "staged": {
+ "type": "boolean",
+ "default": false,
+ "description": "If true, emit a step summary preview instead of pushing to the wiki. Useful for testing workflow configuration."
+ }
+ },
+ "additionalProperties": false
+ }
+ ],
+ "description": "Push committed changes to a repository wiki. Changes are applied to the default branch of the wiki using a git patch."
}
},
"additionalProperties": false
diff --git a/pkg/workflow/compiler_safe_outputs_handlers.go b/pkg/workflow/compiler_safe_outputs_handlers.go
index 8c1982522f8..b4a5e6adb5d 100644
--- a/pkg/workflow/compiler_safe_outputs_handlers.go
+++ b/pkg/workflow/compiler_safe_outputs_handlers.go
@@ -455,6 +455,26 @@ var handlerRegistry = map[string]handlerBuilder{
AddBoolPtr("check_branch_protection", c.CheckBranchProtection).
Build()
},
+ "edit_wiki": func(cfg *SafeOutputsConfig) map[string]any {
+ if cfg.EditWiki == nil {
+ return nil
+ }
+ c := cfg.EditWiki
+ maxPatchSize := 1024 // default 1024 KB
+ if cfg.MaximumPatchSize > 0 {
+ maxPatchSize = cfg.MaximumPatchSize
+ }
+ return newHandlerConfigBuilder().
+ AddTemplatableInt("max", c.Max).
+ AddIfNotEmpty("if_no_changes", c.IfNoChanges).
+ AddIfNotEmpty("commit_title_suffix", c.CommitTitleSuffix).
+ AddDefault("max_patch_size", maxPatchSize).
+ AddIfNotEmpty("target-repo", c.TargetRepoSlug).
+ AddTemplatableStringSlice("allowed_repos", c.AllowedRepos).
+ AddIfNotEmpty("github-token", c.GitHubToken).
+ AddIfTrue("staged", c.Staged).
+ Build()
+ },
"update_pull_request": func(cfg *SafeOutputsConfig) map[string]any {
if cfg.UpdatePullRequests == nil {
return nil
diff --git a/pkg/workflow/compiler_safe_outputs_job.go b/pkg/workflow/compiler_safe_outputs_job.go
index 2b3d879db4f..b3984e1d377 100644
--- a/pkg/workflow/compiler_safe_outputs_job.go
+++ b/pkg/workflow/compiler_safe_outputs_job.go
@@ -91,11 +91,11 @@ func (c *Compiler) buildSafeOutputsSetupAndDownloadSteps(data *WorkflowData, age
// In workflow_call context, use the per-invocation prefix to avoid artifact name clashes.
steps = append(steps, buildAgentOutputDownloadSteps(agentArtifactPrefix)...)
- // Add patch artifact download if create-pull-request or push-to-pull-request-branch is enabled
- // Both of these safe outputs require the patch file to apply changes
+ // Add patch artifact download if create-pull-request, push-to-pull-request-branch, or edit-wiki is enabled.
+ // All of these safe outputs require the patch file to apply changes.
// Download from unified agent artifact (prefixed in workflow_call context)
if usesPatchesAndCheckouts(data.SafeOutputs) {
- consolidatedSafeOutputsJobLog.Print("Adding patch artifact download for create-pull-request or push-to-pull-request-branch")
+ consolidatedSafeOutputsJobLog.Print("Adding patch artifact download for create-pull-request, push-to-pull-request-branch, or edit-wiki")
patchDownloadSteps := buildArtifactDownloadSteps(ArtifactDownloadConfig{
ArtifactName: agentArtifactPrefix + constants.AgentArtifactName,
DownloadPath: "/tmp/gh-aw/",
@@ -104,10 +104,14 @@ func (c *Compiler) buildSafeOutputsSetupAndDownloadSteps(data *WorkflowData, age
})
steps = append(steps, patchDownloadSteps...)
- // Add checkout and git config steps for PR operations
- consolidatedSafeOutputsJobLog.Print("Adding shared checkout step for PR operations")
- checkoutSteps := c.buildSharedPRCheckoutSteps(data)
- steps = append(steps, checkoutSteps...)
+ // Add checkout and git config steps only for PR-based operations.
+ // edit-wiki clones the wiki repository at runtime in its handler and
+ // does not need a compile-time checkout of the source repository.
+ if usesPRCheckout(data.SafeOutputs) {
+ consolidatedSafeOutputsJobLog.Print("Adding shared checkout step for PR operations")
+ checkoutSteps := c.buildSharedPRCheckoutSteps(data)
+ steps = append(steps, checkoutSteps...)
+ }
}
// Configure GH_HOST for GHES/GHEC compatibility.
@@ -483,7 +487,7 @@ func (c *Compiler) buildSafeOutputsJobFromParts(
// "Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under .../actions/setup".
// We add a restore checkout step (if: always()) as the last step so the post-step
// can always find action.yml and complete its /tmp/gh-aw cleanup.
- if c.actionMode.IsDev() && usesPatchesAndCheckouts(data.SafeOutputs) {
+ if c.actionMode.IsDev() && usesPRCheckout(data.SafeOutputs) {
steps = append(steps, c.generateRestoreActionsSetupStep())
consolidatedSafeOutputsJobLog.Print("Added restore actions folder step to safe_outputs job (dev mode with checkout)")
}
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index b62376f4f27..cf552ce69ee 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -590,6 +590,7 @@ type SafeOutputsConfig struct {
UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body
MergePullRequest *MergePullRequestConfig `yaml:"merge-pull-request,omitempty"` // Merge pull requests under constrained policy checks
PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"`
+ EditWiki *EditWikiConfig `yaml:"edit-wiki,omitempty"` // Push changes to a repository's wiki
UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"`
UploadArtifact *UploadArtifactConfig `yaml:"upload-artifact,omitempty"` // Upload files as run-scoped GitHub Actions artifacts
UpdateRelease *UpdateReleaseConfig `yaml:"update-release,omitempty"` // Update GitHub release descriptions
diff --git a/pkg/workflow/edit_wiki.go b/pkg/workflow/edit_wiki.go
new file mode 100644
index 00000000000..67319708f6b
--- /dev/null
+++ b/pkg/workflow/edit_wiki.go
@@ -0,0 +1,75 @@
+package workflow
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/github/gh-aw/pkg/logger"
+)
+
+var editWikiLog = logger.New("workflow:edit_wiki")
+
+// EditWikiConfig holds configuration for pushing changes to a repository's wiki
+type EditWikiConfig struct {
+ BaseSafeOutputConfig `yaml:",inline"`
+ TargetRepoSlug string `yaml:"repo,omitempty"` // Target repository in format "owner/repo". Defaults to the current repository.
+ AllowedRepos []string `yaml:"allowed-repos,omitempty"` // List of repositories in format "owner/repo" that the wiki edit can target
+ IfNoChanges string `yaml:"if-no-changes,omitempty"` // Behavior when no changes to push: "warn", "error", or "ignore" (default: "warn")
+ CommitTitleSuffix string `yaml:"commit-title-suffix,omitempty"` // Optional suffix to append to generated commit titles
+}
+
+// parseEditWikiConfig handles edit-wiki configuration
+func (c *Compiler) parseEditWikiConfig(outputMap map[string]any) *EditWikiConfig {
+ if configData, exists := outputMap["edit-wiki"]; exists {
+ editWikiLog.Print("Parsing edit-wiki configuration")
+ editWikiConfig := &EditWikiConfig{
+ IfNoChanges: "warn", // Default behavior: warn when no changes
+ }
+
+ // Handle the case where configData is nil (edit-wiki: with no value)
+ if configData == nil {
+ return editWikiConfig
+ }
+
+ if configMap, ok := configData.(map[string]any); ok {
+ // Parse repo (optional, defaults to current repository)
+ if repo, exists := configMap["repo"]; exists {
+ if repoStr, ok := repo.(string); ok {
+ editWikiConfig.TargetRepoSlug = repoStr
+ }
+ }
+
+ // Parse allowed-repos (expression-aware)
+ editWikiConfig.AllowedRepos = ParseStringArrayOrExprFromConfig(configMap, "allowed-repos", editWikiLog)
+
+ // Parse if-no-changes (optional, defaults to "warn")
+ if ifNoChanges, exists := configMap["if-no-changes"]; exists {
+ if ifNoChangesStr, ok := ifNoChanges.(string); ok {
+ switch ifNoChangesStr {
+ case "warn", "error", "ignore":
+ editWikiConfig.IfNoChanges = ifNoChangesStr
+ default:
+ if c.verbose {
+ fmt.Fprintf(os.Stderr, "Warning: invalid if-no-changes value '%s' for edit-wiki, using default 'warn'\n", ifNoChangesStr)
+ }
+ editWikiConfig.IfNoChanges = "warn"
+ }
+ }
+ }
+
+ // Parse commit-title-suffix (optional)
+ if commitTitleSuffix, exists := configMap["commit-title-suffix"]; exists {
+ if commitTitleSuffixStr, ok := commitTitleSuffix.(string); ok {
+ editWikiConfig.CommitTitleSuffix = commitTitleSuffixStr
+ }
+ }
+
+ // Parse common base fields with default max of 1
+ c.parseBaseSafeOutputConfig(configMap, &editWikiConfig.BaseSafeOutputConfig, 1)
+ }
+
+ return editWikiConfig
+ }
+
+ return nil
+}
diff --git a/pkg/workflow/edit_wiki_test.go b/pkg/workflow/edit_wiki_test.go
new file mode 100644
index 00000000000..45be8729eec
--- /dev/null
+++ b/pkg/workflow/edit_wiki_test.go
@@ -0,0 +1,372 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "encoding/json"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/github/gh-aw/pkg/stringutil"
+ "github.com/github/gh-aw/pkg/testutil"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "go.yaml.in/yaml/v3"
+)
+
+// extractEditWikiHandlerConfig extracts the edit_wiki handler config from a compiled
+// lock file's GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG env var.
+func extractEditWikiHandlerConfig(t *testing.T, lockContent []byte) map[string]any {
+ t.Helper()
+
+ var workflowDoc map[string]any
+ require.NoError(t, yaml.Unmarshal(lockContent, &workflowDoc), "Failed to unmarshal lock workflow YAML")
+
+ jobsRaw, ok := workflowDoc["jobs"].(map[string]any)
+ require.True(t, ok, "Generated workflow should contain jobs map")
+
+ safeOutputsJobRaw, ok := jobsRaw["safe_outputs"].(map[string]any)
+ require.True(t, ok, "Generated workflow should contain safe_outputs job")
+
+ stepsRaw, ok := safeOutputsJobRaw["steps"].([]any)
+ require.True(t, ok, "Generated workflow safe_outputs job should contain steps array")
+
+ var handlerConfigJSON string
+ for _, step := range stepsRaw {
+ stepMap, ok := step.(map[string]any)
+ if !ok {
+ continue
+ }
+ envMap, ok := stepMap["env"].(map[string]any)
+ if !ok {
+ continue
+ }
+ rawConfig, ok := envMap["GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG"].(string)
+ if ok && rawConfig != "" {
+ handlerConfigJSON = rawConfig
+ break
+ }
+ }
+
+ require.NotEmpty(t, handlerConfigJSON, "Generated workflow should contain GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG env var")
+
+ var handlerConfig map[string]any
+ require.NoError(t, json.Unmarshal([]byte(handlerConfigJSON), &handlerConfig), "Failed to unmarshal GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG JSON")
+
+ editWikiCfgRaw, ok := handlerConfig["edit_wiki"].(map[string]any)
+ require.True(t, ok, "Handler config should contain edit_wiki object")
+
+ return editWikiCfgRaw
+}
+
+func TestEditWikiConfigParsing(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "test-*")
+
+ testMarkdown := `---
+on:
+ issues:
+ types: [opened]
+safe-outputs:
+ edit-wiki:
+ noop:
+ report-as-issue: false
+---
+
+# Test Edit Wiki
+
+This is a test workflow to validate edit-wiki configuration parsing.
+`
+
+ mdFile := filepath.Join(tmpDir, "test-edit-wiki.md")
+ require.NoError(t, os.WriteFile(mdFile, []byte(testMarkdown), 0644), "Failed to write test markdown file")
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(mdFile), "Failed to compile workflow")
+
+ lockFile := stringutil.MarkdownToLockFile(mdFile)
+ lockContent, err := os.ReadFile(lockFile)
+ require.NoError(t, err, "Failed to read lock file")
+
+ lockContentStr := string(lockContent)
+
+ // Verify safe_outputs job is generated
+ assert.Contains(t, lockContentStr, "safe_outputs:", "Generated workflow should contain safe_outputs job")
+
+ // Verify handler manager step is present
+ assert.Contains(t, lockContentStr, "id: process_safe_outputs", "Generated workflow should contain process_safe_outputs step")
+
+ // Verify edit_wiki config is in handler manager config
+ assert.Contains(t, lockContentStr, "edit_wiki", "Generated workflow should contain edit_wiki in handler config")
+
+ // Verify that required permissions are present
+ safeOutputsJobSection := extractJobSection(lockContentStr, "safe_outputs")
+ assert.NotEmpty(t, safeOutputsJobSection, "safe_outputs job section should be present")
+ assert.Contains(t, safeOutputsJobSection, "contents: write", "Generated workflow should have contents: write permission")
+
+ // Verify that the patch download step is included (edit-wiki needs patches)
+ assert.Contains(t, lockContentStr, "Download patch artifact", "Generated workflow should download patch artifact")
+
+ // Verify there is no Checkout repository step specifically for the main repo in the safe_outputs job
+ // (edit-wiki does not need a compile-time checkout of the source repo)
+ assert.NotContains(t, safeOutputsJobSection, "Checkout repository", "edit-wiki alone should not generate a repo checkout step")
+}
+
+func TestEditWikiWithAllowedRepos(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "test-*")
+
+ testMarkdown := `---
+on:
+ issues:
+ types: [opened]
+safe-outputs:
+ edit-wiki:
+ allowed-repos:
+ - "org/other-repo"
+ noop:
+ report-as-issue: false
+---
+
+# Test Edit Wiki Allowed Repos
+`
+
+ mdFile := filepath.Join(tmpDir, "test-edit-wiki-allowed-repos.md")
+ require.NoError(t, os.WriteFile(mdFile, []byte(testMarkdown), 0644), "Failed to write test markdown file")
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(mdFile), "Failed to compile workflow")
+
+ lockFile := stringutil.MarkdownToLockFile(mdFile)
+ lockContent, err := os.ReadFile(lockFile)
+ require.NoError(t, err, "Failed to read lock file")
+
+ wikiCfg := extractEditWikiHandlerConfig(t, lockContent)
+
+ // Verify allowed_repos is in the handler config
+ allowedRepos, exists := wikiCfg["allowed_repos"]
+ assert.True(t, exists, "edit_wiki handler config should contain allowed_repos")
+ allowedReposSlice, ok := allowedRepos.([]any)
+ assert.True(t, ok, "allowed_repos should be a slice")
+ assert.Len(t, allowedReposSlice, 1, "allowed_repos should have 1 entry")
+ assert.Equal(t, "org/other-repo", allowedReposSlice[0], "allowed_repos should contain org/other-repo")
+}
+
+func TestEditWikiWithTargetRepo(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "test-*")
+
+ testMarkdown := `---
+on:
+ issues:
+ types: [opened]
+safe-outputs:
+ edit-wiki:
+ repo: "org/target-repo"
+ noop:
+ report-as-issue: false
+---
+
+# Test Edit Wiki Target Repo
+`
+
+ mdFile := filepath.Join(tmpDir, "test-edit-wiki-target-repo.md")
+ require.NoError(t, os.WriteFile(mdFile, []byte(testMarkdown), 0644), "Failed to write test markdown file")
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(mdFile), "Failed to compile workflow")
+
+ lockFile := stringutil.MarkdownToLockFile(mdFile)
+ lockContent, err := os.ReadFile(lockFile)
+ require.NoError(t, err, "Failed to read lock file")
+
+ wikiCfg := extractEditWikiHandlerConfig(t, lockContent)
+
+ // Verify target-repo is in the handler config
+ targetRepo, exists := wikiCfg["target-repo"]
+ assert.True(t, exists, "edit_wiki handler config should contain target-repo")
+ assert.Equal(t, "org/target-repo", targetRepo, "target-repo should be org/target-repo")
+}
+
+func TestEditWikiIfNoChanges(t *testing.T) {
+ tests := []struct {
+ name string
+ ifNoChanges string
+ expectInJSON string
+ }{
+ {
+ name: "error value",
+ ifNoChanges: "error",
+ expectInJSON: `"if_no_changes":"error"`,
+ },
+ {
+ name: "ignore value",
+ ifNoChanges: "ignore",
+ expectInJSON: `"if_no_changes":"ignore"`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "test-*")
+
+ testMarkdown := `---
+on:
+ issues:
+ types: [opened]
+safe-outputs:
+ edit-wiki:
+ if-no-changes: ` + tt.ifNoChanges + `
+ noop:
+ report-as-issue: false
+---
+
+# Test Edit Wiki If No Changes
+`
+
+ mdFile := filepath.Join(tmpDir, "test-edit-wiki-if-no-changes.md")
+ require.NoError(t, os.WriteFile(mdFile, []byte(testMarkdown), 0644), "Failed to write test markdown file")
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(mdFile), "Failed to compile workflow")
+
+ lockFile := stringutil.MarkdownToLockFile(mdFile)
+ lockContent, err := os.ReadFile(lockFile)
+ require.NoError(t, err, "Failed to read lock file")
+
+ lockContentStr := string(lockContent)
+ assert.True(t,
+ strings.Contains(lockContentStr, tt.expectInJSON) ||
+ strings.Contains(lockContentStr, strings.ReplaceAll(tt.expectInJSON, `"`, `\"`)),
+ "Generated workflow should contain if_no_changes=%s in handler config", tt.ifNoChanges,
+ )
+ })
+ }
+}
+
+func TestEditWikiPermissions(t *testing.T) {
+ tmpDir := testutil.TempDir(t, "test-*")
+
+ testMarkdown := `---
+on:
+ issues:
+ types: [opened]
+safe-outputs:
+ edit-wiki:
+ noop:
+ report-as-issue: false
+---
+
+# Test Edit Wiki Permissions
+`
+
+ mdFile := filepath.Join(tmpDir, "test-edit-wiki-permissions.md")
+ require.NoError(t, os.WriteFile(mdFile, []byte(testMarkdown), 0644), "Failed to write test markdown file")
+
+ compiler := NewCompiler()
+ require.NoError(t, compiler.CompileWorkflow(mdFile), "Failed to compile workflow")
+
+ lockFile := stringutil.MarkdownToLockFile(mdFile)
+ lockContent, err := os.ReadFile(lockFile)
+ require.NoError(t, err, "Failed to read lock file")
+
+ lockContentStr := string(lockContent)
+ safeOutputsJobSection := extractJobSection(lockContentStr, "safe_outputs")
+ require.NotEmpty(t, safeOutputsJobSection, "safe_outputs job section should be present")
+
+ // Edit-wiki only requires contents: write (not pull-requests: write)
+ assert.Contains(t, safeOutputsJobSection, "contents: write", "Generated workflow should have contents: write permission for edit-wiki")
+ assert.NotContains(t, safeOutputsJobSection, "pull-requests: write", "Generated workflow should NOT have pull-requests: write permission for edit-wiki")
+}
+
+func TestUsesPatchesAndCheckoutsIncludesEditWiki(t *testing.T) {
+ tests := []struct {
+ name string
+ safeOutputs *SafeOutputsConfig
+ expected bool
+ }{
+ {
+ name: "nil safe outputs",
+ safeOutputs: nil,
+ expected: false,
+ },
+ {
+ name: "edit-wiki configured",
+ safeOutputs: &SafeOutputsConfig{
+ EditWiki: &EditWikiConfig{},
+ },
+ expected: true,
+ },
+ {
+ name: "edit-wiki staged returns false",
+ safeOutputs: &SafeOutputsConfig{
+ EditWiki: &EditWikiConfig{BaseSafeOutputConfig: BaseSafeOutputConfig{Staged: true}},
+ },
+ expected: false,
+ },
+ {
+ name: "edit-wiki with globally staged returns false",
+ safeOutputs: &SafeOutputsConfig{
+ Staged: true,
+ EditWiki: &EditWikiConfig{},
+ },
+ expected: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := usesPatchesAndCheckouts(tt.safeOutputs)
+ assert.Equal(t, tt.expected, result, "usesPatchesAndCheckouts should return expected value")
+ })
+ }
+}
+
+func TestUsesPRCheckoutExcludesEditWiki(t *testing.T) {
+ tests := []struct {
+ name string
+ safeOutputs *SafeOutputsConfig
+ expected bool
+ }{
+ {
+ name: "nil safe outputs",
+ safeOutputs: nil,
+ expected: false,
+ },
+ {
+ name: "edit-wiki only returns false (no PR checkout needed)",
+ safeOutputs: &SafeOutputsConfig{
+ EditWiki: &EditWikiConfig{},
+ },
+ expected: false,
+ },
+ {
+ name: "create-pull-request returns true",
+ safeOutputs: &SafeOutputsConfig{
+ CreatePullRequests: &CreatePullRequestsConfig{},
+ },
+ expected: true,
+ },
+ {
+ name: "push-to-pull-request-branch returns true",
+ safeOutputs: &SafeOutputsConfig{
+ PushToPullRequestBranch: &PushToPullRequestBranchConfig{},
+ },
+ expected: true,
+ },
+ {
+ name: "edit-wiki with create-pull-request returns true",
+ safeOutputs: &SafeOutputsConfig{
+ EditWiki: &EditWikiConfig{},
+ CreatePullRequests: &CreatePullRequestsConfig{},
+ },
+ expected: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := usesPRCheckout(tt.safeOutputs)
+ assert.Equal(t, tt.expected, result, "usesPRCheckout should return expected value")
+ })
+ }
+}
diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json
index 75c123855cb..55d98a08bb3 100644
--- a/pkg/workflow/js/safe_outputs_tools.json
+++ b/pkg/workflow/js/safe_outputs_tools.json
@@ -1032,6 +1032,35 @@
"additionalProperties": false
}
},
+ {
+ "name": "edit_wiki",
+      "description": "Push committed changes to a repository wiki. Use this to add or update wiki pages by committing changes locally and then calling this tool. The changes are applied to the default branch of the wiki.",
+ "inputSchema": {
+ "type": "object",
+ "required": [
+ "message"
+ ],
+ "properties": {
+ "message": {
+ "type": "string",
+ "description": "Commit message describing the wiki changes. Follow repository commit message conventions."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ },
+ "wiki_dir": {
+ "type": "string",
+ "description": "Path to the local wiki repo clone (e.g., /tmp/gh-aw/wiki). Defaults to /tmp/gh-aw/wiki. Clone the wiki repo with git clone before calling this tool."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "upload_asset",
"description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content.",
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index b825bc86aa6..aa6b8237646 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -271,6 +271,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut
config.PushToPullRequestBranch = pushToBranchConfig
}
+ // Handle edit-wiki
+ editWikiConfig := c.parseEditWikiConfig(outputMap)
+ if editWikiConfig != nil {
+ config.EditWiki = editWikiConfig
+ }
+
// Handle upload-asset
uploadAssetsConfig := c.parseUploadAssetConfig(outputMap)
if uploadAssetsConfig != nil {
diff --git a/pkg/workflow/safe_outputs_permissions.go b/pkg/workflow/safe_outputs_permissions.go
index 6e75492f492..d0d8f6293a2 100644
--- a/pkg/workflow/safe_outputs_permissions.go
+++ b/pkg/workflow/safe_outputs_permissions.go
@@ -177,6 +177,10 @@ func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissio
permissions.Set(PermissionAdministration, PermissionRead)
}
}
+ if safeOutputs.EditWiki != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.EditWiki.Staged) {
+ safeOutputsPermissionsLog.Print("Adding permissions for edit-wiki")
+ permissions.Merge(NewPermissionsContentsWrite())
+ }
if safeOutputs.UpdatePullRequests != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.UpdatePullRequests.Staged) {
safeOutputsPermissionsLog.Print("Adding permissions for update-pull-request")
if safeOutputs.UpdatePullRequests.UpdateBranch != nil && *safeOutputs.UpdatePullRequests.UpdateBranch {
diff --git a/pkg/workflow/safe_outputs_runtime.go b/pkg/workflow/safe_outputs_runtime.go
index 04155866f94..5403123adbd 100644
--- a/pkg/workflow/safe_outputs_runtime.go
+++ b/pkg/workflow/safe_outputs_runtime.go
@@ -36,17 +36,37 @@ func (c *Compiler) formatFrameworkJobRunsOn(data *WorkflowData) string {
}
// usesPatchesAndCheckouts checks if the workflow uses safe outputs that require
-// git patches and checkouts (create-pull-request or push-to-pull-request-branch).
+// git patches and checkouts (create-pull-request, push-to-pull-request-branch, or edit-wiki).
// Staged handlers are excluded because they only emit preview output and do not
// perform real git operations or API calls.
func usesPatchesAndCheckouts(safeOutputs *SafeOutputsConfig) bool {
+ if safeOutputs == nil {
+ return false
+ }
+ createPRNeedsCheckout := safeOutputs.CreatePullRequests != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.CreatePullRequests.Staged)
+ pushToPRNeedsCheckout := safeOutputs.PushToPullRequestBranch != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.PushToPullRequestBranch.Staged)
+ editWikiNeedsCheckout := safeOutputs.EditWiki != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.EditWiki.Staged)
+ result := createPRNeedsCheckout || pushToPRNeedsCheckout || editWikiNeedsCheckout
+ safeOutputsRuntimeLog.Printf("usesPatchesAndCheckouts: createPR=%v(needsCheckout=%v), pushToPRBranch=%v(needsCheckout=%v), editWiki=%v(needsCheckout=%v), result=%v",
+ safeOutputs.CreatePullRequests != nil, createPRNeedsCheckout,
+ safeOutputs.PushToPullRequestBranch != nil, pushToPRNeedsCheckout,
+ safeOutputs.EditWiki != nil, editWikiNeedsCheckout,
+ result)
+ return result
+}
+
+// usesPRCheckout checks if the workflow uses safe outputs that require checking out
+// the repository and configuring git (create-pull-request or push-to-pull-request-branch).
+// edit-wiki is excluded because the edit-wiki handler clones the wiki repository at
+// runtime and does not need a checkout of the source repository in the generated workflow.
+func usesPRCheckout(safeOutputs *SafeOutputsConfig) bool {
if safeOutputs == nil {
return false
}
createPRNeedsCheckout := safeOutputs.CreatePullRequests != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.CreatePullRequests.Staged)
pushToPRNeedsCheckout := safeOutputs.PushToPullRequestBranch != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.PushToPullRequestBranch.Staged)
result := createPRNeedsCheckout || pushToPRNeedsCheckout
- safeOutputsRuntimeLog.Printf("usesPatchesAndCheckouts: createPR=%v(needsCheckout=%v), pushToPRBranch=%v(needsCheckout=%v), result=%v",
+ safeOutputsRuntimeLog.Printf("usesPRCheckout: createPR=%v(needsCheckout=%v), pushToPRBranch=%v(needsCheckout=%v), result=%v",
safeOutputs.CreatePullRequests != nil, createPRNeedsCheckout,
safeOutputs.PushToPullRequestBranch != nil, pushToPRNeedsCheckout,
result)
diff --git a/pkg/workflow/safe_outputs_tools_repo_params.go b/pkg/workflow/safe_outputs_tools_repo_params.go
index 787b213895e..b7cb880f60c 100644
--- a/pkg/workflow/safe_outputs_tools_repo_params.go
+++ b/pkg/workflow/safe_outputs_tools_repo_params.go
@@ -35,6 +35,11 @@ func addRepoParameterIfNeeded(tool map[string]any, toolName string, safeOutputs
hasAllowedRepos = len(config.AllowedRepos) > 0
targetRepoSlug = config.TargetRepoSlug
}
+ case "edit_wiki":
+ if config := safeOutputs.EditWiki; config != nil {
+ hasAllowedRepos = len(config.AllowedRepos) > 0
+ targetRepoSlug = config.TargetRepoSlug
+ }
case "create_pull_request_review_comment":
if config := safeOutputs.CreatePullRequestReviewComments; config != nil {
hasAllowedRepos = len(config.AllowedRepos) > 0
diff --git a/scratchpad/dev.md b/scratchpad/dev.md
index f158f364fc5..bf7b4533be6 100644
--- a/scratchpad/dev.md
+++ b/scratchpad/dev.md
@@ -727,6 +727,9 @@ sequenceDiagram
- `create-project-status-update`
- `update-release`, `upload-asset`
+**Wiki**:
+- `edit-wiki`
+
**Security & Agent Tasks**:
- `create-code-scanning-alert`
- `create-agent-session`
@@ -805,7 +808,49 @@ safe-outputs:
The `blocked` field accepts a list of usernames or glob patterns. If the AI agent attempts to assign/unassign a blocked user, the operation is rejected. The `allowed` field restricts which users can be operated on; if omitted, all non-blocked users are permitted.
-### Attribution Footers
+**Edit Wiki** (`edit-wiki`):
+```yaml
+safe-outputs:
+ edit-wiki: # minimal — targets the current repo's wiki
+
+ # or with options:
+ edit-wiki:
+ repo: "org/other-repo" # optional, defaults to current repo
+ allowed-repos: # restricts which repos can be targeted
+ - "org/other-repo"
+ if-no-changes: warn # warn | error | ignore (default: warn)
+ commit-title-suffix: " [bot]" # appended to each commit subject line
+ github-token: ${{ secrets.MY_TOKEN }}
+ staged: false
+```
+
+The agent clones the wiki repo (`OWNER/REPO.wiki.git`) to a local directory, creates or edits wiki page files, commits the changes, then calls the `edit_wiki` tool. The safe-output job applies the patch via `git am --3way` and pushes to the wiki's default branch (`master`).
+
+Agent usage pattern:
+```bash
+# 1. Clone the wiki
+git clone https://github.com/OWNER/REPO.wiki.git /tmp/gh-aw/wiki
+git -C /tmp/gh-aw/wiki config user.email "github-actions[bot]@users.noreply.github.com"
+git -C /tmp/gh-aw/wiki config user.name "github-actions[bot]"
+
+# 2. Create or edit wiki pages
+printf '# My Page\nContent here\n' > /tmp/gh-aw/wiki/My-Page.md
+
+# 3. Commit the changes
+git -C /tmp/gh-aw/wiki add .
+git -C /tmp/gh-aw/wiki commit -m "Add My-Page"
+
+# 4. Call the edit_wiki tool
+# { "message": "Add My-Page", "wiki_dir": "/tmp/gh-aw/wiki" }
+```
+
+Testing `edit-wiki` locally with `smoke-codex.md`:
+1. Add `edit-wiki:` under `safe-outputs:` in your workflow
+2. Run the workflow: `gh aw run .github/workflows/smoke-codex.md`
+3. The smoke test (#10) clones the wiki, creates `Smoke-Test-Haiku.md`, appends a link to `Home.md`, commits, and calls the tool
+4. Check the wiki at `https://github.com/OWNER/REPO/wiki` to confirm the new page and link appear
+
+### Attribution Footers
All GitHub content created by safe outputs includes attribution:
@@ -2590,6 +2635,7 @@ type Everything interface {
| `add-comment` | 1 | ✅ | `issues: write` or `pull-requests: write` |
| `assign-to-user` | 1 | ✅ | `issues: write` |
| `unassign-from-user` | 1 | ✅ | `issues: write` |
+| `edit-wiki` | 1 | ✅ | `contents: write` |
| `missing-tool` | 0 (unlimited) | N/A | Optional `issues: write` |
| `noop` | 1 | N/A | None |