diff --git a/.changeset/patch-add-trufflehog-shared-workflow.md b/.changeset/patch-add-trufflehog-shared-workflow.md
new file mode 100644
index 00000000000..27b1058a77d
--- /dev/null
+++ b/.changeset/patch-add-trufflehog-shared-workflow.md
@@ -0,0 +1,5 @@
+---
+"gh-aw": patch
+---
+
+Added a shared TruffleHog agentic workflow for scanning agent output and memory artifacts for secrets.
diff --git a/.github/workflows/shared/trufflehog.md b/.github/workflows/shared/trufflehog.md
new file mode 100644
index 00000000000..c81bb2b27b8
--- /dev/null
+++ b/.github/workflows/shared/trufflehog.md
@@ -0,0 +1,223 @@
+---
+jobs:
+ trufflehog_scan:
+ runs-on: ubuntu-latest
+ needs: [agent, detection]
+ if: always() && needs.agent.result != 'skipped' && needs.detection.result != 'skipped'
+ permissions:
+ contents: read
+ outputs:
+ secrets_found: ${{ steps.evaluate.outputs.secrets_found }}
+ secrets_locations: ${{ steps.evaluate.outputs.secrets_locations }}
+ steps:
+ - name: Download agent output artifact
+ id: download-agent
+ continue-on-error: true
+ uses: actions/download-artifact@v8
+ with:
+ name: agent
+ path: /tmp/gh-aw
+
+ - name: Download cache-memory artifact
+ id: download-cache-memory
+ continue-on-error: true
+ uses: actions/download-artifact@v8
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+
+ - name: Download repo-memory artifact
+ id: download-repo-memory
+ continue-on-error: true
+ uses: actions/download-artifact@v8
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory/default
+
+ - name: Install TruffleHog
+ id: install-trufflehog
+ env:
+ TRUFFLEHOG_VERSION: "3.88.27"
+ run: |
+ echo "Installing TruffleHog v${TRUFFLEHOG_VERSION}..."
+ curl -sSfL https://raw.githubusercontent.com/trufflesecurity/trufflehog/main/scripts/install.sh | sh -s -- -b /usr/local/bin "v${TRUFFLEHOG_VERSION}"
+ trufflehog --version
+
+ - name: Scan agent output for secrets
+ id: scan-agent-output
+ continue-on-error: true
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/agent-output-results.jsonl"
+            if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning agent output in $SCAN_DIR"
+            printf '%s\n' '/tmp/gh-aw/cache-memory' '/tmp/gh-aw/repo-memory' \
+              '/tmp/gh-aw/trufflehog' > /tmp/gh-aw/trufflehog/exclude-paths.txt
+            trufflehog filesystem "$SCAN_DIR" \
+              --json --no-update --fail \
+              --exclude-paths /tmp/gh-aw/trufflehog/exclude-paths.txt \
+              2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+            SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "Agent output directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Scan cache-memory for secrets
+ id: scan-cache-memory
+ continue-on-error: true
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw/cache-memory"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/cache-memory-results.jsonl"
+            if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning cache-memory in $SCAN_DIR"
+              trufflehog filesystem "$SCAN_DIR" --json --no-update --fail 2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+              SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "cache-memory directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Scan repo-memory for secrets
+ id: scan-repo-memory
+ continue-on-error: true
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw/repo-memory"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/repo-memory-results.jsonl"
+            if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning repo-memory in $SCAN_DIR"
+              trufflehog filesystem "$SCAN_DIR" --json --no-update --fail 2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+              SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "repo-memory directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Evaluate TruffleHog results
+ id: evaluate
+ if: always()
+ env:
+ AGENT_FOUND: ${{ steps.scan-agent-output.outputs.secrets_found }}
+ CACHE_FOUND: ${{ steps.scan-cache-memory.outputs.secrets_found }}
+ REPO_FOUND: ${{ steps.scan-repo-memory.outputs.secrets_found }}
+ run: |
+ echo "==================================="
+ echo "🔍 TruffleHog Scan Summary"
+ echo "==================================="
+ echo "Agent output: ${AGENT_FOUND:-clean}"
+ echo "Cache-memory: ${CACHE_FOUND:-clean}"
+ echo "Repo-memory: ${REPO_FOUND:-clean}"
+ echo "==================================="
+
+ if [[ "$AGENT_FOUND" == "true" || "$CACHE_FOUND" == "true" || "$REPO_FOUND" == "true" ]]; then
+ LOCATIONS=()
+ [[ "$AGENT_FOUND" == "true" ]] && LOCATIONS+=("agent output")
+ [[ "$CACHE_FOUND" == "true" ]] && LOCATIONS+=("cache-memory")
+ [[ "$REPO_FOUND" == "true" ]] && LOCATIONS+=("repo-memory")
+ LOCATIONS_STR=$(IFS=', '; echo "${LOCATIONS[*]}")
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ echo "secrets_locations=${LOCATIONS_STR}" >> "$GITHUB_OUTPUT"
+ echo "::error::TruffleHog detected secrets in: ${LOCATIONS_STR}"
+ exit 1
+ else
+ echo "secrets_found=false" >> "$GITHUB_OUTPUT"
+ echo "✅ No secrets detected by TruffleHog"
+ fi
+
+ - name: Upload TruffleHog scan results
+ if: always()
+ uses: actions/upload-artifact@v7.0.1
+ with:
+ name: trufflehog-scan-results
+ path: /tmp/gh-aw/trufflehog/
+ if-no-files-found: ignore
+
+ conclusion:
+ pre-steps:
+ - name: Report TruffleHog secret scan failure
+ if: always() && needs.trufflehog_scan.result == 'failure' && needs.trufflehog_scan.outputs.secrets_found == 'true'
+ continue-on-error: true
+ uses: actions/github-script@v9
+ env:
+ GH_AW_TRUFFLEHOG_SECRETS_LOCATIONS: ${{ needs.trufflehog_scan.outputs.secrets_locations }}
+ GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ GH_AW_WORKFLOW_NAME: ${{ github.workflow }}
+ with:
+ script: |
+ const locations = process.env.GH_AW_TRUFFLEHOG_SECRETS_LOCATIONS || 'unknown locations';
+ const runUrl = process.env.GH_AW_RUN_URL;
+ const workflowName = process.env.GH_AW_WORKFLOW_NAME;
+ const runNumber = context.runNumber;
+ const { owner, repo } = context.repo;
+ core.error(`🔐 TruffleHog detected secrets in: ${locations}`);
+ const title = `🔐 Secrets detected in workflow run: ${workflowName} #${runNumber}`;
+ const body = [
+ '> [!CAUTION]',
+ '> **TruffleHog detected secrets in the agentic workflow output.**',
+ '',
+ `**Locations:** \`${locations}\``,
+ '',
+ `**Workflow run:** [${workflowName} #${runNumber}](${runUrl})`,
+ '',
+ 'Please review the `trufflehog-scan-results` artifact in the workflow run for details.',
+ 'Rotate any exposed credentials immediately.',
+ ].join('\n');
+ const issue = await github.rest.issues.create({ owner, repo, title, body, labels: ['security'] });
+ core.info(`Created secret detection issue: ${issue.data.html_url}`);
+---
+
+
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index 1924b89b0ea..a75cae6f8f1 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -1,5 +1,5 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"032ac3db2b6d0d079973cb437a531a346485b0538a8965374a9c48c00a22a06f","agent_id":"codex"}
-# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions-ecosystem/action-add-labels","sha":"c96b68fec76a0987cd93957189e9abd0b9a72ff1","version":"v1.1.3"},{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.29","digest":"sha256:e68f37e36962dcb3f3d1de680a49bc2302cefd001b941a7dc377155ec7ce42f4","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.29@sha256:e68f37e36962dcb3f3d1de680a49bc2302cefd001b941a7dc377155ec7ce42f4"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.29","digest":"sha256:d1219e4110684402aabbeb5a43858f26790c9d0be210581cf3f7a521bd2c87b6","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.29@sha256:d1219e4110684402aabbeb5a43858f26790c9d0be210581cf3f7a521bd2c87b6"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.29","digest":"sha256:8a71ad9e40454051672312917e51567abfb8251d7c294d086c48f63d84e4cb53","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.29@sha256:8a71ad9e40454051672312917e51567abfb8251d7c294d086c48f63d84e4cb53"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.3"},{"image"
:"ghcr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"mcr.microsoft.com/playwright/mcp","digest":"sha256:7b82f29c6ef83480a97f612d53ac3fd5f30a32df3fea1e06923d4204d3532bb2","pinned_image":"mcr.microsoft.com/playwright/mcp@sha256:7b82f29c6ef83480a97f612d53ac3fd5f30a32df3fea1e06923d4204d3532bb2"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"0305420f533fa974fcaa1c7a3277f30f833832d8c57e5542a69ddff157eb2a7a","agent_id":"codex"}
+# gh-aw-manifest: {"version":1,"secrets":["CODEX_API_KEY","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GITHUB_TOKEN","OPENAI_API_KEY"],"actions":[{"repo":"actions-ecosystem/action-add-labels","sha":"c96b68fec76a0987cd93957189e9abd0b9a72ff1","version":"v1.1.3"},{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/setup-go","sha":"4a3601121dd01d1626a1e23e37211e3254c1c06c","version":"v6.4.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.29","digest":"sha256:e68f37e36962dcb3f3d1de680a49bc2302cefd001b941a7dc377155ec7ce42f4","pinned_image":"ghcr.io/github/gh-aw-firewall/agent:0.25.29@sha256:e68f37e36962dcb3f3d1de680a49bc2302cefd001b941a7dc377155ec7ce42f4"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.29","digest":"sha256:d1219e4110684402aabbeb5a43858f26790c9d0be210581cf3f7a521bd2c87b6","pinned_image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.29@sha256:d1219e4110684402aabbeb5a43858f26790c9d0be210581cf3f7a521bd2c87b6"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.29","digest":"sha256:8a71ad9e40454051672312917e51567abfb8251d7c294d086c48f63d84e4cb53","pinned_image":"ghcr.io/github/gh-aw-firewall/squid:0.25.29@sha256:8a71ad9e40454051672312917e51567abfb8251d7c294d086c48f63d84e4cb53"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.3"},{"image":"gh
cr.io/github/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"ghcr.io/github/serena-mcp-server:latest","digest":"sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5","pinned_image":"ghcr.io/github/serena-mcp-server:latest@sha256:bf343399e3725c45528f531a230f3a04521d4cdef29f9a5af6282ff0d3c393c5"},{"image":"mcr.microsoft.com/playwright/mcp","digest":"sha256:7b82f29c6ef83480a97f612d53ac3fd5f30a32df3fea1e06923d4204d3532bb2","pinned_image":"mcr.microsoft.com/playwright/mcp@sha256:7b82f29c6ef83480a97f612d53ac3fd5f30a32df3fea1e06923d4204d3532bb2"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]}
# ___ _ _
# / _ \ | | (_)
# | |_| | __ _ ___ _ __ | |_ _ ___
@@ -32,6 +32,7 @@
# - shared/observability-otlp.md
# - shared/reporting.md
# - shared/reporting-otlp.md
+# - shared/trufflehog.md
#
# Secrets used:
# - CODEX_API_KEY
@@ -47,6 +48,7 @@
# - actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
# - actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
# - actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+# - actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
# - actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
# - actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
# - actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@@ -251,25 +253,25 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_928d68ef3e3a244b_EOF'
+ cat << 'GH_AW_PROMPT_be18ab6f21d42e65_EOF'
- GH_AW_PROMPT_928d68ef3e3a244b_EOF
+ GH_AW_PROMPT_be18ab6f21d42e65_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/playwright_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_928d68ef3e3a244b_EOF'
+ cat << 'GH_AW_PROMPT_be18ab6f21d42e65_EOF'
Tools: add_comment(max:2), create_issue, add_labels, remove_labels, unassign_from_user, hide_comment(max:5), missing_tool, missing_data, noop, add_smoked_label
- GH_AW_PROMPT_928d68ef3e3a244b_EOF
+ GH_AW_PROMPT_be18ab6f21d42e65_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_comment_memory.md"
- cat << 'GH_AW_PROMPT_928d68ef3e3a244b_EOF'
+ cat << 'GH_AW_PROMPT_be18ab6f21d42e65_EOF'
- GH_AW_PROMPT_928d68ef3e3a244b_EOF
+ GH_AW_PROMPT_be18ab6f21d42e65_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md"
- cat << 'GH_AW_PROMPT_928d68ef3e3a244b_EOF'
+ cat << 'GH_AW_PROMPT_be18ab6f21d42e65_EOF'
The following GitHub context information is available for this workflow:
{{#if __GH_AW_GITHUB_ACTOR__ }}
@@ -301,9 +303,9 @@ jobs:
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_928d68ef3e3a244b_EOF
+ GH_AW_PROMPT_be18ab6f21d42e65_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_928d68ef3e3a244b_EOF'
+ cat << 'GH_AW_PROMPT_be18ab6f21d42e65_EOF'
## Serena Code Analysis
@@ -339,11 +341,12 @@ jobs:
{{#runtime-import .github/workflows/shared/gh.md}}
{{#runtime-import .github/workflows/shared/reporting-otlp.md}}
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
+ {{#runtime-import .github/workflows/shared/trufflehog.md}}
{{#runtime-import .github/workflows/shared/reporting.md}}
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/shared/noop-reminder.md}}
{{#runtime-import .github/workflows/smoke-codex.md}}
- GH_AW_PROMPT_928d68ef3e3a244b_EOF
+ GH_AW_PROMPT_be18ab6f21d42e65_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -582,9 +585,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_4f916ed59e7bf053_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_a968479bc80ebf8e_EOF'
{"add_comment":{"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-codex"]},"add_smoked_label":true,"comment_memory":{"max":1,"memory_id":"default"},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-codex","expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"hide_comment":{"max":5},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"report_incomplete":{},"unassign_from_user":{"allowed":["githubactionagent"],"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_4f916ed59e7bf053_EOF
+ GH_AW_SAFE_OUTPUTS_CONFIG_a968479bc80ebf8e_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -912,7 +915,7 @@ jobs:
- name: Write MCP Scripts Config
run: |
mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-scripts/logs"
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_73a49ce03387d419_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json" << 'GH_AW_MCP_SCRIPTS_TOOLS_78b882308c5f1ed3_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -942,8 +945,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_73a49ce03387d419_EOF
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_fbcce66cf0f06e08_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_78b882308c5f1ed3_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs" << 'GH_AW_MCP_SCRIPTS_SERVER_068086496cc04293_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -957,12 +960,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_fbcce66cf0f06e08_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_068086496cc04293_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs"
- name: Write MCP Scripts Tool Files
run: |
- cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_81b72da895b17993_EOF'
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh" << 'GH_AW_MCP_SCRIPTS_SH_GH_ebb4837f5604bd27_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -974,7 +977,7 @@ jobs:
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_81b72da895b17993_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_ebb4837f5604bd27_EOF
chmod +x "${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh"
- name: Generate MCP Scripts Server Config
@@ -1048,7 +1051,7 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_MCP_SCRIPTS_PORT -e GH_AW_MCP_SCRIPTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GITHUB_AW_OTEL_TRACE_ID -e GITHUB_AW_OTEL_PARENT_SPAN_ID -e CODEX_HOME -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.3'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_4d149b386f6bda31_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_cd045c4c558db8eb_EOF
[history]
persistence = "none"
@@ -1121,11 +1124,11 @@ jobs:
[mcp_servers.serena."guard-policies".write-sink]
accept = ["*"]
- GH_AW_MCP_CONFIG_4d149b386f6bda31_EOF
+ GH_AW_MCP_CONFIG_cd045c4c558db8eb_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_4d149b386f6bda31_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_cd045c4c558db8eb_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -1235,11 +1238,11 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_4d149b386f6bda31_EOF
+ GH_AW_MCP_CONFIG_cd045c4c558db8eb_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_71ae115a66db43bd_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_e9d7843a40c183aa_EOF
model_provider = "openai-proxy"
@@ -1251,7 +1254,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "GH_AW_ASSETS_ALLOWED_EXTS", "GH_AW_ASSETS_BRANCH", "GH_AW_ASSETS_MAX_SIZE_KB", "GH_AW_SAFE_OUTPUTS", "GITHUB_PERSONAL_ACCESS_TOKEN", "GITHUB_REPOSITORY", "GITHUB_SERVER_URL", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_71ae115a66db43bd_EOF
+ GH_AW_CODEX_SHELL_POLICY_e9d7843a40c183aa_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -1497,6 +1500,7 @@ jobs:
- agent
- detection
- safe_outputs
+ - trufflehog_scan
- update_cache_memory
if: >
always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true' ||
@@ -1530,6 +1534,36 @@ jobs:
destination: ${{ runner.temp }}/gh-aw/actions
job-name: ${{ github.job }}
trace-id: ${{ needs.activation.outputs.setup-trace-id }}
+ - name: Report TruffleHog secret scan failure
+ if: always() && needs.trufflehog_scan.result == 'failure' && needs.trufflehog_scan.outputs.secrets_found == 'true'
+ uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
+ env:
+ GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ GH_AW_TRUFFLEHOG_SECRETS_LOCATIONS: ${{ needs.trufflehog_scan.outputs.secrets_locations }}
+ GH_AW_WORKFLOW_NAME: ${{ github.workflow }}
+ with:
+ script: |
+ const locations = process.env.GH_AW_TRUFFLEHOG_SECRETS_LOCATIONS || 'unknown locations';
+ const runUrl = process.env.GH_AW_RUN_URL;
+ const workflowName = process.env.GH_AW_WORKFLOW_NAME;
+ const runNumber = context.runNumber;
+ const { owner, repo } = context.repo;
+ core.error(`🔐 TruffleHog detected secrets in: ${locations}`);
+ const title = `🔐 Secrets detected in workflow run: ${workflowName} #${runNumber}`;
+ const body = [
+ '> [!CAUTION]',
+ '> **TruffleHog detected secrets in the agentic workflow output.**',
+ '',
+ `**Locations:** \`${locations}\``,
+ '',
+ `**Workflow run:** [${workflowName} #${runNumber}](${runUrl})`,
+ '',
+ 'Please review the `trufflehog-scan-results` artifact in the workflow run for details.',
+ 'Rotate any exposed credentials immediately.',
+ ].join('\n');
+ const issue = await github.rest.issues.create({ owner, repo, title, body, labels: ['security'] });
+ core.info(`Created secret detection issue: ${issue.data.html_url}`);
+ continue-on-error: true
- name: Download agent output artifact
id: download-agent-output
continue-on-error: true
@@ -1795,18 +1829,18 @@ jobs:
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0')
export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.3'
- cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_b42a43ab4e7c601c_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_9b67e0ee67587d1e_EOF
[history]
persistence = "none"
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_MCP_CONFIG_b42a43ab4e7c601c_EOF
+ GH_AW_MCP_CONFIG_9b67e0ee67587d1e_EOF
# Generate JSON config for MCP gateway
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_52ddf20d8027ba70_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_dc33f48ef6c69e3a_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
},
@@ -1817,11 +1851,11 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- GH_AW_MCP_CONFIG_52ddf20d8027ba70_EOF
+ GH_AW_MCP_CONFIG_dc33f48ef6c69e3a_EOF
# Sync converter output to writable CODEX_HOME for Codex
mkdir -p /tmp/gh-aw/mcp-config
- cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_6e7eb911ab0846c7_EOF
+ cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_f5b23361146b0f34_EOF
model_provider = "openai-proxy"
[model_providers.openai-proxy]
name = "OpenAI AWF proxy"
@@ -1831,7 +1865,7 @@ jobs:
[shell_environment_policy]
inherit = "core"
include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"]
- GH_AW_CODEX_SHELL_POLICY_6e7eb911ab0846c7_EOF
+ GH_AW_CODEX_SHELL_POLICY_f5b23361146b0f34_EOF
awk '
BEGIN { skip_openai_proxy = 0 }
/^[[:space:]]*model_provider[[:space:]]*=/ { next }
@@ -2057,6 +2091,154 @@ jobs:
/tmp/gh-aw/temporary-id-map.json
if-no-files-found: ignore
+ trufflehog_scan:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.agent.result != 'skipped' && needs.detection.result != 'skipped'
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+
+ outputs:
+ secrets_found: ${{ steps.evaluate.outputs.secrets_found }}
+ secrets_locations: ${{ steps.evaluate.outputs.secrets_locations }}
+ steps:
+ - name: Configure GH_HOST for enterprise compatibility
+ id: ghes-host-config
+ shell: bash
+ run: |
+ # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct
+ # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op.
+ GH_HOST="${GITHUB_SERVER_URL#https://}"
+ GH_HOST="${GH_HOST#http://}"
+ echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV"
+ - name: Download agent output artifact
+ id: download-agent
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
+ with:
+ name: agent
+ path: /tmp/gh-aw
+ continue-on-error: true
+ - name: Download cache-memory artifact
+ id: download-cache-memory
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ continue-on-error: true
+ - name: Download repo-memory artifact
+ id: download-repo-memory
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
+ with:
+ name: repo-memory-default
+ path: /tmp/gh-aw/repo-memory/default
+ continue-on-error: true
+ - name: Install TruffleHog
+ id: install-trufflehog
+ run: |
+ echo "Installing TruffleHog v${TRUFFLEHOG_VERSION}..."
+ curl -sSfL https://raw.githubusercontent.com/trufflesecurity/trufflehog/main/scripts/install.sh | sh -s -- -b /usr/local/bin "v${TRUFFLEHOG_VERSION}"
+ trufflehog --version
+ env:
+ TRUFFLEHOG_VERSION: 3.88.27
+ - name: Scan agent output for secrets
+ id: scan-agent-output
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/agent-output-results.jsonl"
+          if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning agent output in $SCAN_DIR"
+            printf '%s\n' '/tmp/gh-aw/cache-memory' '/tmp/gh-aw/repo-memory' \
+              '/tmp/gh-aw/trufflehog' > /tmp/gh-aw/trufflehog/exclude-paths.txt
+            trufflehog filesystem "$SCAN_DIR" \
+              --json --no-update --fail \
+              --exclude-paths /tmp/gh-aw/trufflehog/exclude-paths.txt \
+              2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+            SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "Agent output directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+ continue-on-error: true
+ - name: Scan cache-memory for secrets
+ id: scan-cache-memory
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw/cache-memory"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/cache-memory-results.jsonl"
+          if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning cache-memory in $SCAN_DIR"
+            trufflehog filesystem "$SCAN_DIR" --json --no-update --fail 2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+            SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "cache-memory directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+ continue-on-error: true
+ - name: Scan repo-memory for secrets
+ id: scan-repo-memory
+ run: |
+ mkdir -p /tmp/gh-aw/trufflehog
+ SCAN_DIR="/tmp/gh-aw/repo-memory"
+ OUTPUT_FILE="/tmp/gh-aw/trufflehog/repo-memory-results.jsonl"
+          if [ -d "$SCAN_DIR" ] && find "$SCAN_DIR" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
+ echo "Scanning repo-memory in $SCAN_DIR"
+            trufflehog filesystem "$SCAN_DIR" --json --no-update --fail 2>/dev/null > "$OUTPUT_FILE" || SCAN_EXIT=$?
+            SCAN_EXIT=${SCAN_EXIT:-0}
+ else
+ echo "repo-memory directory is empty or missing, skipping"
+ SCAN_EXIT=0
+ fi
+ if [ "$SCAN_EXIT" -eq 183 ]; then
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ fi
+ continue-on-error: true
+ - name: Evaluate TruffleHog results
+ id: evaluate
+ if: always()
+ run: |
+ echo "==================================="
+ echo "🔍 TruffleHog Scan Summary"
+ echo "==================================="
+ echo "Agent output: ${AGENT_FOUND:-clean}"
+ echo "Cache-memory: ${CACHE_FOUND:-clean}"
+ echo "Repo-memory: ${REPO_FOUND:-clean}"
+ echo "==================================="
+
+ if [[ "$AGENT_FOUND" == "true" || "$CACHE_FOUND" == "true" || "$REPO_FOUND" == "true" ]]; then
+ LOCATIONS=()
+ [[ "$AGENT_FOUND" == "true" ]] && LOCATIONS+=("agent output")
+ [[ "$CACHE_FOUND" == "true" ]] && LOCATIONS+=("cache-memory")
+ [[ "$REPO_FOUND" == "true" ]] && LOCATIONS+=("repo-memory")
+ LOCATIONS_STR=$(IFS=', '; echo "${LOCATIONS[*]}")
+ echo "secrets_found=true" >> "$GITHUB_OUTPUT"
+ echo "secrets_locations=${LOCATIONS_STR}" >> "$GITHUB_OUTPUT"
+ echo "::error::TruffleHog detected secrets in: ${LOCATIONS_STR}"
+ exit 1
+ else
+ echo "secrets_found=false" >> "$GITHUB_OUTPUT"
+ echo "✅ No secrets detected by TruffleHog"
+ fi
+ env:
+ AGENT_FOUND: ${{ steps.scan-agent-output.outputs.secrets_found }}
+ CACHE_FOUND: ${{ steps.scan-cache-memory.outputs.secrets_found }}
+ REPO_FOUND: ${{ steps.scan-repo-memory.outputs.secrets_found }}
+ - name: Upload TruffleHog scan results
+ if: always()
+ uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
+ with:
+ if-no-files-found: ignore
+ name: trufflehog-scan-results
+ path: /tmp/gh-aw/trufflehog/
+
update_cache_memory:
needs:
- activation
diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md
index 61b3056e1ec..5e00824fcae 100644
--- a/.github/workflows/smoke-codex.md
+++ b/.github/workflows/smoke-codex.md
@@ -24,6 +24,7 @@ imports:
- shared/gh.md
- shared/reporting-otlp.md
- shared/mcp/serena-go.md
+ - shared/trufflehog.md
network:
allowed:
- defaults