Skip to content

Test Network Permissions #77

Test Network Permissions

Test Network Permissions #77

Workflow file for this run

# This file was automatically generated by gh-aw. DO NOT EDIT.
# To update this file, edit the corresponding .md file and run:
# gh aw compile
name: "Test Network Permissions"
on:
pull_request:
branches:
- main
workflow_dispatch: null
# Workflow-level permissions are empty; each job below grants its own minimal set.
permissions: {}
# One run per PR (or ref) at a time; a new push cancels the in-flight run.
concurrency:
group: "gh-aw-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
cancel-in-progress: true
run-name: "Test Network Permissions"
jobs:
test-network-permissions:
runs-on: ubuntu-latest
# Read-only token scopes: the agent may inspect the repo but not write to it.
permissions:
contents: read
issues: read
pull-requests: read
discussions: read
deployments: read
models: read
outputs:
# Sanitized agent output, consumed by the create_issue_comment job below.
output: ${{ steps.collect_output.outputs.output }}
steps:
- name: Checkout repository
uses: actions/checkout@v5
# Create a uniquely named, initially empty file the agent writes its final
# report into; the path is exported as GITHUB_AW_OUTPUT for all later steps.
- name: Setup agent output
id: setup_agent_output
uses: actions/github-script@v7
with:
script: |
function main() {
const fs = require('fs');
const crypto = require('crypto');
// Generate a random filename for the output file
// (random suffix avoids collisions between concurrent runs on one runner)
const randomId = crypto.randomBytes(8).toString('hex');
const outputFile = `/tmp/aw_output_${randomId}.txt`;
// Ensure the /tmp directory exists and create empty output file
fs.mkdirSync('/tmp', { recursive: true });
fs.writeFileSync(outputFile, '', { mode: 0o644 });
// Verify the file was created and is writable
if (!fs.existsSync(outputFile)) {
throw new Error(`Failed to create output file: ${outputFile}`);
}
// Set the environment variable for subsequent steps
core.exportVariable('GITHUB_AW_OUTPUT', outputFile);
console.log('Created agentic output file:', outputFile);
// Also set as step output for reference
core.setOutput('output_file', outputFile);
}
main();
# Write three files into the workspace: a whitelist-only squid.conf, the
# domain whitelist itself, and a docker-compose file that pins the fetch MCP
# container onto a private subnet whose only egress path is the squid proxy.
- name: Setup Proxy Configuration for MCP Network Restrictions
run: |
echo "Generating proxy configuration files for MCP tools with network restrictions..."
# Generate Squid proxy configuration
cat > squid.conf << 'EOF'
# Squid configuration for egress traffic control
# This configuration implements a whitelist-based proxy
# Access log and cache configuration
access_log /var/log/squid/access.log squid
cache_log /var/log/squid/cache.log
cache deny all
# Port configuration
http_port 3128
# ACL definitions for allowed domains
acl allowed_domains dstdomain "/etc/squid/allowed_domains.txt"
acl localnet src 10.0.0.0/8
acl localnet src 172.16.0.0/12
acl localnet src 192.168.0.0/16
acl SSL_ports port 443
acl Safe_ports port 80
acl Safe_ports port 443
acl CONNECT method CONNECT
# Access rules
# Deny requests to unknown domains (not in whitelist)
http_access deny !allowed_domains
http_access deny !Safe_ports
http_access deny CONNECT !SSL_ports
http_access allow localnet
http_access deny all
# Disable caching
cache deny all
# DNS settings
dns_nameservers 8.8.8.8 8.8.4.4
# Forwarded headers
forwarded_for delete
via off
# Error page customization
error_directory /usr/share/squid/errors/English
# Logging
logformat combined %>a %[ui %[un [%tl] "%rm %ru HTTP/%rv" %>Hs %<st "%{Referer}>h" "%{User-Agent}>h" %Ss:%Sh
access_log /var/log/squid/access.log combined
# Memory and file descriptor limits
cache_mem 64 MB
maximum_object_size 0 KB
EOF
# NOTE(review): the generated squid.conf declares 'cache deny all' twice and
# two access_log lines for the same file (squid + combined formats); squid
# tolerates duplicates but the template could be deduplicated upstream.
# NOTE(review): '%[ui' / '%[un' logformat codes are newer squid syntax —
# confirm the ubuntu/squid image version accepts them.
# Generate allowed domains file
cat > allowed_domains.txt << 'EOF'
# Allowed domains for egress traffic
# Add one domain per line
example.com
EOF
# Generate Docker Compose configuration for fetch
# (squid gets a fixed IP, 172.28.179.10, so the iptables rules in the next
# step can reference it; fetch waits for the proxy healthcheck to pass)
cat > docker-compose-fetch.yml << 'EOF'
services:
squid-proxy:
image: ubuntu/squid:latest
container_name: squid-proxy-fetch
ports:
- "3128:3128"
volumes:
- ./squid.conf:/etc/squid/squid.conf:ro
- ./allowed_domains.txt:/etc/squid/allowed_domains.txt:ro
- squid-logs:/var/log/squid
healthcheck:
test: ["CMD", "squid", "-k", "check"]
interval: 30s
timeout: 10s
retries: 3
restart: unless-stopped
networks:
awproxy-fetch:
ipv4_address: 172.28.179.10
fetch:
image: mcp/fetch
container_name: fetch-mcp
stdin_open: true
tty: true
environment:
- PROXY_HOST=squid-proxy
- PROXY_PORT=3128
- HTTP_PROXY=http://squid-proxy:3128
- HTTPS_PROXY=http://squid-proxy:3128
networks:
- awproxy-fetch
depends_on:
squid-proxy:
condition: service_healthy
volumes:
squid-logs:
networks:
awproxy-fetch:
driver: bridge
ipam:
config:
- subnet: 172.28.179.0/24
EOF
echo "Proxy configuration files generated."
- name: Pre-pull images and start Squid proxy
run: |
set -e
echo 'Pre-pulling Docker images for proxy-enabled MCP tools...'
docker pull ubuntu/squid:latest
echo 'Pulling mcp/fetch for tool fetch'
docker pull mcp/fetch
echo 'Starting squid-proxy service for fetch'
docker compose -f docker-compose-fetch.yml up -d squid-proxy
echo 'Enforcing egress to proxy for fetch (subnet 172.28.179.0/24, squid 172.28.179.10)'
if command -v sudo >/dev/null 2>&1; then SUDO=sudo; else SUDO=; fi
# DOCKER-USER firewall rules, each made idempotent via a -C (check) before
# the insert/append so re-runs do not duplicate rules:
# 1) allow return traffic for already-established connections
$SUDO iptables -C DOCKER-USER -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT 2>/dev/null || $SUDO iptables -I DOCKER-USER 1 -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
# 2) the squid container itself may reach the internet
$SUDO iptables -C DOCKER-USER -s 172.28.179.10 -j ACCEPT 2>/dev/null || $SUDO iptables -I DOCKER-USER 2 -s 172.28.179.10 -j ACCEPT
# 3) containers on the MCP subnet may reach squid on port 3128 only
$SUDO iptables -C DOCKER-USER -s 172.28.179.0/24 -d 172.28.179.10 -p tcp --dport 3128 -j ACCEPT 2>/dev/null || $SUDO iptables -I DOCKER-USER 3 -s 172.28.179.0/24 -d 172.28.179.10 -p tcp --dport 3128 -j ACCEPT
# 4) all other egress from the MCP subnet is rejected (proxy is the only way out)
$SUDO iptables -C DOCKER-USER -s 172.28.179.0/24 -j REJECT 2>/dev/null || $SUDO iptables -A DOCKER-USER -s 172.28.179.0/24 -j REJECT
# Write the MCP server registry the Claude Code action reads (mcp_config).
# The file is strict JSON, so no comments can be placed inside the heredoc:
# - "fetch" runs via docker compose so it joins the proxy-restricted subnet;
# - "github" runs as a plain container with the job's GITHUB_TOKEN.
- name: Setup MCPs
run: |
mkdir -p /tmp/mcp-config
cat > /tmp/mcp-config/mcp-servers.json << 'EOF'
{
"mcpServers": {
"fetch": {
"command": "docker",
"args": [
"compose",
"-f",
"docker-compose-fetch.yml",
"run",
"--rm",
"fetch"
]
},
"github": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"GITHUB_PERSONAL_ACCESS_TOKEN",
"ghcr.io/github/github-mcp-server:sha-45e90ae"
],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "${{ secrets.GITHUB_TOKEN }}"
}
}
}
}
EOF
- name: Create prompt
env:
GITHUB_AW_OUTPUT: ${{ env.GITHUB_AW_OUTPUT }}
run: |
mkdir -p /tmp/aw-prompts
# The heredoc delimiter is quoted ('EOF') so the shell does not expand
# anything; the ${{ }} expressions inside are expanded earlier by the
# Actions templater, before this script runs.
cat > /tmp/aw-prompts/prompt.txt << 'EOF'
# Test Network Permissions
## Task Description
Test the MCP network permissions feature to validate that domain restrictions are properly enforced.
- Use the fetch tool to successfully retrieve content from `https://example.com/` (the only allowed domain)
- Attempt to access blocked domains and verify they fail with network errors:
- `https://httpbin.org/json`
- `https://api.github.com/user`
- `https://www.google.com/`
- `http://malicious-example.com/`
- Verify that all blocked requests fail at the network level (proxy enforcement)
- Confirm that only example.com is accessible through the Squid proxy
## Reporting Instructions
If there are any failures, security issues, or unexpected behaviors:
- Write a detailed report documenting:
- Which domains were successfully accessed vs blocked
- Error messages received for blocked domains
- Any security observations or recommendations
- Specific failure details that need attention
Post the test results as an issue comment on PR #${{ github.event.pull_request.number }}.
---
## Adding a Comment to an Issue or Pull Request
To add a comment to an issue or pull request on GitHub, do NOT attempt to use MCP tools and do NOT attempt to use `gh` or the GitHub API. Instead, write the issue comment you want to make to the file "${{ env.GITHUB_AW_OUTPUT }}", in markdown.
EOF
- name: Print prompt to step summary
run: |
echo "## Generated Prompt" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Six-backtick fence so prompt text containing ``` blocks renders intact.
echo '``````markdown' >> $GITHUB_STEP_SUMMARY
cat /tmp/aw-prompts/prompt.txt >> $GITHUB_STEP_SUMMARY
echo '``````' >> $GITHUB_STEP_SUMMARY
- name: Generate agentic run info
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
// Metadata snapshot describing this agentic run; written to /tmp and
// uploaded as the aw_info.json artifact in the next step for auditing.
const awInfo = {
engine_id: "claude",
engine_name: "Claude Code",
model: "",
version: "",
workflow_name: "Test Network Permissions",
experimental: false,
supports_tools_whitelist: true,
supports_http_transport: true,
run_id: context.runId,
run_number: context.runNumber,
// run attempt is only available via the environment, not `context`
run_attempt: process.env.GITHUB_RUN_ATTEMPT,
repository: context.repo.owner + '/' + context.repo.repo,
ref: context.ref,
sha: context.sha,
actor: context.actor,
event_name: context.eventName,
created_at: new Date().toISOString()
};
// Write to /tmp directory to avoid inclusion in PR
const tmpPath = '/tmp/aw_info.json';
fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2));
console.log('Generated aw_info.json at:', tmpPath);
console.log(JSON.stringify(awInfo, null, 2));
- name: Upload agentic run info
if: always()
uses: actions/upload-artifact@v4
with:
name: aw_info.json
path: /tmp/aw_info.json
if-no-files-found: warn
- name: Execute Claude Code Action
id: agentic_execution
uses: anthropics/[email protected]
with:
# Allowed tools (sorted):
# - Glob
# - Grep
# - LS
# - NotebookRead
# - Read
# - Task
# - mcp__fetch__fetch
# - mcp__github__create_comment
# - mcp__github__create_issue
# - mcp__github__download_workflow_run_artifact
# - mcp__github__get_code_scanning_alert
# - mcp__github__get_commit
# - mcp__github__get_dependabot_alert
# - mcp__github__get_discussion
# - mcp__github__get_discussion_comments
# - mcp__github__get_file_contents
# - mcp__github__get_issue
# - mcp__github__get_issue_comments
# - mcp__github__get_job_logs
# - mcp__github__get_me
# - mcp__github__get_notification_details
# - mcp__github__get_pull_request
# - mcp__github__get_pull_request_comments
# - mcp__github__get_pull_request_diff
# - mcp__github__get_pull_request_files
# - mcp__github__get_pull_request_reviews
# - mcp__github__get_pull_request_status
# - mcp__github__get_secret_scanning_alert
# - mcp__github__get_tag
# - mcp__github__get_workflow_run
# - mcp__github__get_workflow_run_logs
# - mcp__github__get_workflow_run_usage
# - mcp__github__list_branches
# - mcp__github__list_code_scanning_alerts
# - mcp__github__list_commits
# - mcp__github__list_dependabot_alerts
# - mcp__github__list_discussion_categories
# - mcp__github__list_discussions
# - mcp__github__list_issues
# - mcp__github__list_notifications
# - mcp__github__list_pull_requests
# - mcp__github__list_secret_scanning_alerts
# - mcp__github__list_tags
# - mcp__github__list_workflow_jobs
# - mcp__github__list_workflow_run_artifacts
# - mcp__github__list_workflow_runs
# - mcp__github__list_workflows
# - mcp__github__search_code
# - mcp__github__search_issues
# - mcp__github__search_orgs
# - mcp__github__search_pull_requests
# - mcp__github__search_repositories
# - mcp__github__search_users
allowed_tools: "Glob,Grep,LS,NotebookRead,Read,Task,mcp__fetch__fetch,mcp__github__create_comment,mcp__github__create_issue,mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_issue,mcp__github__get_issue_comments,mcp__github__get_job_logs,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issues,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_secret_scanning_alerts,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users"
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
claude_env: |
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_AW_OUTPUT: ${{ env.GITHUB_AW_OUTPUT }}
mcp_config: /tmp/mcp-config/mcp-servers.json
prompt_file: /tmp/aw-prompts/prompt.txt
# Hard cap on agent runtime for this test workflow.
timeout_minutes: 5
env:
# Exposed at the step level as well so the action process itself sees it,
# in addition to the claude_env passthrough above.
GITHUB_AW_OUTPUT: ${{ env.GITHUB_AW_OUTPUT }}
- name: Capture Agentic Action logs
if: always()
run: |
# Copy the detailed execution file from Agentic Action if available
# NOTE(review): the cp arguments below are unquoted; fine while the path
# is a templated /tmp filename, but quoting would be safer.
if [ -n "${{ steps.agentic_execution.outputs.execution_file }}" ] && [ -f "${{ steps.agentic_execution.outputs.execution_file }}" ]; then
cp ${{ steps.agentic_execution.outputs.execution_file }} /tmp/test-network-permissions.log
else
echo "No execution file output found from Agentic Action" >> /tmp/test-network-permissions.log
fi
# Ensure log file exists
# (guarantees the "Upload agent logs" step always has a file to upload)
touch /tmp/test-network-permissions.log
- name: Check if workflow-complete.txt exists, if so upload it
id: check_file
run: |
if [ -f workflow-complete.txt ]; then
echo "File exists"
echo "upload=true" >> $GITHUB_OUTPUT
else
echo "File does not exist"
echo "upload=false" >> $GITHUB_OUTPUT
fi
- name: Upload workflow-complete.txt
if: steps.check_file.outputs.upload == 'true'
uses: actions/upload-artifact@v4
with:
name: workflow-complete
path: workflow-complete.txt
- name: Collect agent output
id: collect_output
uses: actions/github-script@v7
with:
script: |
/**
* Sanitizes content for safe output in GitHub Actions
* @param {string} content - The content to sanitize
* @returns {string} The sanitized content
*/
function sanitizeContent(content) {
if (!content || typeof content !== 'string') {
return '';
}
// Read allowed domains from environment variable
const allowedDomainsEnv = process.env.GITHUB_AW_ALLOWED_DOMAINS;
const defaultAllowedDomains = [
'github.com',
'github.io',
'githubusercontent.com',
'githubassets.com',
'github.dev',
'codespaces.new'
];
const allowedDomains = allowedDomainsEnv
? allowedDomainsEnv.split(',').map(d => d.trim()).filter(d => d)
: defaultAllowedDomains;
let sanitized = content;
// Neutralize @mentions to prevent unintended notifications
sanitized = neutralizeMentions(sanitized);
// FIX: strip ANSI escape sequences BEFORE removing control characters.
// The control-character pass below deletes the ESC byte (\x1b falls in
// the \x0E-\x1F range), which previously left visible "[31m"-style
// residue in the text and made a later ANSI pass dead code.
sanitized = sanitized.replace(/\x1b\[[0-9;]*[mGKH]/g, '');
// Remove control characters (except newlines and tabs)
sanitized = sanitized.replace(/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F]/g, '');
// XML character escaping
sanitized = sanitized
.replace(/&/g, '&amp;') // Must be first to avoid double-escaping
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&apos;');
// URI filtering - replace non-https protocols with "(redacted)"
// Step 1: Temporarily mark HTTPS URLs to protect them
sanitized = sanitizeUrlProtocols(sanitized);
// Domain filtering for HTTPS URIs
// Match https:// URIs and check if domain is in allowlist
sanitized = sanitizeUrlDomains(sanitized);
// Limit total length to prevent DoS (0.5MB max)
const maxLength = 524288;
if (sanitized.length > maxLength) {
sanitized = sanitized.substring(0, maxLength) + '\n[Content truncated due to length]';
}
// Limit number of lines to prevent log flooding (65k max)
const lines = sanitized.split('\n');
const maxLines = 65000;
if (lines.length > maxLines) {
sanitized = lines.slice(0, maxLines).join('\n') + '\n[Content truncated due to line count]';
}
// (ANSI stripping moved earlier, before control-character removal.)
// Neutralize common bot trigger phrases
sanitized = neutralizeBotTriggers(sanitized);
// Trim excessive whitespace
return sanitized.trim();
/**
* Remove unknown domains
* @param {string} s - The string to process
* @returns {string} The string with unknown domains redacted
*/
function sanitizeUrlDomains(s) {
s = s.replace(/\bhttps:\/\/([^\/\s\])}'"<>&\x00-\x1f]+)/gi, (match, domain) => {
// Extract the hostname part (before first slash, colon, or other delimiter)
const hostname = domain.split(/[\/:\?#]/)[0].toLowerCase();
// Check if this domain or any parent domain is in the allowlist
const isAllowed = allowedDomains.some(allowedDomain => {
const normalizedAllowed = allowedDomain.toLowerCase();
return hostname === normalizedAllowed || hostname.endsWith('.' + normalizedAllowed);
});
return isAllowed ? match : '(redacted)';
});
return s;
}
/**
* Remove unknown protocols except https
* @param {string} s - The string to process
* @returns {string} The string with non-https protocols redacted
*/
function sanitizeUrlProtocols(s) {
// Match both protocol:// and protocol: patterns
// This covers URLs like https://example.com, javascript:alert(), mailto:[email protected], etc.
return s.replace(/\b(\w+):(?:\/\/)?[^\s\])}'"<>&\x00-\x1f]+/gi, (match, protocol) => {
// Allow https (case insensitive), redact everything else
return protocol.toLowerCase() === 'https' ? match : '(redacted)';
});
}
/**
* Neutralizes @mentions by wrapping them in backticks
* @param {string} s - The string to process
* @returns {string} The string with neutralized mentions
*/
function neutralizeMentions(s) {
// Replace @name or @org/team outside code with `@name`
return s.replace(/(^|[^\w`])@([A-Za-z0-9](?:[A-Za-z0-9-]{0,37}[A-Za-z0-9])?(?:\/[A-Za-z0-9._-]+)?)/g,
(_m, p1, p2) => `${p1}\`@${p2}\``);
}
/**
* Neutralizes bot trigger phrases by wrapping them in backticks
* @param {string} s - The string to process
* @returns {string} The string with neutralized bot triggers
*/
function neutralizeBotTriggers(s) {
// Neutralize common bot trigger phrases like "fixes #123", "closes #asdfs", etc.
return s.replace(/\b(fixes?|closes?|resolves?|fix|close|resolve)\s+#(\w+)/gi,
(match, action, ref) => `\`${action} #${ref}\``);
}
}
async function main() {
const fs = require("fs");
const outputFile = process.env.GITHUB_AW_OUTPUT;
if (!outputFile) {
console.log('GITHUB_AW_OUTPUT not set, no output to collect');
core.setOutput('output', '');
return;
}
if (!fs.existsSync(outputFile)) {
console.log('Output file does not exist:', outputFile);
core.setOutput('output', '');
return;
}
const outputContent = fs.readFileSync(outputFile, 'utf8');
if (outputContent.trim() === '') {
console.log('Output file is empty');
core.setOutput('output', '');
} else {
const sanitizedContent = sanitizeContent(outputContent);
console.log('Collected agentic output (sanitized):', sanitizedContent.substring(0, 200) + (sanitizedContent.length > 200 ? '...' : ''));
core.setOutput('output', sanitizedContent);
}
}
await main();
- name: Print agent output to step summary
env:
GITHUB_AW_OUTPUT: ${{ env.GITHUB_AW_OUTPUT }}
run: |
echo "## Agent Output" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Six-backtick fence so agent output containing ``` blocks renders intact.
echo '``````markdown' >> $GITHUB_STEP_SUMMARY
cat ${{ env.GITHUB_AW_OUTPUT }} >> $GITHUB_STEP_SUMMARY
echo '``````' >> $GITHUB_STEP_SUMMARY
# Upload the raw (pre-sanitization) agent output file, but only when the
# sanitized collection step actually produced something.
- name: Upload agentic output file
if: always() && steps.collect_output.outputs.output != ''
uses: actions/upload-artifact@v4
with:
name: aw_output.txt
path: ${{ env.GITHUB_AW_OUTPUT }}
if-no-files-found: warn
# Best-effort upload of any engine-produced output.txt in the workspace;
# 'ignore' means no warning when the engine wrote nothing.
- name: Upload engine output files
if: always()
uses: actions/upload-artifact@v4
with:
name: agent_outputs
path: |
output.txt
if-no-files-found: ignore
- name: Upload agent logs
if: always()
uses: actions/upload-artifact@v4
with:
name: test-network-permissions.log
path: /tmp/test-network-permissions.log
if-no-files-found: warn
- name: Generate git patch
if: always()
run: |
# Check current git status
echo "Current git status:"
git status
# Determine the base commit for the diff.
# FIX: GITHUB_BASE_REF is a bare branch name (e.g. "main"), not a SHA or
# a local ref, and with actions/checkout's default shallow fetch it does
# not exist in the local clone, so "git diff $GITHUB_BASE_REF..HEAD"
# would fail on pull_request events. Fetch the base branch and reference
# it as origin/<branch> instead.
if [ "$GITHUB_EVENT_NAME" = "pull_request" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ]; then
git fetch --depth=1 origin "$GITHUB_BASE_REF" || true
INITIAL_SHA="origin/$GITHUB_BASE_REF"
else
INITIAL_SHA="$GITHUB_SHA"
fi
echo "Base commit SHA: $INITIAL_SHA"
# Configure git user for GitHub Actions
git config --global user.email "[email protected]"
git config --global user.name "GitHub Action"
# Stage any unstaged files
git add -A || true
# Check if there are staged files to commit
if ! git diff --cached --quiet; then
echo "Staged files found, committing them..."
git commit -m "[agent] staged files" || true
echo "Staged files committed"
else
echo "No staged files to commit"
fi
# Check updated git status
echo "Updated git status after committing staged files:"
git status
# Show compact diff information between initial commit and HEAD (committed changes only)
echo '## Git diff' >> $GITHUB_STEP_SUMMARY
echo '' >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
git diff --name-only "$INITIAL_SHA"..HEAD >> $GITHUB_STEP_SUMMARY || true
echo '```' >> $GITHUB_STEP_SUMMARY
echo '' >> $GITHUB_STEP_SUMMARY
# Check if there are any committed changes since the initial commit
if git diff --quiet "$INITIAL_SHA" HEAD; then
echo "No committed changes detected since initial commit"
echo "Skipping patch generation - no committed changes to create patch from"
else
echo "Committed changes detected, generating patch..."
# Generate patch from initial commit to HEAD (committed changes only)
git format-patch "$INITIAL_SHA"..HEAD --stdout > /tmp/aw.patch || echo "Failed to generate patch" > /tmp/aw.patch
echo "Patch file created at /tmp/aw.patch"
ls -la /tmp/aw.patch
# Show the first 50 lines of the patch for review
echo '## Git Patch' >> $GITHUB_STEP_SUMMARY
echo '' >> $GITHUB_STEP_SUMMARY
echo '```diff' >> $GITHUB_STEP_SUMMARY
head -50 /tmp/aw.patch >> $GITHUB_STEP_SUMMARY || echo "Could not display patch contents" >> $GITHUB_STEP_SUMMARY
echo '...' >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo '' >> $GITHUB_STEP_SUMMARY
fi
- name: Upload git patch
if: always()
uses: actions/upload-artifact@v4
with:
name: aw.patch
path: /tmp/aw.patch
if-no-files-found: ignore
# Follow-on job: posts the sanitized agent output as a comment on the
# triggering issue/PR. Needs write scopes the agent job deliberately lacks.
create_issue_comment:
needs: test-network-permissions
if: github.event.issue.number || github.event.pull_request.number
runs-on: ubuntu-latest
permissions:
contents: read
issues: write
pull-requests: write
timeout-minutes: 10
outputs:
comment_id: ${{ steps.create_comment.outputs.comment_id }}
comment_url: ${{ steps.create_comment.outputs.comment_url }}
steps:
- name: Add Issue Comment
id: create_comment
uses: actions/github-script@v7
env:
GITHUB_AW_AGENT_OUTPUT: ${{ needs.test-network-permissions.outputs.output }}
with:
script: |
async function main() {
// Read the agent output content from environment variable
const outputContent = process.env.GITHUB_AW_AGENT_OUTPUT;
if (!outputContent) {
console.log('No GITHUB_AW_AGENT_OUTPUT environment variable found');
return;
}
if (outputContent.trim() === '') {
console.log('Agent output content is empty');
return;
}
console.log('Agent output content length:', outputContent.length);
// Check if we're in an issue or pull request context
const isIssueContext = context.eventName === 'issues' || context.eventName === 'issue_comment';
const isPRContext = context.eventName === 'pull_request' || context.eventName === 'pull_request_review' || context.eventName === 'pull_request_review_comment';
if (!isIssueContext && !isPRContext) {
console.log('Not running in issue or pull request context, skipping comment creation');
return;
}
// Determine the issue/PR number and comment endpoint
let issueNumber;
let commentEndpoint;
if (isIssueContext) {
if (context.payload.issue) {
issueNumber = context.payload.issue.number;
commentEndpoint = 'issues';
} else {
console.log('Issue context detected but no issue found in payload');
return;
}
} else if (isPRContext) {
if (context.payload.pull_request) {
issueNumber = context.payload.pull_request.number;
commentEndpoint = 'issues'; // PR comments use the issues API endpoint
} else {
console.log('Pull request context detected but no pull request found in payload');
return;
}
}
if (!issueNumber) {
console.log('Could not determine issue or pull request number');
return;
}
let body = outputContent.trim();
// Add AI disclaimer with run id, run htmlurl
const runId = context.runId;
// FIX: the previous fallback URL ("https://github.com/actions/runs/<id>")
// omitted the owner/repo path segments and produced a dead link; build it
// from GITHUB_SERVER_URL and context.repo instead.
const runUrl = context.payload.repository
? `${context.payload.repository.html_url}/actions/runs/${runId}`
: `${process.env.GITHUB_SERVER_URL || 'https://github.com'}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
body += `\n\n> Generated by Agentic Workflow Run [${runId}](${runUrl})\n`;
console.log(`Creating comment on ${commentEndpoint} #${issueNumber}`);
console.log('Comment content length:', body.length);
// Create the comment using GitHub API
const { data: comment } = await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body: body
});
console.log('Created comment #' + comment.id + ': ' + comment.html_url);
// Set output for other jobs to use
core.setOutput('comment_id', comment.id);
core.setOutput('comment_url', comment.html_url);
// write comment id, url to the github_step_summary
await core.summary.addRaw(`
## GitHub Comment
- Comment ID: ${comment.id}
- Comment URL: ${comment.html_url}
`).write();
}
await main();