Update documentation for automatic lockdown determination #12
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # | ||
|
Check failure on line 1 in .github/workflows/metrics-collector.lock.yml
|
||
| # ___ _ _ | ||
| # / _ \ | | (_) | ||
| # | |_| | __ _ ___ _ __ | |_ _ ___ | ||
| # | _ |/ _` |/ _ \ '_ \| __| |/ __| | ||
| # | | | | (_| | __/ | | | |_| | (__ | ||
| # \_| |_/\__, |\___|_| |_|\__|_|\___| | ||
| # __/ | | ||
| # _ _ |___/ | ||
| # | | | | / _| | | ||
| # | | | | ___ _ __ _ __| |_| | _____ ____ | ||
| # | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| | ||
| # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ | ||
| # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ | ||
| # | ||
| # This file was automatically generated by gh-aw. DO NOT EDIT. | ||
| # | ||
| # To update this file, edit the corresponding .md file and run: | ||
| # gh aw compile | ||
| # For more information: https://github.com/githubnext/gh-aw/blob/main/.github/aw/github-agentic-workflows.md | ||
| # | ||
| # Collects daily performance metrics for the agent ecosystem and stores them in repo-memory | ||
| name: "Metrics Collector - Infrastructure Agent" | ||
| "on": | ||
| schedule: | ||
| - cron: "28 14 * * *" | ||
| # Friendly format: daily (scattered) | ||
| workflow_dispatch: | ||
| permissions: | ||
| actions: read | ||
| contents: read | ||
| discussions: read | ||
| issues: read | ||
| pull-requests: read | ||
| concurrency: | ||
| group: "gh-aw-${{ github.workflow }}" | ||
| run-name: "Metrics Collector - Infrastructure Agent" | ||
| jobs: | ||
| activation: | ||
| needs: pre_activation | ||
| if: needs.pre_activation.outputs.activated == 'true' | ||
| runs-on: ubuntu-slim | ||
| permissions: | ||
| contents: read | ||
| outputs: | ||
| comment_id: "" | ||
| comment_repo: "" | ||
| steps: | ||
| - name: Checkout actions folder | ||
| uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 | ||
| with: | ||
| sparse-checkout: | | ||
| actions | ||
| persist-credentials: false | ||
| - name: Setup Scripts | ||
| uses: ./actions/setup | ||
| with: | ||
| destination: /tmp/gh-aw/actions | ||
| - name: Check workflow file timestamps | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_AW_WORKFLOW_FILE: "metrics-collector.lock.yml" | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/check_workflow_timestamp_api.cjs'); | ||
| await main(); | ||
| agent: | ||
| needs: activation | ||
| runs-on: ubuntu-latest | ||
| permissions: | ||
| actions: read | ||
| contents: read | ||
| discussions: read | ||
| issues: read | ||
| pull-requests: read | ||
| concurrency: | ||
| group: "gh-aw-copilot-${{ github.workflow }}" | ||
| outputs: | ||
| model: ${{ steps.generate_aw_info.outputs.model }} | ||
| steps: | ||
| - name: Checkout actions folder | ||
| uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 | ||
| with: | ||
| sparse-checkout: | | ||
| actions | ||
| persist-credentials: false | ||
| - name: Setup Scripts | ||
| uses: ./actions/setup | ||
| with: | ||
| destination: /tmp/gh-aw/actions | ||
| - name: Checkout repository | ||
| uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 | ||
| with: | ||
| persist-credentials: false | ||
| - name: Create gh-aw temp directory | ||
| run: bash /tmp/gh-aw/actions/create_gh_aw_tmp_dir.sh | ||
| # Repo memory git-based storage configuration from frontmatter processed below | ||
| - name: Clone repo-memory branch (default) | ||
| env: | ||
| GH_TOKEN: ${{ github.token }} | ||
| BRANCH_NAME: memory/meta-orchestrators | ||
| TARGET_REPO: ${{ github.repository }} | ||
| MEMORY_DIR: /tmp/gh-aw/repo-memory/default | ||
| CREATE_ORPHAN: true | ||
| run: bash /tmp/gh-aw/actions/clone_repo_memory_branch.sh | ||
| - name: Configure Git credentials | ||
| env: | ||
| REPO_NAME: ${{ github.repository }} | ||
| SERVER_URL: ${{ github.server_url }} | ||
| run: | | ||
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | ||
| git config --global user.name "github-actions[bot]" | ||
| # Re-authenticate git with GitHub token | ||
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | ||
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | ||
| echo "Git configured with standard GitHub Actions identity" | ||
| - name: Checkout PR branch | ||
| if: | | ||
| github.event.pull_request | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | ||
| with: | ||
| github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/checkout_pr_branch.cjs'); | ||
| await main(); | ||
| - name: Validate COPILOT_GITHUB_TOKEN secret | ||
| run: /tmp/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN GitHub Copilot CLI https://githubnext.github.io/gh-aw/reference/engines/#github-copilot-default | ||
| env: | ||
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | ||
| - name: Install GitHub Copilot CLI | ||
| run: | | ||
| # Download official Copilot CLI installer script | ||
| curl -fsSL https://raw.githubusercontent.com/github/copilot-cli/main/install.sh -o /tmp/copilot-install.sh | ||
| # Execute the installer with the specified version | ||
| export VERSION=0.0.374 && sudo bash /tmp/copilot-install.sh | ||
| # Cleanup | ||
| rm -f /tmp/copilot-install.sh | ||
| # Verify installation | ||
| copilot --version | ||
| - name: Install awf binary | ||
| run: | | ||
| echo "Installing awf via installer script (requested version: v0.7.0)" | ||
| curl -sSL https://raw.githubusercontent.com/githubnext/gh-aw-firewall/main/install.sh | sudo AWF_VERSION=v0.7.0 bash | ||
| which awf | ||
| awf --version | ||
| - name: Determine automatic lockdown mode for GitHub MCP server | ||
| id: determine-automatic-lockdown | ||
| if: secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN != '' | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| with: | ||
| script: | | ||
| const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); | ||
| await determineAutomaticLockdown(github, context, core); | ||
| - name: Install gh-aw extension | ||
| env: | ||
| GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | ||
| run: | | ||
| # Check if gh-aw extension is already installed | ||
| if gh extension list | grep -q "githubnext/gh-aw"; then | ||
| echo "gh-aw extension already installed, upgrading..." | ||
| gh extension upgrade gh-aw || true | ||
| else | ||
| echo "Installing gh-aw extension..." | ||
| gh extension install githubnext/gh-aw | ||
| fi | ||
| gh aw --version | ||
| - name: Setup MCPs | ||
| env: | ||
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | ||
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||
| run: | | ||
| mkdir -p /tmp/gh-aw/mcp-config | ||
| mkdir -p /home/runner/.copilot | ||
| cat > /home/runner/.copilot/mcp-config.json << EOF | ||
| { | ||
| "mcpServers": { | ||
| "agentic_workflows": { | ||
| "type": "local", | ||
| "command": "gh", | ||
| "args": ["aw", "mcp-server"], | ||
| "tools": ["*"], | ||
| "env": { | ||
| "GITHUB_TOKEN": "\${GITHUB_TOKEN}" | ||
| } | ||
| }, | ||
| "github": { | ||
| "type": "http", | ||
| "url": "https://api.githubcopilot.com/mcp/", | ||
| "headers": { | ||
| "Authorization": "Bearer \${GITHUB_PERSONAL_ACCESS_TOKEN}", | ||
| "X-MCP-Lockdown": "${{ steps.determine-automatic-lockdown.outputs.lockdown }}", | ||
| "X-MCP-Readonly": "true", | ||
| "X-MCP-Toolsets": "context,repos,issues,pull_requests" | ||
| }, | ||
| "tools": ["*"], | ||
| "env": { | ||
| "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}" | ||
| } | ||
| } | ||
| } | ||
| } | ||
| EOF | ||
| echo "-------START MCP CONFIG-----------" | ||
| cat /home/runner/.copilot/mcp-config.json | ||
| echo "-------END MCP CONFIG-----------" | ||
| echo "-------/home/runner/.copilot-----------" | ||
| find /home/runner/.copilot | ||
| echo "HOME: $HOME" | ||
| echo "GITHUB_COPILOT_CLI_MODE: $GITHUB_COPILOT_CLI_MODE" | ||
| - name: Generate agentic run info | ||
| id: generate_aw_info | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| with: | ||
| script: | | ||
| const fs = require('fs'); | ||
| const awInfo = { | ||
| engine_id: "copilot", | ||
| engine_name: "GitHub Copilot CLI", | ||
| model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", | ||
| version: "", | ||
| agent_version: "0.0.374", | ||
| workflow_name: "Metrics Collector - Infrastructure Agent", | ||
| experimental: false, | ||
| supports_tools_allowlist: true, | ||
| supports_http_transport: true, | ||
| run_id: context.runId, | ||
| run_number: context.runNumber, | ||
| run_attempt: process.env.GITHUB_RUN_ATTEMPT, | ||
| repository: context.repo.owner + '/' + context.repo.repo, | ||
| ref: context.ref, | ||
| sha: context.sha, | ||
| actor: context.actor, | ||
| event_name: context.eventName, | ||
| staged: false, | ||
| network_mode: "defaults", | ||
| allowed_domains: [], | ||
| firewall_enabled: true, | ||
| awf_version: "v0.7.0", | ||
| steps: { | ||
| firewall: "squid" | ||
| }, | ||
| created_at: new Date().toISOString() | ||
| }; | ||
| // Write to /tmp/gh-aw directory to avoid inclusion in PR | ||
| const tmpPath = '/tmp/gh-aw/aw_info.json'; | ||
| fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); | ||
| console.log('Generated aw_info.json at:', tmpPath); | ||
| console.log(JSON.stringify(awInfo, null, 2)); | ||
| // Set model as output for reuse in other steps/jobs | ||
| core.setOutput('model', awInfo.model); | ||
| - name: Generate workflow overview | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| with: | ||
| script: | | ||
| const { generateWorkflowOverview } = require('/tmp/gh-aw/actions/generate_workflow_overview.cjs'); | ||
| await generateWorkflowOverview(core); | ||
| - name: Create prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | ||
| run: | | ||
| bash /tmp/gh-aw/actions/create_prompt_first.sh | ||
| cat << 'PROMPT_EOF' > "$GH_AW_PROMPT" | ||
| {{#runtime-import? .github/shared-instructions.md}} | ||
| # Metrics Collector - Infrastructure Agent | ||
| You are the Metrics Collector agent responsible for gathering daily performance metrics across the entire agentic workflow ecosystem and storing them in a structured format for analysis by meta-orchestrators. | ||
| ## Your Role | ||
| As an infrastructure agent, you collect and persist performance data that enables: | ||
| - Historical trend analysis by Agent Performance Analyzer | ||
| - Campaign health assessment by Campaign Manager | ||
| - Workflow health monitoring by Workflow Health Manager | ||
| - Data-driven optimization decisions across the ecosystem | ||
| ## Current Context | ||
| - **Repository**: __GH_AW_GITHUB_REPOSITORY__ | ||
| - **Collection Date**: $(date +%Y-%m-%d) | ||
| - **Collection Time**: $(date +%H:%M:%S) UTC | ||
| - **Storage Path**: `/tmp/gh-aw/repo-memory/default/metrics/` | ||
| ## Metrics Collection Process | ||
| ### 1. Use Agentic Workflows Tool to Collect Workflow Metrics | ||
| **Workflow Status and Runs**: | ||
| - Use the `status` tool to get a list of all workflows in the repository | ||
| - Use the `logs` tool to download workflow run data from the last 24 hours: | ||
| ``` | ||
| Parameters: | ||
| - start_date: "-1d" (last 24 hours) | ||
| - Include all workflows (no workflow_name filter) | ||
| ``` | ||
| - From the logs data, extract for each workflow: | ||
| - Total runs in last 24 hours | ||
| - Successful runs (conclusion: "success") | ||
| - Failed runs (conclusion: "failure", "cancelled", "timed_out") | ||
| - Calculate success rate: `successful / total` | ||
| - Token usage and costs (if available in logs) | ||
| - Execution duration statistics | ||
| **Safe Outputs from Logs**: | ||
| - The agentic-workflows logs tool provides information about: | ||
| - Issues created by workflows (from safe-output operations) | ||
| - PRs created by workflows | ||
| - Comments added by workflows | ||
| - Discussions created by workflows | ||
| - Extract and count these for each workflow | ||
| **Additional Metrics via GitHub API**: | ||
| - Use GitHub MCP server (default toolset) to supplement with: | ||
| - Engagement metrics: reactions on issues created by workflows | ||
| - Comment counts on PRs created by workflows | ||
| - Discussion reply counts | ||
| **Quality Indicators**: | ||
| - For merged PRs: Calculate merge time (created_at to merged_at) | ||
| - For closed issues: Calculate close time (created_at to closed_at) | ||
| - Calculate PR merge rate: `merged PRs / total PRs created` | ||
| ### 2. Structure Metrics Data | ||
| Create a JSON object following this schema: | ||
| ```json | ||
| { | ||
| "timestamp": "2024-12-24T00:00:00Z", | ||
| "period": "daily", | ||
| "collection_duration_seconds": 45, | ||
| "workflows": { | ||
| "workflow-name": { | ||
| "safe_outputs": { | ||
| "issues_created": 5, | ||
| "prs_created": 2, | ||
| "comments_added": 10, | ||
| "discussions_created": 1 | ||
| }, | ||
| "workflow_runs": { | ||
| "total": 7, | ||
| "successful": 6, | ||
| "failed": 1, | ||
| "success_rate": 0.857, | ||
| "avg_duration_seconds": 180, | ||
| "total_tokens": 45000, | ||
| "total_cost_usd": 0.45 | ||
| }, | ||
| "engagement": { | ||
| "issue_reactions": 12, | ||
| "pr_comments": 8, | ||
| "discussion_replies": 3 | ||
| }, | ||
| "quality_indicators": { | ||
| "pr_merge_rate": 0.75, | ||
| "avg_issue_close_time_hours": 48.5, | ||
| "avg_pr_merge_time_hours": 72.3 | ||
| } | ||
| } | ||
| }, | ||
| "ecosystem": { | ||
| "total_workflows": 120, | ||
| "active_workflows": 85, | ||
| "total_safe_outputs": 45, | ||
| "overall_success_rate": 0.892, | ||
| "total_tokens": 1250000, | ||
| "total_cost_usd": 12.50 | ||
| } | ||
| } | ||
| ``` | ||
| ### 3. Store Metrics in Repo Memory | ||
| **Daily Storage**: | ||
| - Write metrics to: `/tmp/gh-aw/repo-memory/default/metrics/daily/YYYY-MM-DD.json` | ||
| - Use today's date for the filename (e.g., `2024-12-24.json`) | ||
| **Latest Snapshot**: | ||
| - Copy current metrics to: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` | ||
| - This provides quick access to most recent data without date calculations | ||
| **Create Directory Structure**: | ||
| - Ensure the directory exists: `mkdir -p /tmp/gh-aw/repo-memory/default/metrics/daily/` | ||
| ### 4. Cleanup Old Data | ||
| **Retention Policy**: | ||
| - Keep last 30 days of daily metrics | ||
| - Delete daily files older than 30 days from the metrics directory | ||
| - Preserve `latest.json` (always keep) | ||
| **Cleanup Command**: | ||
| ```bash | ||
| find /tmp/gh-aw/repo-memory/default/metrics/daily/ -name "*.json" -mtime +30 -delete | ||
| ``` | ||
| ### 5. Calculate Ecosystem Aggregates | ||
| **Total Workflows**: | ||
| - Use the agentic-workflows `status` tool to get count of all workflows | ||
| **Active Workflows**: | ||
| - Count workflows that had at least one run in the last 24 hours (from logs data) | ||
| **Total Safe Outputs**: | ||
| - Sum of all safe outputs (issues + PRs + comments + discussions) across all workflows | ||
| **Overall Success Rate**: | ||
| - Calculate: `(sum of successful runs across all workflows) / (sum of total runs across all workflows)` | ||
| **Total Resource Usage**: | ||
| - Sum total tokens used across all workflows | ||
| - Sum total cost across all workflows | ||
| ## Implementation Guidelines | ||
| ### Using Agentic Workflows Tool | ||
| **Primary data source**: Use the agentic-workflows tool for all workflow run metrics: | ||
| 1. Start with `status` tool to get workflow inventory | ||
| 2. Use `logs` tool with `start_date: "-1d"` to collect last 24 hours of runs | ||
| 3. Extract metrics from the log data (success/failure, tokens, costs, safe outputs) | ||
| **Secondary data source**: Use GitHub MCP server for engagement metrics only: | ||
| - Reactions on issues/PRs created by workflows | ||
| - Comment counts | ||
| - Discussion replies | ||
| ### Handling Missing Data | ||
| - If a workflow has no runs in the last 24 hours, set all run metrics to 0 | ||
| - If a workflow has no safe outputs, set all safe output counts to 0 | ||
| - If token/cost data is unavailable, omit or set to null | ||
| - Always include workflows in the metrics even if they have no activity (helps detect stalled workflows) | ||
| ### Workflow Name Extraction | ||
| The agentic-workflows logs tool provides structured data with workflow names already extracted. Use this instead of parsing footers manually. | ||
| ### Performance Considerations | ||
| - The agentic-workflows tool is optimized for log retrieval and analysis | ||
| - Use date filters (start_date: "-1d") to limit data collection scope | ||
| - Process logs in memory rather than making multiple API calls | ||
| - Cache workflow list from status tool | ||
| ### Error Handling | ||
| - If agentic-workflows tool is unavailable, log error but don't fail the entire collection | ||
| - If a specific workflow's data can't be collected, log and continue with others | ||
| - Always write partial metrics even if some data is missing | ||
| ## Output Format | ||
| At the end of collection: | ||
| 1. **Summary Log**: | ||
| ``` | ||
| ✅ Metrics collection completed | ||
| 📊 Collection Summary: | ||
| - Workflows analyzed: 120 | ||
| - Active workflows: 85 | ||
| - Total safe outputs: 45 | ||
| - Overall success rate: 89.2% | ||
| - Storage: /tmp/gh-aw/repo-memory/default/metrics/daily/2024-12-24.json | ||
| ⏱️ Collection took: 45 seconds | ||
| ``` | ||
| 2. **File Operations Log**: | ||
| ``` | ||
| 📝 Files written: | ||
| - metrics/daily/2024-12-24.json | ||
| - metrics/latest.json | ||
| 🗑️ Cleanup: | ||
| - Removed 1 old daily file(s) | ||
| ``` | ||
| ## Important Notes | ||
| - **PRIMARY TOOL**: Use the agentic-workflows tool (`status`, `logs`) for all workflow run metrics | ||
| - **SECONDARY TOOL**: Use GitHub MCP server only for engagement metrics (reactions, comments) | ||
| - **DO NOT** create issues, PRs, or comments - this is a data collection agent only | ||
| - **DO NOT** analyze or interpret the metrics - that's the job of meta-orchestrators | ||
| - **ALWAYS** write valid JSON (test with `jq` before storing) | ||
| - **ALWAYS** include a timestamp in ISO 8601 format | ||
| - **ENSURE** directory structure exists before writing files | ||
| - **USE** repo-memory tool to persist data (it handles git operations automatically) | ||
| - **INCLUDE** token usage and cost metrics when available from logs | ||
| ## Success Criteria | ||
| ✅ Daily metrics file created in correct location | ||
| ✅ Latest metrics snapshot updated | ||
| ✅ Old metrics cleaned up (>30 days) | ||
| ✅ Valid JSON format (validated with jq) | ||
| ✅ All workflows included in metrics | ||
| ✅ Ecosystem aggregates calculated correctly | ||
| ✅ Collection completed within timeout | ||
| ✅ No errors or warnings in execution log | ||
| PROMPT_EOF | ||
| - name: Substitute placeholders | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | ||
| with: | ||
| script: | | ||
| const substitutePlaceholders = require('/tmp/gh-aw/actions/substitute_placeholders.cjs'); | ||
| // Call the substitution function | ||
| return await substitutePlaceholders({ | ||
| file: process.env.GH_AW_PROMPT, | ||
| substitutions: { | ||
| GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY | ||
| } | ||
| }); | ||
| - name: Append XPIA security instructions to prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| run: | | ||
| cat "/tmp/gh-aw/prompts/xpia_prompt.md" >> "$GH_AW_PROMPT" | ||
| - name: Append temporary folder instructions to prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| run: | | ||
| cat "/tmp/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" | ||
| - name: Append repo memory instructions to prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| run: | | ||
| cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" | ||
| --- | ||
| ## Repo Memory Available | ||
| You have access to a persistent repo memory folder at `/tmp/gh-aw/repo-memory/default/` where you can read and write files that are stored in a git branch. | ||
| - **Read/Write Access**: You can freely read from and write to any files in this folder | ||
| - **Git Branch Storage**: Files are stored in the `memory/meta-orchestrators` branch of the current repository | ||
| - **Automatic Push**: Changes are automatically committed and pushed after the workflow completes | ||
| - **Merge Strategy**: In case of conflicts, your changes (current version) win | ||
| - **Persistence**: Files persist across workflow runs via git branch storage | ||
| **Constraints:** | ||
| - **Allowed Files**: Only files matching patterns: metrics/** | ||
| - **Max File Size**: 10240 bytes (0.01 MB) per file | ||
| - **Max File Count**: 100 files per commit | ||
| Examples of what you can store: | ||
| - `/tmp/gh-aw/repo-memory/default/notes.md` - general notes and observations | ||
| - `/tmp/gh-aw/repo-memory/default/state.json` - structured state data | ||
| - `/tmp/gh-aw/repo-memory/default/history/` - organized history files in subdirectories | ||
| Feel free to create, read, update, and organize files in this folder as needed for your tasks. | ||
| PROMPT_EOF | ||
| - name: Append GitHub context to prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | ||
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | ||
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | ||
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | ||
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | ||
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | ||
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | ||
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | ||
| run: | | ||
| cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" | ||
| <github-context> | ||
| The following GitHub context information is available for this workflow: | ||
| {{#if __GH_AW_GITHUB_ACTOR__ }} | ||
| - **actor**: __GH_AW_GITHUB_ACTOR__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_REPOSITORY__ }} | ||
| - **repository**: __GH_AW_GITHUB_REPOSITORY__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_WORKSPACE__ }} | ||
| - **workspace**: __GH_AW_GITHUB_WORKSPACE__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} | ||
| - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} | ||
| - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} | ||
| - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} | ||
| - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ | ||
| {{/if}} | ||
| {{#if __GH_AW_GITHUB_RUN_ID__ }} | ||
| - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ | ||
| {{/if}} | ||
| </github-context> | ||
| PROMPT_EOF | ||
| - name: Substitute placeholders | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | ||
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | ||
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | ||
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | ||
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | ||
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | ||
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | ||
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | ||
| with: | ||
| script: | | ||
| const substitutePlaceholders = require('/tmp/gh-aw/actions/substitute_placeholders.cjs'); | ||
| // Call the substitution function | ||
| return await substitutePlaceholders({ | ||
| file: process.env.GH_AW_PROMPT, | ||
| substitutions: { | ||
| GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, | ||
| GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, | ||
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, | ||
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, | ||
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, | ||
| GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, | ||
| GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, | ||
| GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE | ||
| } | ||
| }); | ||
| - name: Interpolate variables and render templates | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/interpolate_prompt.cjs'); | ||
| await main(); | ||
| - name: Print prompt | ||
| env: | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| run: bash /tmp/gh-aw/actions/print_prompt_summary.sh | ||
| - name: Upload prompt | ||
| if: always() | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: prompt | ||
| path: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| if-no-files-found: warn | ||
| - name: Upload agentic run info | ||
| if: always() | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: aw-info | ||
| path: /tmp/gh-aw/aw_info.json | ||
| if-no-files-found: warn | ||
| - name: Execute GitHub Copilot CLI | ||
| id: agentic_execution | ||
| # Copilot CLI tool arguments (sorted): | ||
| # --allow-tool github | ||
| timeout-minutes: 15 | ||
| run: | | ||
| set -o pipefail | ||
| sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --mount /home/runner/.copilot:/home/runner/.copilot:rw --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,host.docker.internal,raw.githubusercontent.com,registry.npmjs.org --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --image-tag 0.7.0 \ | ||
| -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} \ | ||
| 2>&1 | tee /tmp/gh-aw/agent-stdio.log | ||
| env: | ||
| COPILOT_AGENT_RUNNER_TYPE: STANDALONE | ||
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | ||
| GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json | ||
| GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} | ||
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | ||
| GITHUB_HEAD_REF: ${{ github.head_ref }} | ||
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | ||
| GITHUB_REF_NAME: ${{ github.ref_name }} | ||
| GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} | ||
| GITHUB_WORKSPACE: ${{ github.workspace }} | ||
| XDG_CONFIG_HOME: /home/runner | ||
| - name: Redact secrets in logs | ||
| if: always() | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/redact_secrets.cjs'); | ||
| await main(); | ||
| env: | ||
| GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' | ||
| SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | ||
| SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} | ||
| SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} | ||
| SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||
| - name: Upload engine output files | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: agent_outputs | ||
| path: | | ||
| /tmp/gh-aw/sandbox/agent/logs/ | ||
| /tmp/gh-aw/redacted-urls.log | ||
| if-no-files-found: ignore | ||
| - name: Upload MCP logs | ||
| if: always() | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: mcp-logs | ||
| path: /tmp/gh-aw/mcp-logs/ | ||
| if-no-files-found: ignore | ||
| - name: Parse agent logs for step summary | ||
| if: always() | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/parse_copilot_log.cjs'); | ||
| await main(); | ||
| - name: Upload Firewall Logs | ||
| if: always() | ||
| continue-on-error: true | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: firewall-logs-metrics-collector-infrastructure-agent | ||
| path: /tmp/gh-aw/sandbox/firewall/logs/ | ||
| if-no-files-found: ignore | ||
| - name: Parse firewall logs for step summary | ||
| if: always() | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/parse_firewall_logs.cjs'); | ||
| await main(); | ||
| - name: Upload Agent Stdio | ||
| if: always() | ||
| uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | ||
| with: | ||
| name: agent-stdio.log | ||
| path: /tmp/gh-aw/agent-stdio.log | ||
| if-no-files-found: warn | ||
      # Upload repo memory as artifacts for push job
      # (retention-days: 1 — the artifact is only a hand-off to the
      # push_repo_memory job, not a long-lived record).
      - name: Upload repo-memory artifact (default)
        if: always()
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: repo-memory-default
          path: /tmp/gh-aw/repo-memory/default
          retention-days: 1
          # Agent may legitimately produce no memory files; that is not an error.
          if-no-files-found: ignore
      # Scan the agent logs against a catalog of known error/warning regexes
      # (workflow commands, generic ERROR/WARNING lines, Copilot CLI formats,
      # shell/module "not found", permission failures) and report matches.
      - name: Validate agent logs for errors
        if: always()
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/
          # JSON array of {id, pattern, level_group, message_group, description}
          # entries consumed by validate_errors.cjs; level_group/message_group
          # are regex capture-group indices (0 = use the whole match/default).
          GH_AW_ERROR_PATTERNS: "[{\"id\":\"\",\"pattern\":\"::(error)(?:\\\\s+[^:]*)?::(.+)\",\"level_group\":1,\"message_group\":2,\"description\":\"GitHub Actions workflow command - error\"},{\"id\":\"\",\"pattern\":\"::(warning)(?:\\\\s+[^:]*)?::(.+)\",\"level_group\":1,\"message_group\":2,\"description\":\"GitHub Actions workflow command - warning\"},{\"id\":\"\",\"pattern\":\"::(notice)(?:\\\\s+[^:]*)?::(.+)\",\"level_group\":1,\"message_group\":2,\"description\":\"GitHub Actions workflow command - notice\"},{\"id\":\"\",\"pattern\":\"(ERROR|Error):\\\\s+(.+)\",\"level_group\":1,\"message_group\":2,\"description\":\"Generic ERROR messages\"},{\"id\":\"\",\"pattern\":\"(WARNING|Warning):\\\\s+(.+)\",\"level_group\":1,\"message_group\":2,\"description\":\"Generic WARNING messages\"},{\"id\":\"\",\"pattern\":\"(\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}:\\\\d{2}:\\\\d{2}\\\\.\\\\d{3}Z)\\\\s+\\\\[(ERROR)\\\\]\\\\s+(.+)\",\"level_group\":2,\"message_group\":3,\"description\":\"Copilot CLI timestamped ERROR messages\"},{\"id\":\"\",\"pattern\":\"(\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}:\\\\d{2}:\\\\d{2}\\\\.\\\\d{3}Z)\\\\s+\\\\[(WARN|WARNING)\\\\]\\\\s+(.+)\",\"level_group\":2,\"message_group\":3,\"description\":\"Copilot CLI timestamped WARNING messages\"},{\"id\":\"\",\"pattern\":\"\\\\[(\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}:\\\\d{2}:\\\\d{2}\\\\.\\\\d{3}Z)\\\\]\\\\s+(CRITICAL|ERROR):\\\\s+(.+)\",\"level_group\":2,\"message_group\":3,\"description\":\"Copilot CLI bracketed critical/error messages with timestamp\"},{\"id\":\"\",\"pattern\":\"\\\\[(\\\\d{4}-\\\\d{2}-\\\\d{2}T\\\\d{2}:\\\\d{2}:\\\\d{2}\\\\.\\\\d{3}Z)\\\\]\\\\s+(WARNING):\\\\s+(.+)\",\"level_group\":2,\"message_group\":3,\"description\":\"Copilot CLI bracketed warning messages with timestamp\"},{\"id\":\"\",\"pattern\":\"✗\\\\s+(.+)\",\"level_group\":0,\"message_group\":1,\"description\":\"Copilot CLI failed command indicator\"},{\"id\":\"\",\"pattern\":\"(?:command not found|not found):\\\\s*(.+)|(.+):\\\\s*(?:command not found|not found)\",\"level_group\":0,\"message_group\":0,\"description\":\"Shell command not found error\"},{\"id\":\"\",\"pattern\":\"Cannot find module\\\\s+['\\\"](.+)['\\\"]\",\"level_group\":0,\"message_group\":1,\"description\":\"Node.js module not found error\"},{\"id\":\"\",\"pattern\":\"Permission denied and could not request permission from user\",\"level_group\":0,\"message_group\":0,\"description\":\"Copilot CLI permission denied warning (user interaction required)\"},{\"id\":\"\",\"pattern\":\"\\\\berror\\\\b.*permission.*denied\",\"level_group\":0,\"message_group\":0,\"description\":\"Permission denied error (requires error context)\"},{\"id\":\"\",\"pattern\":\"\\\\berror\\\\b.*unauthorized\",\"level_group\":0,\"message_group\":0,\"description\":\"Unauthorized access error (requires error context)\"},{\"id\":\"\",\"pattern\":\"\\\\berror\\\\b.*forbidden\",\"level_group\":0,\"message_group\":0,\"description\":\"Forbidden access error (requires error context)\"}]"
        with:
          script: |
            const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs');
            setupGlobals(core, github, context, exec, io);
            const { main } = require('/tmp/gh-aw/actions/validate_errors.cjs');
            await main();
  # Gate job: decides whether the rest of the workflow may run. The
  # `activation` job proceeds only when this job's `activated` output is
  # 'true', i.e. when the triggering actor holds one of the required roles.
  pre_activation:
    runs-on: ubuntu-slim
    permissions:
      contents: read
    outputs:
      # 'true' only when check_membership confirms the actor's repo role.
      activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }}
    steps:
      # Sparse checkout of only the actions/ folder; credentials are not
      # persisted since later steps authenticate explicitly.
      - name: Checkout actions folder
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          sparse-checkout: |
            actions
          persist-credentials: false
      # Stage the shared helper scripts at a fixed path outside the workspace.
      - name: Setup Scripts
        uses: ./actions/setup
        with:
          destination: /tmp/gh-aw/actions
      # Verify the actor's repository role against the allow-list below.
      - name: Check team membership for workflow
        id: check_membership
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          # Roles accepted as "team member" for activation purposes.
          GH_AW_REQUIRED_ROLES: admin,maintainer,write
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs');
            setupGlobals(core, github, context, exec, io);
            const { main } = require('/tmp/gh-aw/actions/check_membership.cjs');
            await main();
| push_repo_memory: | ||
| needs: agent | ||
| if: always() | ||
| runs-on: ubuntu-latest | ||
| permissions: | ||
| contents: write | ||
| steps: | ||
| - name: Checkout actions folder | ||
| uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 | ||
| with: | ||
| sparse-checkout: | | ||
| actions | ||
| persist-credentials: false | ||
| - name: Setup Scripts | ||
| uses: ./actions/setup | ||
| with: | ||
| destination: /tmp/gh-aw/actions | ||
| - name: Checkout repository | ||
| uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 | ||
| with: | ||
| persist-credentials: false | ||
| sparse-checkout: . | ||
| - name: Configure Git credentials | ||
| env: | ||
| REPO_NAME: ${{ github.repository }} | ||
| SERVER_URL: ${{ github.server_url }} | ||
| run: | | ||
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | ||
| git config --global user.name "github-actions[bot]" | ||
| # Re-authenticate git with GitHub token | ||
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | ||
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | ||
| echo "Git configured with standard GitHub Actions identity" | ||
| - name: Download repo-memory artifact (default) | ||
| uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 | ||
| continue-on-error: true | ||
| with: | ||
| name: repo-memory-default | ||
| path: /tmp/gh-aw/repo-memory/default | ||
| - name: Push repo-memory changes (default) | ||
| if: always() | ||
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 | ||
| env: | ||
| GH_TOKEN: ${{ github.token }} | ||
| GITHUB_RUN_ID: ${{ github.run_id }} | ||
| ARTIFACT_DIR: /tmp/gh-aw/repo-memory/default | ||
| MEMORY_ID: default | ||
| TARGET_REPO: ${{ github.repository }} | ||
| BRANCH_NAME: memory/meta-orchestrators | ||
| MAX_FILE_SIZE: 10240 | ||
| MAX_FILE_COUNT: 100 | ||
| FILE_GLOB_FILTER: "metrics/**" | ||
| with: | ||
| script: | | ||
| const { setupGlobals } = require('/tmp/gh-aw/actions/setup_globals.cjs'); | ||
| setupGlobals(core, github, context, exec, io); | ||
| const { main } = require('/tmp/gh-aw/actions/push_repo_memory.cjs'); | ||
| await main(); | ||