# sync-docs workflow (originally shared as snippet #191).
# NOTE: the GitHub web-viewer boilerplate ("hidden or bidirectional Unicode
# characters" notice) that was pasted in with this file has been removed.
# Workflow: sync documentation from the genlayer-node repository into this
# docs repository, regenerate derived docs, and open a PR with the result.
name: Sync Documentation from Node Repository

# Triggered either by a repository_dispatch event (type "sync-docs") sent by
# the node repo, or manually via workflow_dispatch with optional overrides.
on:
  repository_dispatch:
    types: [sync-docs]
  workflow_dispatch:
    inputs:
      version:
        description: 'Version/tag to sync from genlayer-node repo (e.g., v0.3.5, or "latest" to detect)'
        required: false
        default: 'latest'
      changelog_path:
        description: 'Path to changelog files in source repo'
        required: false
        default: 'docs/changelog'
      api_gen_path:
        description: 'Path to API gen files in source repo'
        required: false
        default: 'docs/api/rpc'
      api_debug_path:
        description: 'Path to API debug files in source repo'
        required: false
        default: 'docs/api/rpc'
      api_gen_regex:
        description: 'Regex pattern to filter API gen files (e.g., "gen_.*")'
        required: false
        default: 'gen_(?!dbg_).*'
      api_debug_regex:
        description: 'Regex pattern to filter API debug files (e.g., "gen_dbg_.*")'
        required: false
        default: 'gen_dbg_.*'
      api_ops_path:
        description: 'Path to API ops files in source repo'
        required: false
        default: 'docs/api/ops'

# Global environment variables
env:
  # Quoted string: GitHub env values are always strings, and the cleanup job
  # compares this against the literal "true".
  CLEANUP_ARTIFACTS: "true"

# Prevent concurrent runs of the same workflow for the same ref + version
concurrency:
  group: sync-docs-${{ github.ref }}-${{ github.event.inputs.version || github.event.client_payload.version || 'latest' }}
  cancel-in-progress: true
jobs:
  # Resolve the exact version/tag to sync. "latest" is resolved to a concrete
  # tag by querying the source repository.
  prepare:
    name: 'Determine Version'
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.final_version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Extract version parameter
        id: extract
        run: |
          # Pick the requested version from the dispatch payload or the
          # manual workflow input; default to "latest" in both cases.
          if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
            VERSION="${{ github.event.client_payload.version || 'latest' }}"
          else
            VERSION="${{ github.event.inputs.version || 'latest' }}"
          fi
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "Requested version: $VERSION"

      - name: Detect latest version
        id: detect
        if: steps.extract.outputs.version == 'latest'
        env:
          GITHUB_TOKEN: ${{ secrets.NODE_REPO_TOKEN || secrets.GITHUB_TOKEN }}
        run: |
          # version-utils.sh provides detect_latest_version, which presumably
          # queries the genlayer-node repo's releases/tags — confirm in
          # .github/scripts/version-utils.sh.
          source .github/scripts/version-utils.sh
          LATEST_TAG=$(detect_latest_version "$GITHUB_TOKEN")
          echo "Latest version detected: $LATEST_TAG"
          echo "version=$LATEST_TAG" >> "$GITHUB_OUTPUT"

      - name: Set final version
        id: final_version
        run: |
          # Prefer the detected tag when "latest" was requested; the detect
          # step is skipped otherwise, so fall back to the explicit value.
          if [[ "${{ steps.extract.outputs.version }}" == "latest" ]]; then
            VERSION="${{ steps.detect.outputs.version }}"
          else
            VERSION="${{ steps.extract.outputs.version }}"
          fi
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "✅ Will sync version: $VERSION"
| sync-files: | |
| name: 'Sync Files' | |
| runs-on: ubuntu-latest | |
| needs: prepare | |
| strategy: | |
| matrix: | |
| sync_type: [changelog, config, docker_compose, api_gen, api_debug, api_ops] | |
| fail-fast: false | |
| steps: | |
| - name: Checkout documentation repository | |
| uses: actions/checkout@v4 | |
| with: | |
| token: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Install yq for YAML sanitization | |
| if: matrix.sync_type == 'config' || matrix.sync_type == 'docker_compose' | |
| run: | | |
| sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 | |
| sudo chmod +x /usr/local/bin/yq | |
| - name: Clone source repository | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: genlayerlabs/genlayer-node | |
| token: ${{ secrets.NODE_REPO_TOKEN || secrets.GITHUB_TOKEN }} | |
| fetch-depth: 1 | |
| sparse-checkout: | | |
| docs | |
| configs/node/config.yaml.example | |
| release/docker-compose.yaml | |
| sparse-checkout-cone-mode: true | |
| path: source-repo | |
| ref: ${{ needs.prepare.outputs.version }} | |
| - name: Set sync parameters | |
| id: set_params | |
| run: | | |
| case "${{ matrix.sync_type }}" in | |
| "changelog") | |
| echo "title=Changelog" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/${{ github.event.inputs.changelog_path || github.event.client_payload.changelog_path || 'docs/changelog' }}" >> $GITHUB_OUTPUT | |
| echo "target_path=content/validators/changelog" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=.*" >> $GITHUB_OUTPUT | |
| ;; | |
| "config") | |
| echo "title=Config File" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/configs/node/config.yaml.example" >> $GITHUB_OUTPUT | |
| echo "target_path=content/validators/config.yaml" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=.*" >> $GITHUB_OUTPUT | |
| ;; | |
| "docker_compose") | |
| echo "title=Docker Compose File" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/release/docker-compose.yaml" >> $GITHUB_OUTPUT | |
| echo "target_path=content/validators/docker-compose.yaml" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=.*" >> $GITHUB_OUTPUT | |
| ;; | |
| "api_gen") | |
| echo "title=API Gen Methods" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/${{ github.event.inputs.api_gen_path || github.event.client_payload.api_gen_path || 'docs/api/rpc' }}" >> $GITHUB_OUTPUT | |
| echo "target_path=pages/api-references/genlayer-node/gen" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=${{ github.event.inputs.api_gen_regex || github.event.client_payload.api_gen_regex || 'gen_(?!dbg_).*' }}" >> $GITHUB_OUTPUT | |
| ;; | |
| "api_debug") | |
| echo "title=API Debug Methods" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/${{ github.event.inputs.api_debug_path || github.event.client_payload.api_debug_path || 'docs/api/rpc' }}" >> $GITHUB_OUTPUT | |
| echo "target_path=pages/api-references/genlayer-node/debug" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=${{ github.event.inputs.api_debug_regex || github.event.client_payload.api_debug_regex || 'gen_dbg_.*' }}" >> $GITHUB_OUTPUT | |
| ;; | |
| "api_ops") | |
| echo "title=API Ops Methods" >> $GITHUB_OUTPUT | |
| echo "source_path=source-repo/${{ github.event.inputs.api_ops_path || github.event.client_payload.api_ops_path || 'docs/api/ops' }}" >> $GITHUB_OUTPUT | |
| echo "target_path=pages/api-references/genlayer-node/ops" >> $GITHUB_OUTPUT | |
| echo "filter_pattern=.*" >> $GITHUB_OUTPUT | |
| ;; | |
| esac | |
| - name: Sync files using composite action | |
| id: sync | |
| uses: ./.github/actions/sync-files | |
| with: | |
| type: ${{ matrix.sync_type }} | |
| title: ${{ steps.set_params.outputs.title }} | |
| source_path: ${{ steps.set_params.outputs.source_path }} | |
| target_path: ${{ steps.set_params.outputs.target_path }} | |
| filter_pattern: ${{ steps.set_params.outputs.filter_pattern }} | |
| aggregate-results: | |
| name: 'Aggregate Sync Results' | |
| runs-on: ubuntu-latest | |
| needs: [prepare, sync-files] | |
| if: always() | |
| outputs: | |
| total_changes: ${{ steps.calculate.outputs.total_changes }} | |
| total_added: ${{ steps.calculate.outputs.total_added }} | |
| total_updated: ${{ steps.calculate.outputs.total_updated }} | |
| total_deleted: ${{ steps.calculate.outputs.total_deleted }} | |
| sync_reports: ${{ steps.calculate.outputs.all_reports }} | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v4 | |
| - name: Download all sync artifacts | |
| uses: actions/download-artifact@v4 | |
| continue-on-error: true | |
| with: | |
| pattern: synced-* | |
| merge-multiple: true | |
| path: artifacts/ | |
| - name: Calculate totals and collect reports | |
| id: calculate | |
| run: | | |
| # Move reports to sync-reports directory for the script | |
| mkdir -p sync-reports | |
| find artifacts -name "sync_report_*.md" -exec mv {} sync-reports/ \; | |
| # Run aggregation script | |
| .github/scripts/aggregate-reports.sh | |
| - name: Create merged artifact | |
| run: | | |
| # Create merged artifact structure | |
| mkdir -p synced-merged | |
| # Copy all synced files (excluding reports) | |
| find artifacts -type f ! -name "sync_report_*.md" | while read -r file; do | |
| # Get relative path from artifacts/ | |
| rel_path="${file#artifacts/}" | |
| # Create directory structure and copy file | |
| mkdir -p "synced-merged/$(dirname "$rel_path")" | |
| cp "$file" "synced-merged/$rel_path" | |
| done | |
| # Copy aggregated reports | |
| cp -r sync-reports synced-merged/ | |
| echo "✅ Merged artifact created" | |
| - name: Upload merged synced files | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: synced-merged | |
| path: synced-merged/ | |
| retention-days: 1 | |
| generate-docs: | |
| name: 'Generate Documentation' | |
| runs-on: ubuntu-latest | |
| needs: [prepare, aggregate-results] | |
| if: always() && needs.aggregate-results.result != 'cancelled' | |
| outputs: | |
| generation_success: ${{ steps.generate.outputs.success }} | |
| steps: | |
| - name: Checkout documentation repository | |
| uses: actions/checkout@v4 | |
| with: | |
| token: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Setup Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '20' | |
| cache: 'npm' | |
| - name: Install dependencies | |
| run: npm install | |
| - name: Download merged synced files | |
| if: needs.aggregate-results.result == 'success' | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: synced-merged | |
| path: temp-merged | |
| - name: Apply synced files | |
| if: needs.aggregate-results.result == 'success' | |
| run: | | |
| # Sync all required paths in a single call | |
| .github/scripts/sync-artifact-files.sh temp-merged . \ | |
| "content/validators" \ | |
| "pages/api-references/genlayer-node/gen" \ | |
| "pages/api-references/genlayer-node/debug" \ | |
| "pages/api-references/genlayer-node/ops" | |
| - name: Run documentation generation scripts | |
| id: generate | |
| run: | | |
| .github/scripts/doc-generator.sh && echo "success=true" >> "$GITHUB_OUTPUT" | |
| - name: Copy sync reports for final artifact | |
| run: | | |
| # Copy the sync-reports directory from temp-merged | |
| if [[ -d "temp-merged/sync-reports" ]]; then | |
| cp -r temp-merged/sync-reports . | |
| else | |
| echo "Warning: No sync-reports found in temp-merged" | |
| fi | |
| - name: Upload final documentation | |
| uses: actions/upload-artifact@v4 | |
| if: steps.generate.outputs.success == 'true' | |
| with: | |
| name: synced-final | |
| path: | | |
| content/validators/ | |
| pages/api-references/ | |
| pages/validators/ | |
| sync-reports/ | |
| retention-days: 1 | |
| create-pr: | |
| name: 'Create Pull Request' | |
| runs-on: ubuntu-latest | |
| needs: [prepare, aggregate-results, generate-docs] | |
| if: always() && (needs.aggregate-results.result == 'success' || needs.generate-docs.result == 'success') | |
| outputs: | |
| pr_url: ${{ steps.create_pr.outputs.pr_url }} | |
| permissions: | |
| contents: write | |
| pull-requests: write | |
| steps: | |
| - name: Checkout documentation repository | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| token: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Setup Git | |
| run: | | |
| git config user.name "github-actions[bot]" | |
| git config user.email "github-actions[bot]@users.noreply.github.com" | |
| - name: Download final documentation | |
| if: needs.generate-docs.result == 'success' | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: synced-final | |
| path: temp-final | |
| - name: Apply final documentation | |
| if: needs.generate-docs.result == 'success' | |
| run: | | |
| # Sync all required paths in a single call | |
| .github/scripts/sync-artifact-files.sh temp-final . \ | |
| "content/validators" \ | |
| "pages/validators" \ | |
| "pages/api-references" | |
| - name: Check for changes and create branch | |
| id: check_changes | |
| run: | | |
| source .github/scripts/git-utils.sh | |
| if check_for_changes; then | |
| BRANCH_NAME=$(create_sync_branch "${{ needs.prepare.outputs.version }}") | |
| # Use aggregated metrics from aggregate-results job | |
| commit_and_push_changes \ | |
| "${{ needs.prepare.outputs.version }}" \ | |
| "${{ needs.aggregate-results.outputs.total_changes }}" \ | |
| "${{ needs.aggregate-results.outputs.total_added }}" \ | |
| "${{ needs.aggregate-results.outputs.total_updated }}" \ | |
| "${{ needs.aggregate-results.outputs.total_deleted }}" \ | |
| "$BRANCH_NAME" | |
| else | |
| echo "No changes to commit" | |
| exit 0 | |
| fi | |
| - name: Create Pull Request | |
| id: create_pr | |
| if: steps.check_changes.outputs.has_changes == 'true' | |
| env: | |
| GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| # Get the branch name from git | |
| BRANCH_NAME=$(git branch --show-current) | |
| # Check if PR already exists for this branch | |
| if PR_JSON=$(gh pr view "$BRANCH_NAME" --json url,state 2>/dev/null); then | |
| PR_STATE=$(echo "$PR_JSON" | jq -r .state) | |
| PR_URL=$(echo "$PR_JSON" | jq -r .url) | |
| if [ "$PR_STATE" = "OPEN" ]; then | |
| echo "Open PR already exists for branch $BRANCH_NAME – updating PR description" | |
| echo "Existing PR: $PR_URL" | |
| UPDATE_EXISTING_PR=true | |
| else | |
| echo "Closed PR exists for branch $BRANCH_NAME (state: $PR_STATE)" | |
| echo "Creating new PR..." | |
| UPDATE_EXISTING_PR=false | |
| fi | |
| else | |
| echo "No PR exists for branch $BRANCH_NAME" | |
| UPDATE_EXISTING_PR=false | |
| fi | |
| # Create PR body file | |
| PR_BODY_FILE="${{ runner.temp }}/pr_body.md" | |
| cat >"$PR_BODY_FILE" <<EOF | |
| ## 🔄 Documentation Sync from Node Repository | |
| This PR automatically syncs documentation from the genlayer-node repository. | |
| ### 📋 Summary | |
| - **Source Repository**: \`genlayerlabs/genlayer-node\` | |
| - **Version**: \`${{ needs.prepare.outputs.version }}\` | |
| - **API Gen Filter**: \`${{ github.event.inputs.api_gen_regex != '' && github.event.inputs.api_gen_regex || (github.event.client_payload.api_gen_regex != '' && github.event.client_payload.api_gen_regex || 'gen_(?!dbg_).*') }}\` | |
| - **API Debug Filter**: \`${{ github.event.inputs.api_debug_regex != '' && github.event.inputs.api_debug_regex || (github.event.client_payload.api_debug_regex != '' && github.event.client_payload.api_debug_regex || 'gen_dbg_.*') }}\` | |
| - **Total Files Changed**: ${{ needs.aggregate-results.outputs.total_changes }} | |
| - Added: ${{ needs.aggregate-results.outputs.total_added }} files | |
| - Updated: ${{ needs.aggregate-results.outputs.total_updated }} files | |
| - Deleted: ${{ needs.aggregate-results.outputs.total_deleted }} files | |
| ### 🤖 Automated Process | |
| This PR was automatically generated by the documentation sync workflow. The following scripts were run: | |
| - \`npm run node-generate-changelog\` | |
| - \`npm run node-update-setup-guide\` | |
| - \`npm run node-update-config\` | |
| - \`npm run node-update-docker-compose\` | |
| - \`npm run node-generate-api-docs\` | |
| Please review the changes and merge if everything looks correct. | |
| ### ✅ Checklist | |
| - [x] All automated scripts completed successfully | |
| - [x] No sensitive information is exposed in config files | |
| - [x] API documentation is properly formatted | |
| EOF | |
| # Create or update PR using GitHub CLI | |
| if [ "$UPDATE_EXISTING_PR" = "true" ]; then | |
| # Update existing PR | |
| gh pr edit "$BRANCH_NAME" \ | |
| --title "docs: sync documentation from genlayer-node ${{ needs.prepare.outputs.version }}" \ | |
| --body-file "$PR_BODY_FILE" | |
| echo "pr_url=$PR_URL" >> $GITHUB_OUTPUT | |
| echo "✅ PR updated: $PR_URL" | |
| else | |
| # Create new PR and capture URL | |
| PR_URL=$(gh pr create \ | |
| --title "docs: sync documentation from genlayer-node ${{ needs.prepare.outputs.version }}" \ | |
| --body-file "$PR_BODY_FILE" \ | |
| --label "documentation" \ | |
| --label "node" \ | |
| --base "main" \ | |
| --head "$BRANCH_NAME") | |
| echo "pr_url=$PR_URL" >> $GITHUB_OUTPUT | |
| echo "✅ PR created: $PR_URL" | |
| fi | |
| summary: | |
| name: 'Workflow Summary' | |
| runs-on: ubuntu-latest | |
| needs: [prepare, aggregate-results, generate-docs, create-pr] | |
| if: always() | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v4 | |
| - name: Download final artifact with sync reports | |
| uses: actions/download-artifact@v4 | |
| continue-on-error: true | |
| with: | |
| name: synced-final | |
| path: artifacts/ | |
| - name: Generate workflow summary | |
| run: | | |
| echo "# 📚 Documentation Sync Summary" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "## 📊 Overall Results" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Source Version:** \`${{ needs.prepare.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Total Changes:** ${{ needs.aggregate-results.outputs.total_changes }}" >> $GITHUB_STEP_SUMMARY | |
| echo " - ➕ Added: ${{ needs.aggregate-results.outputs.total_added }} files" >> $GITHUB_STEP_SUMMARY | |
| echo " - ✏️ Updated: ${{ needs.aggregate-results.outputs.total_updated }} files" >> $GITHUB_STEP_SUMMARY | |
| echo " - ➖ Deleted: ${{ needs.aggregate-results.outputs.total_deleted }} files" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "## 📁 Sync Results by Type" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| # Process each sync type report | |
| for sync_type in changelog config docker_compose api_gen api_debug api_ops; do | |
| # Get proper title | |
| case "$sync_type" in | |
| "changelog") title="📝 Changelog Sync" ;; | |
| "config") title="⚙️ Config File Sync" ;; | |
| "docker_compose") title="🐳 Docker Compose Sync" ;; | |
| "api_gen") title="🔧 API Gen Methods Sync" ;; | |
| "api_debug") title="🐛 API Debug Methods Sync" ;; | |
| "api_ops") title="📊 API Ops Methods Sync" ;; | |
| esac | |
| echo "### $title" >> $GITHUB_STEP_SUMMARY | |
| # Check if report exists (look in sync-reports directory) | |
| if [[ -f "artifacts/sync-reports/sync_report_${sync_type}.md" ]]; then | |
| # Extract summary line and file list from report | |
| report_content=$(cat "artifacts/sync-reports/sync_report_${sync_type}.md") | |
| # Extract counts | |
| added=$(echo "$report_content" | grep -o '\*\*Added\*\*: [0-9]\+' | grep -o '[0-9]\+' || echo "0") | |
| updated=$(echo "$report_content" | grep -o '\*\*Updated\*\*: [0-9]\+' | grep -o '[0-9]\+' || echo "0") | |
| deleted=$(echo "$report_content" | grep -o '\*\*Deleted\*\*: [0-9]\+' | grep -o '[0-9]\+' || echo "0") | |
| total=$(echo "$report_content" | grep -o '\*\*Total changes\*\*: [0-9]\+' | grep -o '[0-9]\+' || echo "0") | |
| if [[ "$total" == "0" ]]; then | |
| echo "No updates found" >> $GITHUB_STEP_SUMMARY | |
| else | |
| # Show counts | |
| [[ "$added" != "0" ]] && echo "- **Added**: $added files" >> $GITHUB_STEP_SUMMARY | |
| [[ "$updated" != "0" ]] && echo "- **Updated**: $updated files" >> $GITHUB_STEP_SUMMARY | |
| [[ "$deleted" != "0" ]] && echo "- **Deleted**: $deleted files" >> $GITHUB_STEP_SUMMARY | |
| # Show file lists directly | |
| if grep -q "### Added Files" "artifacts/sync-reports/sync_report_${sync_type}.md"; then | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| sed -n '/### Added Files/,/^###\|^$/p' "artifacts/sync-reports/sync_report_${sync_type}.md" | grep "^- " | sed 's/^- /- **Added:** /' >> $GITHUB_STEP_SUMMARY | |
| fi | |
| if grep -q "### Updated Files" "artifacts/sync-reports/sync_report_${sync_type}.md"; then | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| sed -n '/### Updated Files/,/^###\|^$/p' "artifacts/sync-reports/sync_report_${sync_type}.md" | grep "^- " | sed 's/^- /- **Updated:** /' >> $GITHUB_STEP_SUMMARY | |
| fi | |
| if grep -q "### Deleted Files" "artifacts/sync-reports/sync_report_${sync_type}.md"; then | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| sed -n '/### Deleted Files/,/^###\|^$/p' "artifacts/sync-reports/sync_report_${sync_type}.md" | grep "^- " | sed 's/^- /- **Deleted:** /' >> $GITHUB_STEP_SUMMARY | |
| fi | |
| fi | |
| else | |
| echo "No report available" >> $GITHUB_STEP_SUMMARY | |
| fi | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| done | |
| # Add PR link if created | |
| if [[ "${{ needs.create-pr.outputs.pr_url }}" != "" ]]; then | |
| echo "## ✅ Pull Request" >> $GITHUB_STEP_SUMMARY | |
| echo "**PR Created:** ${{ needs.create-pr.outputs.pr_url }}" >> $GITHUB_STEP_SUMMARY | |
| else | |
| echo "## ℹ️ Result" >> $GITHUB_STEP_SUMMARY | |
| if [[ "${{ needs.aggregate-results.outputs.total_changes }}" == "0" ]]; then | |
| echo "No changes detected - no PR created" >> $GITHUB_STEP_SUMMARY | |
| else | |
| echo "Changes detected but PR creation failed or was skipped" >> $GITHUB_STEP_SUMMARY | |
| fi | |
| fi | |
| cleanup: | |
| name: 'Cleanup Artifacts' | |
| runs-on: ubuntu-latest | |
| needs: [prepare, aggregate-results, generate-docs, create-pr, summary] | |
| if: always() && (needs.create-pr.result == 'success' || needs.summary.result == 'success') | |
| permissions: | |
| actions: write | |
| steps: | |
| - name: Check cleanup configuration | |
| id: check | |
| run: | | |
| if [[ "${{ env.CLEANUP_ARTIFACTS }}" == "true" ]]; then | |
| echo "should_cleanup=true" >> $GITHUB_OUTPUT | |
| echo "✅ Artifact cleanup is enabled" | |
| else | |
| echo "should_cleanup=false" >> $GITHUB_OUTPUT | |
| echo "⏭️ Artifact cleanup is disabled (CLEANUP_ARTIFACTS=${{ env.CLEANUP_ARTIFACTS }})" | |
| fi | |
| - name: Build artifact list to delete | |
| if: steps.check.outputs.should_cleanup == 'true' | |
| id: artifacts | |
| env: | |
| GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| # Get ALL artifacts from this workflow run | |
| ARTIFACTS_TO_DELETE=$(gh api \ | |
| -H "Accept: application/vnd.github+json" \ | |
| /repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts \ | |
| --jq '.artifacts[].name' | \ | |
| paste -sd '|' -) | |
| echo "artifacts_list<<EOF" >> $GITHUB_OUTPUT | |
| echo "$ARTIFACTS_TO_DELETE" | tr '|' '\n' >> $GITHUB_OUTPUT | |
| echo "EOF" >> $GITHUB_OUTPUT | |
| echo "Artifacts to delete: $ARTIFACTS_TO_DELETE" | |
| - name: Delete intermediate artifacts | |
| if: steps.check.outputs.should_cleanup == 'true' && steps.artifacts.outputs.artifacts_list != '' | |
| uses: geekyeggo/delete-artifact@v5 | |
| with: | |
| name: ${{ steps.artifacts.outputs.artifacts_list }} | |
| failOnError: false |