From caee20ea50de9d943a3b1f98edb82f0e10f5f7ac Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 03:18:38 +0800 Subject: [PATCH 01/38] ci: add smart PR workflow automation --- .github/pull_request_template.md | 10 ++ .github/workflows/ci.yml | 200 ++++++++++++++++++++++++++++ .github/workflows/pr-advisory.yml | 184 +++++++++++++++++++++++++ .github/workflows/pr-governance.yml | 138 +++++++++++++++++++ CONTRIBUTING.md | 7 +- README.md | 1 + docs/development/CONFIG_FIELDS.md | 2 +- docs/development/TESTING.md | 17 ++- eslint.config.js | 2 +- package.json | 1 + scripts/ci/docs-check.js | 171 ++++++++++++++++++++++++ 11 files changed, 726 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/pr-advisory.yml create mode 100644 .github/workflows/pr-governance.yml create mode 100644 scripts/ci/docs-check.js diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4a28e033..ccbe5be0 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -8,15 +8,25 @@ ## Testing - [ ] `npm run lint` +- [ ] `npm run typecheck` - [ ] `npm run build` - [ ] `npm test` +- [ ] `npm run docs:check` - [ ] Not applicable +## Docs Impact + +- [ ] README or docs updated +- [ ] No docs changes needed + ## Compliance Confirmation - [ ] This change stays within the repository scope and OpenAI Terms of Service expectations. - [ ] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior. - [ ] I updated tests and documentation when the change affected users, maintainers, or repository behavior. +- [ ] No auth, request-routing, or storage paths changed. +- [ ] I manually tested with a real ChatGPT Plus/Pro account. +- [ ] Maintainer live verification completed. 
## Notes diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..c15b18c9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,200 @@ +name: CI + +on: + pull_request: + push: + branches: + - main + merge_group: + +permissions: + contents: read + +concurrency: + group: ci-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + CI: true + HUSKY: 0 + +jobs: + changes: + name: Detect changes + runs-on: ubuntu-latest + outputs: + code_changed: ${{ steps.detect.outputs.code_changed }} + docs_changed: ${{ steps.detect.outputs.docs_changed }} + workflow_changed: ${{ steps.detect.outputs.workflow_changed }} + steps: + - name: Check out repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Classify changed files + id: detect + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_REF: ${{ github.base_ref }} + BEFORE_SHA: ${{ github.event.before }} + run: | + set -euo pipefail + + docs_changed=false + code_changed=false + workflow_changed=false + files=() + + if [[ "${EVENT_NAME}" == "pull_request" ]]; then + git fetch --no-tags --depth=1 origin "${BASE_REF}" + while IFS= read -r file; do + files+=("${file}") + done < <(git diff --name-only "origin/${BASE_REF}...HEAD") + elif [[ "${EVENT_NAME}" == "push" && -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then + while IFS= read -r file; do + files+=("${file}") + done < <(git diff --name-only "${BEFORE_SHA}...HEAD") + else + while IFS= read -r file; do + files+=("${file}") + done < <(git ls-files) + fi + + for file in "${files[@]}"; do + [[ -z "${file}" ]] && continue + + case "${file}" in + README.md|CONTRIBUTING.md|CHANGELOG.md|SECURITY.md|config/README.md|docs/**|test/README.md|.github/*.md|.github/ISSUE_TEMPLATE/*.md) + docs_changed=true + ;; + esac + + case "${file}" in + .github/workflows/**) + workflow_changed=true + code_changed=true + ;; + 
README.md|CONTRIBUTING.md|CHANGELOG.md|SECURITY.md|config/README.md|docs/**|test/README.md|.github/*.md|.github/ISSUE_TEMPLATE/*.md) + ;; + *) + code_changed=true + ;; + esac + done + + if [[ "${EVENT_NAME}" != "pull_request" ]]; then + docs_changed=true + code_changed=true + fi + + { + echo "docs_changed=${docs_changed}" + echo "code_changed=${code_changed}" + echo "workflow_changed=${workflow_changed}" + } >> "${GITHUB_OUTPUT}" + + validate: + name: validate + needs: changes + if: needs.changes.outputs.code_changed == 'true' + runs-on: ubuntu-latest + timeout-minutes: 20 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Lint + run: npm run lint + + - name: Type check + run: npm run typecheck + + - name: Build + run: npm run build + + - name: Run tests + run: npm test + + docs-sanity: + name: docs-sanity + needs: changes + if: needs.changes.outputs.docs_changed == 'true' + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + + - name: Verify markdown links and CI badge targets + run: npm run docs:check + + actionlint: + name: actionlint + needs: changes + if: needs.changes.outputs.workflow_changed == 'true' + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Lint GitHub Actions workflows + uses: docker://rhysd/actionlint:1.7.11 + with: + args: -color + + required-pr: + name: required-pr + needs: + - changes + - validate + - docs-sanity + - actionlint + if: always() + runs-on: ubuntu-latest + steps: + - name: Evaluate required checks + shell: bash + env: + CODE_CHANGED: ${{ needs.changes.outputs.code_changed }} + DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }} + WORKFLOW_CHANGED: ${{ 
needs.changes.outputs.workflow_changed }} + VALIDATE_RESULT: ${{ needs.validate.result }} + DOCS_RESULT: ${{ needs.docs-sanity.result }} + ACTIONLINT_RESULT: ${{ needs.actionlint.result }} + run: | + set -euo pipefail + failures=() + + if [[ "${CODE_CHANGED}" == "true" && "${VALIDATE_RESULT}" != "success" ]]; then + failures+=("validate") + fi + + if [[ "${DOCS_CHANGED}" == "true" && "${DOCS_RESULT}" != "success" ]]; then + failures+=("docs-sanity") + fi + + if [[ "${WORKFLOW_CHANGED}" == "true" && "${ACTIONLINT_RESULT}" != "success" ]]; then + failures+=("actionlint") + fi + + if [[ ${#failures[@]} -gt 0 ]]; then + echo "Required checks failed: ${failures[*]}" + exit 1 + fi + + echo "All required PR checks passed." diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml new file mode 100644 index 00000000..3dc9e207 --- /dev/null +++ b/.github/workflows/pr-advisory.yml @@ -0,0 +1,184 @@ +name: PR Advisory + +on: + pull_request: + push: + branches: + - main + schedule: + - cron: "23 6 * * *" + workflow_dispatch: + +permissions: + contents: read + +concurrency: + group: advisory-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +env: + CI: true + HUSKY: 0 + +jobs: + detect-dependency-change: + name: detect-dependency-change + runs-on: ubuntu-latest + outputs: + dependency_changed: ${{ steps.detect.outputs.dependency_changed }} + steps: + - name: Check out repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Detect dependency changes + id: detect + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_REF: ${{ github.base_ref }} + BEFORE_SHA: ${{ github.event.before }} + run: | + set -euo pipefail + + dependency_changed=false + + if [[ "${EVENT_NAME}" == "schedule" || "${EVENT_NAME}" == "workflow_dispatch" ]]; then + dependency_changed=true + elif [[ "${EVENT_NAME}" == "pull_request" ]]; then + git fetch --no-tags --depth=1 origin 
"${BASE_REF}" + if git diff --name-only "origin/${BASE_REF}...HEAD" | grep -Eq '^(package\.json|package-lock\.json)$'; then + dependency_changed=true + fi + elif [[ -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then + if git diff --name-only "${BEFORE_SHA}...HEAD" | grep -Eq '^(package\.json|package-lock\.json)$'; then + dependency_changed=true + fi + else + dependency_changed=true + fi + + echo "dependency_changed=${dependency_changed}" >> "${GITHUB_OUTPUT}" + + coverage: + name: coverage + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Run coverage + id: coverage + continue-on-error: true + run: npm run test:coverage + + - name: Upload coverage artifacts + if: always() + uses: actions/upload-artifact@v7 + with: + name: coverage-report-${{ github.run_id }} + path: coverage + if-no-files-found: ignore + + - name: Write coverage summary + if: always() + shell: bash + env: + COVERAGE_OUTCOME: ${{ steps.coverage.outcome }} + run: | + { + echo "## Coverage" + if [[ "${COVERAGE_OUTCOME}" == "success" ]]; then + echo "" + echo "- \`npm run test:coverage\` passed." + else + echo "" + echo "- \`npm run test:coverage\` is currently advisory." + echo "- The command failed during this run. Keep the artifact for inspection and remediate coverage before promoting it to a required gate." 
+ fi + } >> "${GITHUB_STEP_SUMMARY}" + + compat-matrix: + name: compat-matrix (${{ matrix.os }}, Node ${{ matrix.node }}) + runs-on: ${{ matrix.os }} + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + node: 18 + - os: ubuntu-latest + node: 22 + - os: windows-latest + node: 20 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: ${{ matrix.node }} + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Build + run: npm run build + + - name: Run tests + run: npm test + + dependency-audit: + name: dependency-audit + needs: detect-dependency-change + if: needs.detect-dependency-change.outputs.dependency_changed == 'true' + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Run dependency audit + id: audit + continue-on-error: true + run: npm run audit:ci + + - name: Write dependency audit summary + if: always() + shell: bash + env: + AUDIT_OUTCOME: ${{ steps.audit.outcome }} + run: | + { + echo "## Dependency audit" + if [[ "${AUDIT_OUTCOME}" == "success" ]]; then + echo "" + echo "- \`npm run audit:ci\` passed." + else + echo "" + echo "- \`npm run audit:ci\` is currently advisory." + echo "- The audit found issues or baseline failures. Review the job log before promoting this lane to required status." 
+ fi + } >> "${GITHUB_STEP_SUMMARY}" diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml new file mode 100644 index 00000000..9656519b --- /dev/null +++ b/.github/workflows/pr-governance.yml @@ -0,0 +1,138 @@ +name: PR Governance + +on: + pull_request_target: + types: + - opened + - edited + - reopened + - synchronize + - ready_for_review + +permissions: + contents: read + issues: write + pull-requests: read + +concurrency: + group: governance-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + pr-governance: + name: pr-governance + runs-on: ubuntu-latest + steps: + - name: Validate PR template and live verification markers + uses: actions/github-script@v8 + with: + script: | + const owner = context.repo.owner; + const repo = context.repo.repo; + const issueNumber = context.payload.pull_request.number; + const body = context.payload.pull_request.body || ""; + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: issueNumber, + per_page: 100, + }); + + const changedFiles = files.map((file) => file.filename); + const riskyMatchers = [ + /^index\.ts$/, + /^lib\/auth\//, + /^lib\/request\//, + /^lib\/storage(?:\/|\.ts$)/, + /^lib\/recovery\/storage\.ts$/, + ]; + + const riskyPaths = changedFiles.filter((file) => riskyMatchers.some((matcher) => matcher.test(file))); + const liveVerificationRequired = riskyPaths.length > 0; + + const requiredHeadings = [ + /^## Summary$/m, + /^## Testing$/m, + /^## Docs Impact$/m, + /^## Compliance Confirmation$/m, + /^## Notes$/m, + ]; + + const missingSections = requiredHeadings.filter((pattern) => !pattern.test(body)); + const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(body); + const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(body); + const manualLiveChecked = /- \[x\] I manually tested with a real 
ChatGPT Plus\/Pro account\./i.test(body); + const maintainerLiveChecked = /- \[x\] Maintainer live verification completed\./i.test(body); + + const labelName = "needs-live-verification"; + + async function ensureLabel() { + try { + await github.rest.issues.getLabel({ owner, repo, name: labelName }); + } catch (error) { + if (error.status !== 404) throw error; + await github.rest.issues.createLabel({ + owner, + repo, + name: labelName, + color: "b60205", + description: "Manual ChatGPT Plus/Pro verification required before merge", + }); + } + } + + if (liveVerificationRequired) { + await ensureLabel(); + await github.rest.issues.addLabels({ + owner, + repo, + issue_number: issueNumber, + labels: [labelName], + }); + } else { + try { + await github.rest.issues.removeLabel({ + owner, + repo, + issue_number: issueNumber, + name: labelName, + }); + } catch (error) { + if (error.status !== 404) throw error; + } + } + + const failures = []; + + if (missingSections.length > 0) { + failures.push("PR description is missing one or more required template sections."); + } + + if (!complianceChecked) { + failures.push("The compliance checkbox must be checked."); + } + + if (liveVerificationRequired) { + if (!manualLiveChecked && !maintainerLiveChecked) { + failures.push("This PR touches auth/request/storage paths and needs a completed live verification checkbox."); + } + } else if (!noLiveRequiredChecked && !manualLiveChecked && !maintainerLiveChecked) { + failures.push("Choose the matching live-validation state in the PR template."); + } + + const summaryLines = [ + "## PR governance", + "", + `- Changed files: ${changedFiles.length}`, + `- Live verification required: ${liveVerificationRequired ? 
"yes" : "no"}`, + ]; + + if (riskyPaths.length > 0) { + summaryLines.push(`- Risky paths: ${riskyPaths.join(", ")}`); + } + + await core.summary.addRaw(summaryLines.join("\n")).write(); + + if (failures.length > 0) { + core.setFailed(failures.join("\n")); + } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 192bf09b..aef7cbd7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -63,9 +63,10 @@ The project does not accept work aimed at: 3. **Include tests** for new functionality 4. **Update documentation** (README.md, config examples, etc.) 5. **Ensure compliance** with guidelines above -6. **Test thoroughly** with the most appropriate validation for the change -7. **Complete the pull request template** with summary, testing, and compliance details -8. **Submit PR** with clear description of changes +6. **Run local validation**: `npm run lint`, `npm run typecheck`, `npm run build`, `npm test`, and `npm run docs:check` when docs or workflow files change +7. **Test thoroughly** with the most appropriate validation for the change, including real ChatGPT Plus/Pro checks when touching auth, request-routing, or storage behavior +8. **Complete the pull request template** with summary, testing, docs impact, and compliance details +9. **Submit PR** with clear description of changes Pull requests are automatically screened for incomplete or suspicious submissions. Legitimate contributions are still welcome, but low-signal PRs may be flagged for maintainer review before they move forward. 
diff --git a/README.md b/README.md index 3b9a3d29..90606019 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![npm version](https://img.shields.io/npm/v/oc-chatgpt-multi-auth.svg)](https://www.npmjs.com/package/oc-chatgpt-multi-auth) [![npm downloads](https://img.shields.io/npm/dw/oc-chatgpt-multi-auth.svg)](https://www.npmjs.com/package/oc-chatgpt-multi-auth) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](LICENSE) +[![Tests](https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/ci.yml/badge.svg)](https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/ci.yml) Use your ChatGPT Plus/Pro subscription inside OpenCode with OAuth login, GPT-5/Codex model presets, and multi-account failover. diff --git a/docs/development/CONFIG_FIELDS.md b/docs/development/CONFIG_FIELDS.md index 465b916a..1b720737 100644 --- a/docs/development/CONFIG_FIELDS.md +++ b/docs/development/CONFIG_FIELDS.md @@ -693,4 +693,4 @@ Notes: - [CONFIG_FLOW.md](./CONFIG_FLOW.md) - Complete config system guide - [ARCHITECTURE.md](./ARCHITECTURE.md) - Technical architecture -- [BUGS_FIXED.md](./BUGS_FIXED.md) - Bug fixes and testing +- [TESTING.md](./TESTING.md) - Validation matrix and CI guidance diff --git a/docs/development/TESTING.md b/docs/development/TESTING.md index a8f8d1a0..1ecc9896 100644 --- a/docs/development/TESTING.md +++ b/docs/development/TESTING.md @@ -30,9 +30,22 @@ Recommended validation command before release: ```bash npm run lint npm run typecheck +npm run build npm test +npm run docs:check ``` +Current PR automation is split into required and advisory lanes: + +- Required `required-pr`: runs docs verification for Markdown changes, full `lint` + `typecheck` + `build` + `test` validation for code changes, and `actionlint` when workflows change. +- Required `pr-governance`: enforces the pull request template, compliance checkbox, and a completed live-verification marker for auth/request/storage changes. 
+- Advisory `PR Advisory`: runs `npm run test:coverage`, a wider compatibility matrix, and `npm run audit:ci`. + +Notes on the advisory lane: + +- `npm run test:coverage` is currently informational because the repo baseline is below the configured global coverage thresholds. +- `npm run audit:ci` is currently informational because the production dependency audit still reports an unresolved `hono` advisory. + ## Test Scenarios Matrix ### Scenario 1: Default OpenCode Models (No Custom Config) @@ -786,6 +799,6 @@ describe('filterInput', () => { ## See Also -- [IMPLEMENTATION_SUMMARY.md](./IMPLEMENTATION_SUMMARY.md) - Complete summary - [CONFIG_FIELDS.md](./CONFIG_FIELDS.md) - Field usage guide -- [BUGS_FIXED.md](./BUGS_FIXED.md) - Bug analysis +- [CONFIG_FLOW.md](./CONFIG_FLOW.md) - Configuration loading and precedence +- [ARCHITECTURE.md](./ARCHITECTURE.md) - Technical architecture and request flow diff --git a/eslint.config.js b/eslint.config.js index d038ed8f..c326b456 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -3,7 +3,7 @@ import tsparser from "@typescript-eslint/parser"; export default [ { - ignores: ["dist/**", "node_modules/**", "winston/**", "*.cjs", "*.mjs"], + ignores: ["coverage/**", "dist/**", "node_modules/**", "winston/**", "*.cjs", "*.mjs"], }, { files: ["index.ts", "lib/**/*.ts"], diff --git a/package.json b/package.json index 3517fc83..47875113 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,7 @@ "test:ui": "vitest --ui", "test:coverage": "vitest run --coverage", "coverage": "vitest run --coverage", + "docs:check": "node scripts/ci/docs-check.js", "audit:prod": "npm audit --omit=dev --audit-level=high", "audit:all": "npm audit --audit-level=high", "audit:dev:allowlist": "node scripts/audit-dev-allowlist.js", diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js new file mode 100644 index 00000000..45a5a4dd --- /dev/null +++ b/scripts/ci/docs-check.js @@ -0,0 +1,171 @@ +#!/usr/bin/env node + +import { access, readdir, 
readFile } from "node:fs/promises"; +import path from "node:path"; + +const ROOT = process.cwd(); +const DEFAULT_FILES = ["README.md", "CONTRIBUTING.md", "SECURITY.md", "CHANGELOG.md"]; +const DEFAULT_DIRS = [".github", "config", "docs", "test"]; +const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); +const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); + +async function exists(targetPath) { + try { + await access(targetPath); + return true; + } catch { + return false; + } +} + +async function walkMarkdownFiles(dirPath) { + const entries = await readdir(dirPath, { withFileTypes: true }); + const files = []; + + for (const entry of entries) { + const absolutePath = path.join(dirPath, entry.name); + const relativePath = path.relative(ROOT, absolutePath).replace(/\\/g, "/"); + + if (entry.isDirectory()) { + if (IGNORED_DIRS.has(relativePath) || IGNORED_DIRS.has(entry.name)) continue; + files.push(...(await walkMarkdownFiles(absolutePath))); + continue; + } + + if (MARKDOWN_EXTENSIONS.has(path.extname(entry.name).toLowerCase())) { + files.push(absolutePath); + } + } + + return files; +} + +async function collectMarkdownFiles(inputPaths) { + const resolved = new Set(); + + if (inputPaths.length > 0) { + for (const inputPath of inputPaths) { + const absolutePath = path.resolve(ROOT, inputPath); + if (!(await exists(absolutePath))) continue; + const extension = path.extname(absolutePath).toLowerCase(); + if (MARKDOWN_EXTENSIONS.has(extension)) { + resolved.add(absolutePath); + continue; + } + + const nestedFiles = await walkMarkdownFiles(absolutePath); + for (const nestedFile of nestedFiles) resolved.add(nestedFile); + } + + return [...resolved].sort(); + } + + for (const file of DEFAULT_FILES) { + const absolutePath = path.join(ROOT, file); + if (await exists(absolutePath)) resolved.add(absolutePath); + } + + for (const dir of DEFAULT_DIRS) { + const absolutePath = path.join(ROOT, dir); + if (!(await 
exists(absolutePath))) continue; + const nestedFiles = await walkMarkdownFiles(absolutePath); + for (const nestedFile of nestedFiles) resolved.add(nestedFile); + } + + return [...resolved].sort(); +} + +function extractMarkdownLinks(markdown) { + const stripped = markdown + .replace(/```[\s\S]*?```/g, "\n") + .replace(/`[^`\n]+`/g, "`code`"); + const pattern = /!?\[[^\]]*]\(([^)\n]+)\)/g; + const links = []; + + for (const match of stripped.matchAll(pattern)) { + const rawTarget = match[1]?.trim(); + if (!rawTarget) continue; + + let target = rawTarget; + if (target.startsWith("<") && target.endsWith(">")) { + target = target.slice(1, -1).trim(); + } + + const spacedTarget = target.match(/^(\S+)\s+["'(].*$/); + if (spacedTarget?.[1]) { + target = spacedTarget[1]; + } + + links.push(target); + } + + return links; +} + +function getWorkflowPathFromUrl(target) { + try { + const url = new URL(target); + if (!["github.com", "www.github.com"].includes(url.hostname)) return null; + const match = url.pathname.match(/\/actions\/workflows\/([^/]+)(?:\/badge\.svg)?$/); + return match?.[1] ?? 
null; + } catch { + return null; + } +} + +async function validateLink(filePath, linkTarget) { + if (!linkTarget || linkTarget.startsWith("#")) return null; + if (/^(mailto:|tel:|data:)/i.test(linkTarget)) return null; + + const workflowFile = getWorkflowPathFromUrl(linkTarget); + if (workflowFile) { + const workflowPath = path.join(ROOT, ".github", "workflows", workflowFile); + if (await exists(workflowPath)) return null; + return `Missing workflow referenced by GitHub Actions badge/link: ${workflowFile}`; + } + + if (/^https?:\/\//i.test(linkTarget)) return null; + if (linkTarget.startsWith("/")) return null; + + const [rawPath] = linkTarget.split(/[?#]/, 1); + if (!rawPath) return null; + + const resolvedPath = path.resolve(path.dirname(filePath), rawPath); + if (await exists(resolvedPath)) return null; + + return `Missing local target: ${rawPath}`; +} + +async function main() { + const files = await collectMarkdownFiles(process.argv.slice(2)); + if (files.length === 0) { + console.log("docs-check: no markdown files found"); + return; + } + + const failures = []; + + for (const filePath of files) { + const contents = await readFile(filePath, "utf8"); + const links = extractMarkdownLinks(contents); + + for (const link of links) { + const error = await validateLink(filePath, link); + if (!error) continue; + failures.push(`${path.relative(ROOT, filePath).replace(/\\/g, "/")}: ${error} (${link})`); + } + } + + if (failures.length > 0) { + console.error("docs-check found broken documentation links:"); + for (const failure of failures) { + console.error(`- ${failure}`); + } + process.exitCode = 1; + return; + } + + console.log(`docs-check: verified ${files.length} markdown file(s)`); +} + +await main(); From d4ca4443f0f9817c38a305619ca78f09c229a383 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 03:54:14 +0800 Subject: [PATCH 02/38] ci: split required PR validation lanes --- .github/workflows/ci.yml | 117 ++++++++++++++++++++++++++++-- 
.github/workflows/pr-advisory.yml | 2 +- CONTRIBUTING.md | 18 +++++ README.md | 2 + docs/development/TESTING.md | 9 ++- 5 files changed, 138 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c15b18c9..a8ec3d0c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -95,12 +95,12 @@ jobs: echo "workflow_changed=${workflow_changed}" } >> "${GITHUB_OUTPUT}" - validate: - name: validate + lint: + name: lint needs: changes if: needs.changes.outputs.code_changed == 'true' runs-on: ubuntu-latest - timeout-minutes: 20 + timeout-minutes: 15 steps: - name: Check out repository uses: actions/checkout@v6 @@ -117,12 +117,91 @@ jobs: - name: Lint run: npm run lint + typecheck: + name: typecheck + needs: changes + if: needs.changes.outputs.code_changed == 'true' + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + - name: Type check run: npm run typecheck + build: + name: build + needs: changes + if: needs.changes.outputs.code_changed == 'true' + runs-on: ubuntu-latest + timeout-minutes: 20 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + - name: Build run: npm run build + unit-linux: + name: unit (linux) + needs: changes + if: needs.changes.outputs.code_changed == 'true' + runs-on: ubuntu-latest + timeout-minutes: 25 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + + - name: Run tests + run: npm test + + unit-windows: + name: unit (windows) + needs: changes 
+ if: needs.changes.outputs.code_changed == 'true' + runs-on: windows-latest + timeout-minutes: 30 + steps: + - name: Check out repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: .nvmrc + cache: npm + + - name: Install dependencies + run: npm ci + - name: Run tests run: npm test @@ -161,7 +240,11 @@ jobs: name: required-pr needs: - changes - - validate + - lint + - typecheck + - build + - unit-linux + - unit-windows - docs-sanity - actionlint if: always() @@ -173,15 +256,35 @@ jobs: CODE_CHANGED: ${{ needs.changes.outputs.code_changed }} DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }} WORKFLOW_CHANGED: ${{ needs.changes.outputs.workflow_changed }} - VALIDATE_RESULT: ${{ needs.validate.result }} + LINT_RESULT: ${{ needs.lint.result }} + TYPECHECK_RESULT: ${{ needs.typecheck.result }} + BUILD_RESULT: ${{ needs.build.result }} + UNIT_LINUX_RESULT: ${{ needs.unit-linux.result }} + UNIT_WINDOWS_RESULT: ${{ needs.unit-windows.result }} DOCS_RESULT: ${{ needs.docs-sanity.result }} ACTIONLINT_RESULT: ${{ needs.actionlint.result }} run: | set -euo pipefail failures=() - if [[ "${CODE_CHANGED}" == "true" && "${VALIDATE_RESULT}" != "success" ]]; then - failures+=("validate") + if [[ "${CODE_CHANGED}" == "true" && "${LINT_RESULT}" != "success" ]]; then + failures+=("lint") + fi + + if [[ "${CODE_CHANGED}" == "true" && "${TYPECHECK_RESULT}" != "success" ]]; then + failures+=("typecheck") + fi + + if [[ "${CODE_CHANGED}" == "true" && "${BUILD_RESULT}" != "success" ]]; then + failures+=("build") + fi + + if [[ "${CODE_CHANGED}" == "true" && "${UNIT_LINUX_RESULT}" != "success" ]]; then + failures+=("unit-linux") + fi + + if [[ "${CODE_CHANGED}" == "true" && "${UNIT_WINDOWS_RESULT}" != "success" ]]; then + failures+=("unit-windows") fi if [[ "${DOCS_CHANGED}" == "true" && "${DOCS_RESULT}" != "success" ]]; then diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 
3dc9e207..c1f9e498 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -121,7 +121,7 @@ jobs: node: 18 - os: ubuntu-latest node: 22 - - os: windows-latest + - os: macos-latest node: 20 steps: - name: Check out repository diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index aef7cbd7..d08cf477 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -72,6 +72,24 @@ Pull requests are automatically screened for incomplete or suspicious submission If a PR is flagged incorrectly, a maintainer can override the workflow with the `exempt` label after review. +## CI and PR Checks + +The repository uses split required and advisory PR lanes: + +- Required code validation: `lint`, `typecheck`, `build`, `unit (linux)`, and `unit (windows)` +- Required conditional validation: `docs-sanity` for Markdown changes and `actionlint` for workflow changes +- Required policy checks: `required-pr` and `pr-governance` +- Advisory lanes: `coverage`, `compat-matrix`, and `dependency-audit` + +`required-pr` is the aggregate gate that evaluates the required code, docs, and workflow jobs. `pr-governance` separately enforces the PR template and live-verification requirements for auth, request-routing, and storage changes. + +For maintainers, GitHub branch protection should require only: + +- `required-pr` +- `pr-governance` + +Advisory lanes should remain unrequired until their baselines are clean and stable. + ## Reporting Issues When reporting issues, please: diff --git a/README.md b/README.md index 90606019..083c45d4 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,9 @@ Short answers for the most common questions live in [docs/faq.md](docs/faq.md), Contributions are welcome if they keep the project accurate, maintainable, and aligned with its personal-use scope. 
+- Pull requests run split required checks (`lint`, `typecheck`, `build`, Linux and Windows unit tests, docs/workflow validation) plus advisory coverage, compatibility, and dependency-audit lanes. - [Contributing Guide](CONTRIBUTING.md) +- [Testing & CI Notes](docs/development/TESTING.md) - [Code of Conduct](CODE_OF_CONDUCT.md) - [Security Policy](SECURITY.md) diff --git a/docs/development/TESTING.md b/docs/development/TESTING.md index 1ecc9896..4d07589c 100644 --- a/docs/development/TESTING.md +++ b/docs/development/TESTING.md @@ -37,15 +37,20 @@ npm run docs:check Current PR automation is split into required and advisory lanes: -- Required `required-pr`: runs docs verification for Markdown changes, full `lint` + `typecheck` + `build` + `test` validation for code changes, and `actionlint` when workflows change. +- Required `required-pr`: aggregates `lint`, `typecheck`, `build`, `unit (linux)`, and `unit (windows)` for the default Node version, plus `docs-sanity` for Markdown changes and `actionlint` when workflows change. - Required `pr-governance`: enforces the pull request template, compliance checkbox, and a completed live-verification marker for auth/request/storage changes. -- Advisory `PR Advisory`: runs `npm run test:coverage`, a wider compatibility matrix, and `npm run audit:ci`. +- Advisory `PR Advisory`: runs `npm run test:coverage`, a wider compatibility matrix (Ubuntu Node 18 and 22 plus macOS on the default Node version), and `npm run audit:ci`. Notes on the advisory lane: - `npm run test:coverage` is currently informational because the repo baseline is below the configured global coverage thresholds. - `npm run audit:ci` is currently informational because the production dependency audit still reports an unresolved `hono` advisory. 
+Maintainer branch protection should require only: + +- `required-pr` +- `pr-governance` + ## Test Scenarios Matrix ### Scenario 1: Default OpenCode Models (No Custom Config) From ed57187b2322364742a6806d72ae1a8c301e4fd5 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 04:07:46 +0800 Subject: [PATCH 03/38] ci: address PR review edge cases --- .github/workflows/ci.yml | 22 ++++++++++++++-------- scripts/ci/docs-check.js | 19 +++++++++++++++++-- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8ec3d0c..03a4ddec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -64,22 +64,22 @@ jobs: for file in "${files[@]}"; do [[ -z "${file}" ]] && continue + is_docs_markdown=false - case "${file}" in - README.md|CONTRIBUTING.md|CHANGELOG.md|SECURITY.md|config/README.md|docs/**|test/README.md|.github/*.md|.github/ISSUE_TEMPLATE/*.md) - docs_changed=true - ;; - esac + if [[ "${file}" =~ ^(README\.md|CONTRIBUTING\.md|CHANGELOG\.md|SECURITY\.md)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then + docs_changed=true + is_docs_markdown=true + fi case "${file}" in .github/workflows/**) workflow_changed=true code_changed=true ;; - README.md|CONTRIBUTING.md|CHANGELOG.md|SECURITY.md|config/README.md|docs/**|test/README.md|.github/*.md|.github/ISSUE_TEMPLATE/*.md) - ;; *) - code_changed=true + if [[ "${is_docs_markdown}" != "true" ]]; then + code_changed=true + fi ;; esac done @@ -253,6 +253,7 @@ jobs: - name: Evaluate required checks shell: bash env: + CHANGES_RESULT: ${{ needs.changes.result }} CODE_CHANGED: ${{ needs.changes.outputs.code_changed }} DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }} WORKFLOW_CHANGED: ${{ needs.changes.outputs.workflow_changed }} @@ -267,6 +268,11 @@ jobs: set -euo pipefail failures=() + if [[ "${CHANGES_RESULT}" != "success" ]]; then + echo "Changes detection job did not succeed (result: ${CHANGES_RESULT}). 
Failing gate." + exit 1 + fi + if [[ "${CODE_CHANGED}" == "true" && "${LINT_RESULT}" != "success" ]]; then failures+=("lint") fi diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 45a5a4dd..72005aa2 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -1,6 +1,6 @@ #!/usr/bin/env node -import { access, readdir, readFile } from "node:fs/promises"; +import { access, readdir, readFile, stat } from "node:fs/promises"; import path from "node:path"; const ROOT = process.cwd(); @@ -18,6 +18,17 @@ async function exists(targetPath) { } } +async function getPathType(targetPath) { + try { + const metadata = await stat(targetPath); + if (metadata.isDirectory()) return "directory"; + if (metadata.isFile()) return "file"; + return "other"; + } catch { + return "missing"; + } +} + async function walkMarkdownFiles(dirPath) { const entries = await readdir(dirPath, { withFileTypes: true }); const files = []; @@ -47,12 +58,16 @@ async function collectMarkdownFiles(inputPaths) { for (const inputPath of inputPaths) { const absolutePath = path.resolve(ROOT, inputPath); if (!(await exists(absolutePath))) continue; + + const pathType = await getPathType(absolutePath); const extension = path.extname(absolutePath).toLowerCase(); - if (MARKDOWN_EXTENSIONS.has(extension)) { + if (pathType === "file" && MARKDOWN_EXTENSIONS.has(extension)) { resolved.add(absolutePath); continue; } + if (pathType !== "directory") continue; + const nestedFiles = await walkMarkdownFiles(absolutePath); for (const nestedFile of nestedFiles) resolved.add(nestedFile); } From 4d902713465ea68af006c9c17bcef400d69ac41a Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 04:22:21 +0800 Subject: [PATCH 04/38] ci: harden docs-check markdown parsing --- scripts/ci/docs-check.js | 82 +++++++++++++++++++++++++++++++++++++--- test/docs-check.test.ts | 11 ++++++ 2 files changed, 88 insertions(+), 5 deletions(-) create mode 100644 test/docs-check.test.ts diff --git 
a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 72005aa2..c85473fd 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -2,12 +2,19 @@ import { access, readdir, readFile, stat } from "node:fs/promises"; import path from "node:path"; +import { fileURLToPath } from "node:url"; const ROOT = process.cwd(); const DEFAULT_FILES = ["README.md", "CONTRIBUTING.md", "SECURITY.md", "CHANGELOG.md"]; const DEFAULT_DIRS = [".github", "config", "docs", "test"]; const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); +const __filename = fileURLToPath(import.meta.url); + +function normalizePathForCompare(targetPath) { + const resolved = path.resolve(targetPath); + return process.platform === "win32" ? resolved.toLowerCase() : resolved; +} async function exists(targetPath) { try { @@ -90,15 +97,73 @@ async function collectMarkdownFiles(inputPaths) { return [...resolved].sort(); } -function extractMarkdownLinks(markdown) { +function extractLinkTarget(markdown, startIndex) { + let depth = 1; + let inAngleTarget = false; + let isEscaped = false; + let target = ""; + + for (let index = startIndex; index < markdown.length; index += 1) { + const char = markdown[index]; + + if (isEscaped) { + target += char; + isEscaped = false; + continue; + } + + if (char === "\\") { + target += char; + isEscaped = true; + continue; + } + + if (inAngleTarget) { + target += char; + if (char === ">") inAngleTarget = false; + continue; + } + + if (char === "<" && target.trim().length === 0) { + target += char; + inAngleTarget = true; + continue; + } + + if (char === "(") { + target += char; + depth += 1; + continue; + } + + if (char === ")") { + depth -= 1; + if (depth === 0) { + return target; + } + target += char; + continue; + } + + target += char; + } + + return null; +} + +export function extractMarkdownLinks(markdown) { const stripped = markdown 
.replace(/```[\s\S]*?```/g, "\n") .replace(/`[^`\n]+`/g, "`code`"); - const pattern = /!?\[[^\]]*]\(([^)\n]+)\)/g; + const openerPattern = /!?\[[^\]]*]\(/g; const links = []; - for (const match of stripped.matchAll(pattern)) { - const rawTarget = match[1]?.trim(); + for (const match of stripped.matchAll(openerPattern)) { + const linkStart = (match.index ?? 0) + match[0].length; + const parsedTarget = extractLinkTarget(stripped, linkStart); + if (!parsedTarget) continue; + + const rawTarget = parsedTarget.trim(); if (!rawTarget) continue; let target = rawTarget; @@ -140,6 +205,7 @@ async function validateLink(filePath, linkTarget) { } if (/^https?:\/\//i.test(linkTarget)) return null; + // Site-root links depend on the final docs host; only repo-relative targets are checked here. if (linkTarget.startsWith("/")) return null; const [rawPath] = linkTarget.split(/[?#]/, 1); @@ -183,4 +249,10 @@ async function main() { console.log(`docs-check: verified ${files.length} markdown file(s)`); } -await main(); +const isDirectRun = process.argv[1] + ? 
normalizePathForCompare(process.argv[1]) === normalizePathForCompare(__filename) + : false; + +if (isDirectRun) { + await main(); +} diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts new file mode 100644 index 00000000..aa40bd92 --- /dev/null +++ b/test/docs-check.test.ts @@ -0,0 +1,11 @@ +import { describe, expect, it } from "vitest"; + +describe("docs-check script", () => { + it("keeps balanced parentheses inside markdown link targets", async () => { + const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); + + const markdown = "[Config Guide](docs/guides/config(v2).md)"; + + expect(extractMarkdownLinks(markdown)).toEqual(["docs/guides/config(v2).md"]); + }); +}); From 76b4d7f64389de47559615e2d571491c10cb3295 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 04:31:32 +0800 Subject: [PATCH 05/38] test: cover docs-check link validation --- .github/workflows/ci.yml | 1 + scripts/ci/docs-check.js | 4 +-- test/docs-check.test.ts | 66 +++++++++++++++++++++++++++++++++++++++- 3 files changed, 68 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 03a4ddec..a93eb0fe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -219,6 +219,7 @@ jobs: with: node-version-file: .nvmrc + # docs-check.js is intentionally limited to Node built-ins, so this lane can skip npm ci. 
- name: Verify markdown links and CI badge targets run: npm run docs:check diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index c85473fd..563eed3b 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -11,7 +11,7 @@ const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); const __filename = fileURLToPath(import.meta.url); -function normalizePathForCompare(targetPath) { +export function normalizePathForCompare(targetPath) { const resolved = path.resolve(targetPath); return process.platform === "win32" ? resolved.toLowerCase() : resolved; } @@ -193,7 +193,7 @@ function getWorkflowPathFromUrl(target) { } } -async function validateLink(filePath, linkTarget) { +export async function validateLink(filePath, linkTarget) { if (!linkTarget || linkTarget.startsWith("#")) return null; if (/^(mailto:|tel:|data:)/i.test(linkTarget)) return null; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index aa40bd92..ae6300b9 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,4 +1,30 @@ -import { describe, expect, it } from "vitest"; +import { mkdir, mkdtemp, rm, writeFile } from "node:fs/promises"; +import path from "node:path"; +import { tmpdir } from "node:os"; +import { afterEach, describe, expect, it } from "vitest"; + +const tempRoots = []; + +afterEach(async () => { + await Promise.all(tempRoots.splice(0).map((root) => rm(root, { recursive: true, force: true }))); +}); + +async function createDocsFixture() { + const root = await mkdtemp(path.join(tmpdir(), "docs-check-")); + tempRoots.push(root); + + const docsDir = path.join(root, "docs"); + const targetsDir = path.join(docsDir, "targets"); + await mkdir(targetsDir, { recursive: true }); + + const docsFile = path.join(docsDir, "guide.md"); + await writeFile(docsFile, "# Guide\n", "utf8"); + + const existingTarget = path.join(targetsDir, "exists.md"); + await 
writeFile(existingTarget, "# Target\n", "utf8"); + + return { docsFile }; +} describe("docs-check script", () => { it("keeps balanced parentheses inside markdown link targets", async () => { @@ -8,4 +34,42 @@ describe("docs-check script", () => { expect(extractMarkdownLinks(markdown)).toEqual(["docs/guides/config(v2).md"]); }); + + it("skips anchor-only and external links", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture(); + + await expect(validateLink(docsFile, "#section")).resolves.toBeNull(); + await expect(validateLink(docsFile, "https://example.com/docs")).resolves.toBeNull(); + }); + + it("reports missing workflow badge targets", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture(); + + await expect( + validateLink( + docsFile, + "https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/does-not-exist.yml/badge.svg", + ), + ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: does-not-exist.yml"); + }); + + it("resolves relative local targets from the markdown file directory", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture(); + + await expect(validateLink(docsFile, "./targets/exists.md")).resolves.toBeNull(); + await expect(validateLink(docsFile, "./targets/missing.md")).resolves.toBe("Missing local target: ./targets/missing.md"); + }); + + it("normalizes direct-run paths consistently for the current platform", async () => { + const { normalizePathForCompare } = await import("../scripts/ci/docs-check.js"); + + const input = process.platform === "win32" ? "C:\\Temp\\Example\\..\\Test.js" : "./scripts/../README.md"; + const resolved = path.resolve(input); + const expected = process.platform === "win32" ? 
resolved.toLowerCase() : resolved; + + expect(normalizePathForCompare(input)).toBe(expected); + }); }); From b3dce2ccf077eb5e63494d7286f04692861095f1 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 04:43:47 +0800 Subject: [PATCH 06/38] fix: tighten docs-check and advisory PR lanes --- .github/workflows/ci.yml | 2 +- .github/workflows/pr-advisory.yml | 43 ++++++++++++++++++++---- scripts/ci/docs-check.js | 55 ++++++++++++++++++++++--------- test/docs-check.test.ts | 25 +++++++++++--- 4 files changed, 98 insertions(+), 27 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a93eb0fe..371e58d0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,7 +72,7 @@ jobs: fi case "${file}" in - .github/workflows/**) + .github/workflows/*) workflow_changed=true code_changed=true ;; diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index c1f9e498..a50f60ae 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -26,6 +26,7 @@ jobs: runs-on: ubuntu-latest outputs: dependency_changed: ${{ steps.detect.outputs.dependency_changed }} + code_changed: ${{ steps.detect.outputs.code_changed }} steps: - name: Check out repository uses: actions/checkout@v6 @@ -43,26 +44,52 @@ jobs: set -euo pipefail dependency_changed=false + code_changed=false + files=() if [[ "${EVENT_NAME}" == "schedule" || "${EVENT_NAME}" == "workflow_dispatch" ]]; then dependency_changed=true + code_changed=true elif [[ "${EVENT_NAME}" == "pull_request" ]]; then git fetch --no-tags --depth=1 origin "${BASE_REF}" - if git diff --name-only "origin/${BASE_REF}...HEAD" | grep -Eq '^(package\.json|package-lock\.json)$'; then - dependency_changed=true - fi + while IFS= read -r file; do + files+=("${file}") + done < <(git diff --name-only "origin/${BASE_REF}...HEAD") elif [[ -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then - if git diff --name-only 
"${BEFORE_SHA}...HEAD" | grep -Eq '^(package\.json|package-lock\.json)$'; then - dependency_changed=true - fi + while IFS= read -r file; do + files+=("${file}") + done < <(git diff --name-only "${BEFORE_SHA}...HEAD") else dependency_changed=true + code_changed=true fi - echo "dependency_changed=${dependency_changed}" >> "${GITHUB_OUTPUT}" + for file in "${files[@]}"; do + [[ -z "${file}" ]] && continue + is_docs_markdown=false + + if [[ "${file}" =~ ^(README\.md|CONTRIBUTING\.md|CHANGELOG\.md|SECURITY\.md)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then + is_docs_markdown=true + fi + + if [[ "${file}" =~ ^(package\.json|package-lock\.json)$ ]]; then + dependency_changed=true + fi + + if [[ "${is_docs_markdown}" != "true" ]]; then + code_changed=true + fi + done + + { + echo "dependency_changed=${dependency_changed}" + echo "code_changed=${code_changed}" + } >> "${GITHUB_OUTPUT}" coverage: name: coverage + needs: detect-dependency-change + if: needs.detect-dependency-change.outputs.code_changed == 'true' runs-on: ubuntu-latest timeout-minutes: 30 steps: @@ -111,6 +138,8 @@ jobs: compat-matrix: name: compat-matrix (${{ matrix.os }}, Node ${{ matrix.node }}) + needs: detect-dependency-change + if: needs.detect-dependency-change.outputs.code_changed == 'true' runs-on: ${{ matrix.os }} timeout-minutes: 30 strategy: diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 563eed3b..da07fccd 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -16,6 +16,28 @@ export function normalizePathForCompare(targetPath) { return process.platform === "win32" ? 
resolved.toLowerCase() : resolved; } +function normalizeReferenceLabel(label) { + return label.trim().replace(/\s+/g, " ").toLowerCase(); +} + +function normalizeLinkTarget(rawTarget) { + if (!rawTarget) return null; + + let target = rawTarget.trim(); + if (!target) return null; + + if (target.startsWith("<") && target.endsWith(">")) { + target = target.slice(1, -1).trim(); + } + + const spacedTarget = target.match(/^(\S+)\s+["'(].*$/); + if (spacedTarget?.[1]) { + target = spacedTarget[1]; + } + + return target || null; +} + async function exists(targetPath) { try { await access(targetPath); @@ -156,29 +178,32 @@ export function extractMarkdownLinks(markdown) { .replace(/```[\s\S]*?```/g, "\n") .replace(/`[^`\n]+`/g, "`code`"); const openerPattern = /!?\[[^\]]*]\(/g; + const referencePattern = /!?\[([^\]]+)]\[([^\]]*)]/g; + const referenceDefinitionPattern = /^\s{0,3}\[([^\]]+)]:\s+(.+)$/gm; const links = []; + const referenceDefinitions = new Map(); + + for (const match of stripped.matchAll(referenceDefinitionPattern)) { + const label = normalizeReferenceLabel(match[1] ?? ""); + const target = normalizeLinkTarget(match[2] ?? ""); + if (!label || !target) continue; + referenceDefinitions.set(label, target); + } for (const match of stripped.matchAll(openerPattern)) { const linkStart = (match.index ?? 0) + match[0].length; const parsedTarget = extractLinkTarget(stripped, linkStart); - if (!parsedTarget) continue; - - const rawTarget = parsedTarget.trim(); - if (!rawTarget) continue; - - let target = rawTarget; - if (target.startsWith("<") && target.endsWith(">")) { - target = target.slice(1, -1).trim(); - } - - const spacedTarget = target.match(/^(\S+)\s+["'(].*$/); - if (spacedTarget?.[1]) { - target = spacedTarget[1]; - } - + const target = normalizeLinkTarget(parsedTarget); + if (!target) continue; links.push(target); } + for (const match of stripped.matchAll(referencePattern)) { + const label = match[2]?.trim() ? 
match[2] : match[1]; + const referenceTarget = referenceDefinitions.get(normalizeReferenceLabel(label ?? "")); + if (referenceTarget) links.push(referenceTarget); + } + return links; } diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index ae6300b9..10cc3e09 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,4 +1,4 @@ -import { mkdir, mkdtemp, rm, writeFile } from "node:fs/promises"; +import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises"; import path from "node:path"; import { tmpdir } from "node:os"; import { afterEach, describe, expect, it } from "vitest"; @@ -6,10 +6,17 @@ import { afterEach, describe, expect, it } from "vitest"; const tempRoots = []; afterEach(async () => { - await Promise.all(tempRoots.splice(0).map((root) => rm(root, { recursive: true, force: true }))); + await Promise.all( + tempRoots.splice(0).map((root) => + rm(root, { recursive: true, force: true }).catch((error) => { + const message = error instanceof Error ? 
error.message : String(error); + console.warn(`[docs-check test] failed to clean up ${root}: ${message}`); + }), + ), + ); }); -async function createDocsFixture() { +async function createDocsFixture(markdown = "# Guide\n") { const root = await mkdtemp(path.join(tmpdir(), "docs-check-")); tempRoots.push(root); @@ -18,7 +25,7 @@ async function createDocsFixture() { await mkdir(targetsDir, { recursive: true }); const docsFile = path.join(docsDir, "guide.md"); - await writeFile(docsFile, "# Guide\n", "utf8"); + await writeFile(docsFile, markdown, "utf8"); const existingTarget = path.join(targetsDir, "exists.md"); await writeFile(existingTarget, "# Target\n", "utf8"); @@ -72,4 +79,14 @@ describe("docs-check script", () => { expect(normalizePathForCompare(input)).toBe(expected); }); + + it("extracts reference-style definitions so missing targets are still caught", async () => { + const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture("[Config Guide][config]\n\n[config]: ./targets/missing.md\n"); + const markdown = await readFile(docsFile, "utf8"); + const [referenceTarget] = extractMarkdownLinks(markdown); + + expect(referenceTarget).toBe("./targets/missing.md"); + await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); + }); }); From 18ec2e3df750dd95f306cbd560cc07927743f993 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 04:55:37 +0800 Subject: [PATCH 07/38] fix: bound workflow gates and test temp roots --- .github/workflows/ci.yml | 2 ++ .github/workflows/pr-governance.yml | 3 +++ test/docs-check.test.ts | 2 +- 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 371e58d0..8f3e4624 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -210,6 +210,7 @@ jobs: needs: changes if: needs.changes.outputs.docs_changed == 'true' runs-on: 
ubuntu-latest + timeout-minutes: 10 steps: - name: Check out repository uses: actions/checkout@v6 @@ -228,6 +229,7 @@ jobs: needs: changes if: needs.changes.outputs.workflow_changed == 'true' runs-on: ubuntu-latest + timeout-minutes: 10 steps: - name: Check out repository uses: actions/checkout@v6 diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 9656519b..dde710e4 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -20,6 +20,9 @@ concurrency: jobs: pr-governance: + # SECURITY: this job uses pull_request_target so the GitHub token carries + # issues: write even for fork PRs. DO NOT add a checkout of the PR head ref + # here - doing so would let a fork PR execute arbitrary code with write access. name: pr-governance runs-on: ubuntu-latest steps: diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 10cc3e09..0ee9e188 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -3,7 +3,7 @@ import path from "node:path"; import { tmpdir } from "node:os"; import { afterEach, describe, expect, it } from "vitest"; -const tempRoots = []; +const tempRoots: string[] = []; afterEach(async () => { await Promise.all( From 0d2c6126578c371f1c39ce68ac779569574f6c81 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 05:01:03 +0800 Subject: [PATCH 08/38] fix: harden PR governance and docs checks --- .github/pull_request_template.md | 2 +- .github/workflows/pr-advisory.yml | 2 +- .github/workflows/pr-governance.yml | 66 ++++++++++++++++++++++------- CONTRIBUTING.md | 2 +- scripts/ci/docs-check.js | 17 +++++--- test/docs-check.test.ts | 10 +++++ 6 files changed, 74 insertions(+), 25 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index ccbe5be0..6e603d37 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,7 +26,7 @@ - [ ] I updated tests and documentation when the change affected 
users, maintainers, or repository behavior. - [ ] No auth, request-routing, or storage paths changed. - [ ] I manually tested with a real ChatGPT Plus/Pro account. -- [ ] Maintainer live verification completed. +- Maintainers can apply the `maintainer-live-verified` label after independent live verification. ## Notes diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index a50f60ae..3d297880 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -68,7 +68,7 @@ jobs: [[ -z "${file}" ]] && continue is_docs_markdown=false - if [[ "${file}" =~ ^(README\.md|CONTRIBUTING\.md|CHANGELOG\.md|SECURITY\.md)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then + if [[ "${file}" =~ ^[^/]+\.(md|markdown)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then is_docs_markdown=true fi diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index dde710e4..bd73e759 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -5,8 +5,10 @@ on: types: - opened - edited + - labeled - reopened - synchronize + - unlabeled - ready_for_review permissions: @@ -34,6 +36,11 @@ jobs: const repo = context.repo.repo; const issueNumber = context.payload.pull_request.number; const body = context.payload.pull_request.body || ""; + const currentLabels = new Set( + (context.payload.pull_request.labels ?? 
[]) + .map((label) => label?.name) + .filter((name) => typeof name === "string"), + ); const files = await github.paginate(github.rest.pulls.listFiles, { owner, repo, @@ -50,7 +57,13 @@ jobs: /^lib\/recovery\/storage\.ts$/, ]; - const riskyPaths = changedFiles.filter((file) => riskyMatchers.some((matcher) => matcher.test(file))); + const riskyPaths = files + .filter((file) => + [file.filename, file.previous_filename].some( + (candidatePath) => candidatePath && riskyMatchers.some((matcher) => matcher.test(candidatePath)) + ) + ) + .map((file) => file.filename); const liveVerificationRequired = riskyPaths.length > 0; const requiredHeadings = [ @@ -63,34 +76,46 @@ jobs: const missingSections = requiredHeadings.filter((pattern) => !pattern.test(body)); const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(body); + const officialAuthChecked = /- \[x\] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior\./i.test(body); + const testsDocsChecked = /- \[x\] I updated tests and documentation when the change affected users, maintainers, or repository behavior\./i.test(body); const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(body); const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT Plus\/Pro account\./i.test(body); - const maintainerLiveChecked = /- \[x\] Maintainer live verification completed\./i.test(body); + const liveVerificationLabelName = "needs-live-verification"; + const maintainerVerifiedLabelName = "maintainer-live-verified"; + const maintainerLiveVerified = currentLabels.has(maintainerVerifiedLabelName); - const labelName = "needs-live-verification"; - - async function ensureLabel() { + async function ensureLabel(name, color, description) { try { - await github.rest.issues.getLabel({ owner, repo, name: labelName }); + await github.rest.issues.getLabel({ 
owner, repo, name }); } catch (error) { if (error.status !== 404) throw error; await github.rest.issues.createLabel({ owner, repo, - name: labelName, - color: "b60205", - description: "Manual ChatGPT Plus/Pro verification required before merge", + name, + color, + description, }); } } + await ensureLabel( + maintainerVerifiedLabelName, + "0e8a16", + "Maintainer completed independent live ChatGPT Plus/Pro verification", + ); + if (liveVerificationRequired) { - await ensureLabel(); + await ensureLabel( + liveVerificationLabelName, + "b60205", + "Manual ChatGPT Plus/Pro verification required before merge", + ); await github.rest.issues.addLabels({ owner, repo, issue_number: issueNumber, - labels: [labelName], + labels: [liveVerificationLabelName], }); } else { try { @@ -98,7 +123,7 @@ jobs: owner, repo, issue_number: issueNumber, - name: labelName, + name: liveVerificationLabelName, }); } catch (error) { if (error.status !== 404) throw error; @@ -115,12 +140,20 @@ jobs: failures.push("The compliance checkbox must be checked."); } + if (!officialAuthChecked) { + failures.push("The official-authentication checkbox must be checked."); + } + + if (!testsDocsChecked) { + failures.push("The tests/documentation checkbox must be checked."); + } + if (liveVerificationRequired) { - if (!manualLiveChecked && !maintainerLiveChecked) { - failures.push("This PR touches auth/request/storage paths and needs a completed live verification checkbox."); + if (!manualLiveChecked && !maintainerLiveVerified) { + failures.push("This PR touches auth/request/storage paths and needs either the manual live-test checkbox or the maintainer-live-verified label."); } - } else if (!noLiveRequiredChecked && !manualLiveChecked && !maintainerLiveChecked) { - failures.push("Choose the matching live-validation state in the PR template."); + } else if (!noLiveRequiredChecked && !manualLiveChecked && !maintainerLiveVerified) { + failures.push("Choose the matching live-validation state or apply the 
maintainer-live-verified label."); } const summaryLines = [ @@ -128,6 +161,7 @@ jobs: "", `- Changed files: ${changedFiles.length}`, `- Live verification required: ${liveVerificationRequired ? "yes" : "no"}`, + `- Maintainer verification label present: ${maintainerLiveVerified ? "yes" : "no"}`, ]; if (riskyPaths.length > 0) { diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d08cf477..021f5bd6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -81,7 +81,7 @@ The repository uses split required and advisory PR lanes: - Required policy checks: `required-pr` and `pr-governance` - Advisory lanes: `coverage`, `compat-matrix`, and `dependency-audit` -`required-pr` is the aggregate gate that evaluates the required code, docs, and workflow jobs. `pr-governance` separately enforces the PR template and live-verification requirements for auth, request-routing, and storage changes. +`required-pr` is the aggregate gate that evaluates the required code, docs, and workflow jobs. `pr-governance` separately enforces the PR template and live-verification requirements for auth, request-routing, and storage changes. Maintainers can record an independent live check by applying the `maintainer-live-verified` label. 
For maintainers, GitHub branch protection should require only: diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index da07fccd..c241103d 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -26,13 +26,18 @@ function normalizeLinkTarget(rawTarget) { let target = rawTarget.trim(); if (!target) return null; - if (target.startsWith("<") && target.endsWith(">")) { - target = target.slice(1, -1).trim(); - } + const angleTargetWithOptionalTitle = target.match(/^<([^>]+)>(?:\s+["'(].*)?$/); + if (angleTargetWithOptionalTitle?.[1]) { + target = angleTargetWithOptionalTitle[1].trim(); + } else { + const spacedTarget = target.match(/^(\S+)\s+["'(].*$/); + if (spacedTarget?.[1]) { + target = spacedTarget[1]; + } - const spacedTarget = target.match(/^(\S+)\s+["'(].*$/); - if (spacedTarget?.[1]) { - target = spacedTarget[1]; + if (target.startsWith("<") && target.endsWith(">")) { + target = target.slice(1, -1).trim(); + } } return target || null; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 0ee9e188..fd1665a5 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -89,4 +89,14 @@ describe("docs-check script", () => { expect(referenceTarget).toBe("./targets/missing.md"); await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); }); + + it("accepts angle-bracket targets that include an optional title", async () => { + const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture('[Config Guide](<./targets/exists.md> "Config target")\n'); + const markdown = await readFile(docsFile, "utf8"); + const [linkTarget] = extractMarkdownLinks(markdown); + + expect(linkTarget).toBe("./targets/exists.md"); + await expect(validateLink(docsFile, linkTarget)).resolves.toBeNull(); + }); }); From 3c1de9fab46961f29b2c0082f05ca212b94094f6 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 
2026 05:10:11 +0800 Subject: [PATCH 09/38] fix: tighten docs-check root boundaries --- .github/workflows/pr-governance.yml | 20 +++++++++++++------- scripts/ci/docs-check.js | 5 +++++ test/docs-check.test.ts | 8 +++++++- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index bd73e759..77c1a4e8 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -89,13 +89,19 @@ jobs: await github.rest.issues.getLabel({ owner, repo, name }); } catch (error) { if (error.status !== 404) throw error; - await github.rest.issues.createLabel({ - owner, - repo, - name, - color, - description, - }); + try { + await github.rest.issues.createLabel({ + owner, + repo, + name, + color, + description, + }); + } catch (createError) { + if (createError.status === 422) return; + if (String(createError.message ?? "").includes("already_exists")) return; + throw createError; + } } } diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index c241103d..527a5909 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -242,6 +242,11 @@ export async function validateLink(filePath, linkTarget) { if (!rawPath) return null; const resolvedPath = path.resolve(path.dirname(filePath), rawPath); + const relativeToRoot = path.relative(ROOT, resolvedPath); + if (relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot)) { + return `Local target escapes repository root: ${rawPath}`; + } + if (await exists(resolvedPath)) return null; return `Missing local target: ${rawPath}`; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index fd1665a5..4738dbc5 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -17,7 +17,10 @@ afterEach(async () => { }); async function createDocsFixture(markdown = "# Guide\n") { - const root = await mkdtemp(path.join(tmpdir(), "docs-check-")); + const repoTempDir = path.join(process.cwd(), "tmp"); + 
await mkdir(repoTempDir, { recursive: true }); + + const root = await mkdtemp(path.join(repoTempDir, "docs-check-")); tempRoots.push(root); const docsDir = path.join(root, "docs"); @@ -68,6 +71,9 @@ describe("docs-check script", () => { await expect(validateLink(docsFile, "./targets/exists.md")).resolves.toBeNull(); await expect(validateLink(docsFile, "./targets/missing.md")).resolves.toBe("Missing local target: ./targets/missing.md"); + await expect(validateLink(docsFile, "../../../../outside.md")).resolves.toBe( + "Local target escapes repository root: ../../../../outside.md", + ); }); it("normalizes direct-run paths consistently for the current platform", async () => { From 44e2b19b8ceb809527bd2f47f1f3c797cf65958f Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 05:17:24 +0800 Subject: [PATCH 10/38] fix: add workflow timeout guards --- .github/workflows/pr-advisory.yml | 1 + .github/workflows/pr-governance.yml | 1 + test/docs-check.test.ts | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 3d297880..95ec4cca 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -176,6 +176,7 @@ jobs: needs: detect-dependency-change if: needs.detect-dependency-change.outputs.dependency_changed == 'true' runs-on: ubuntu-latest + timeout-minutes: 15 steps: - name: Check out repository uses: actions/checkout@v6 diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 77c1a4e8..ebcee0f6 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -27,6 +27,7 @@ jobs: # here - doing so would let a fork PR execute arbitrary code with write access. 
name: pr-governance runs-on: ubuntu-latest + timeout-minutes: 5 steps: - name: Validate PR template and live verification markers uses: actions/github-script@v8 diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 4738dbc5..50f5f89b 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,6 +1,5 @@ import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises"; import path from "node:path"; -import { tmpdir } from "node:os"; import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; @@ -17,6 +16,8 @@ afterEach(async () => { }); async function createDocsFixture(markdown = "# Guide\n") { + // docs-check resolves local links against process.cwd(), so fixtures must live + // under the repo root for relative-link validation to exercise real behavior. const repoTempDir = path.join(process.cwd(), "tmp"); await mkdir(repoTempDir, { recursive: true }); From 08226fbca63cd3e0ef217acc6f0118a0011054de Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 05:26:09 +0800 Subject: [PATCH 11/38] fix: scope workflow badge checks --- .github/workflows/ci.yml | 1 + .github/workflows/pr-advisory.yml | 1 + scripts/ci/docs-check.js | 11 +++++++++-- test/docs-check.test.ts | 6 ++++++ 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8f3e4624..ab8479ab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,6 +22,7 @@ jobs: changes: name: Detect changes runs-on: ubuntu-latest + timeout-minutes: 10 outputs: code_changed: ${{ steps.detect.outputs.code_changed }} docs_changed: ${{ steps.detect.outputs.docs_changed }} diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 95ec4cca..c0332789 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -24,6 +24,7 @@ jobs: detect-dependency-change: name: detect-dependency-change runs-on: ubuntu-latest + 
timeout-minutes: 10 outputs: dependency_changed: ${{ steps.detect.outputs.dependency_changed }} code_changed: ${{ steps.detect.outputs.code_changed }} diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 527a5909..077fdfc0 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -10,6 +10,7 @@ const DEFAULT_DIRS = [".github", "config", "docs", "test"]; const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); const __filename = fileURLToPath(import.meta.url); +const REPOSITORY = process.env.GITHUB_REPOSITORY ?? "ndycode/oc-chatgpt-multi-auth"; export function normalizePathForCompare(targetPath) { const resolved = path.resolve(targetPath); @@ -216,8 +217,14 @@ function getWorkflowPathFromUrl(target) { try { const url = new URL(target); if (!["github.com", "www.github.com"].includes(url.hostname)) return null; - const match = url.pathname.match(/\/actions\/workflows\/([^/]+)(?:\/badge\.svg)?$/); - return match?.[1] ?? 
null; + const match = url.pathname.match(/^\/([^/]+)\/([^/]+)\/actions\/workflows\/([^/]+)(?:\/badge\.svg)?$/); + if (!match) return null; + + const [, ownerFromUrl, repoFromUrl, workflowFile] = match; + const [owner, repo] = REPOSITORY.split("/"); + if (ownerFromUrl !== owner || repoFromUrl !== repo) return null; + + return workflowFile; } catch { return null; } diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 50f5f89b..acab98b5 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -64,6 +64,12 @@ describe("docs-check script", () => { "https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/does-not-exist.yml/badge.svg", ), ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: does-not-exist.yml"); + await expect( + validateLink( + docsFile, + "https://github.com/octocat/hello-world/actions/workflows/ci.yml/badge.svg", + ), + ).resolves.toBeNull(); }); it("resolves relative local targets from the markdown file directory", async () => { From f1a8dfc24e4ebc10c4d11f6ec19b5438066582a8 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 05:44:24 +0800 Subject: [PATCH 12/38] fix: address docs check review follow-ups --- .github/workflows/pr-governance.yml | 2 +- scripts/ci/docs-check.js | 39 +++++++++++++++------------- test/docs-check.test.ts | 40 +++++++++++++++++++++-------- 3 files changed, 52 insertions(+), 29 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index ebcee0f6..0da11b1d 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -160,7 +160,7 @@ jobs: failures.push("This PR touches auth/request/storage paths and needs either the manual live-test checkbox or the maintainer-live-verified label."); } } else if (!noLiveRequiredChecked && !manualLiveChecked && !maintainerLiveVerified) { - failures.push("Choose the matching live-validation state or apply the maintainer-live-verified label."); + 
failures.push('Check either "No auth, request-routing, or storage paths changed." or "I manually tested with a real ChatGPT Plus/Pro account.", or have a maintainer apply the maintainer-live-verified label.'); } const summaryLines = [ diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 077fdfc0..e67a3f4c 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -4,7 +4,6 @@ import { access, readdir, readFile, stat } from "node:fs/promises"; import path from "node:path"; import { fileURLToPath } from "node:url"; -const ROOT = process.cwd(); const DEFAULT_FILES = ["README.md", "CONTRIBUTING.md", "SECURITY.md", "CHANGELOG.md"]; const DEFAULT_DIRS = [".github", "config", "docs", "test"]; const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); @@ -12,6 +11,10 @@ const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node const __filename = fileURLToPath(import.meta.url); const REPOSITORY = process.env.GITHUB_REPOSITORY ?? "ndycode/oc-chatgpt-multi-auth"; +function getRootDir() { + return process.cwd(); +} + export function normalizePathForCompare(targetPath) { const resolved = path.resolve(targetPath); return process.platform === "win32" ? 
resolved.toLowerCase() : resolved; @@ -64,17 +67,17 @@ async function getPathType(targetPath) { } } -async function walkMarkdownFiles(dirPath) { +async function walkMarkdownFiles(dirPath, rootDir = getRootDir()) { const entries = await readdir(dirPath, { withFileTypes: true }); const files = []; for (const entry of entries) { const absolutePath = path.join(dirPath, entry.name); - const relativePath = path.relative(ROOT, absolutePath).replace(/\\/g, "/"); + const relativePath = path.relative(rootDir, absolutePath).replace(/\\/g, "/"); if (entry.isDirectory()) { if (IGNORED_DIRS.has(relativePath) || IGNORED_DIRS.has(entry.name)) continue; - files.push(...(await walkMarkdownFiles(absolutePath))); + files.push(...(await walkMarkdownFiles(absolutePath, rootDir))); continue; } @@ -86,12 +89,12 @@ async function walkMarkdownFiles(dirPath) { return files; } -async function collectMarkdownFiles(inputPaths) { +async function collectMarkdownFiles(inputPaths, rootDir = getRootDir()) { const resolved = new Set(); if (inputPaths.length > 0) { for (const inputPath of inputPaths) { - const absolutePath = path.resolve(ROOT, inputPath); + const absolutePath = path.resolve(rootDir, inputPath); if (!(await exists(absolutePath))) continue; const pathType = await getPathType(absolutePath); @@ -103,7 +106,7 @@ async function collectMarkdownFiles(inputPaths) { if (pathType !== "directory") continue; - const nestedFiles = await walkMarkdownFiles(absolutePath); + const nestedFiles = await walkMarkdownFiles(absolutePath, rootDir); for (const nestedFile of nestedFiles) resolved.add(nestedFile); } @@ -111,14 +114,14 @@ async function collectMarkdownFiles(inputPaths) { } for (const file of DEFAULT_FILES) { - const absolutePath = path.join(ROOT, file); + const absolutePath = path.join(rootDir, file); if (await exists(absolutePath)) resolved.add(absolutePath); } for (const dir of DEFAULT_DIRS) { - const absolutePath = path.join(ROOT, dir); + const absolutePath = path.join(rootDir, dir); if 
(!(await exists(absolutePath))) continue; - const nestedFiles = await walkMarkdownFiles(absolutePath); + const nestedFiles = await walkMarkdownFiles(absolutePath, rootDir); for (const nestedFile of nestedFiles) resolved.add(nestedFile); } @@ -222,7 +225,7 @@ function getWorkflowPathFromUrl(target) { const [, ownerFromUrl, repoFromUrl, workflowFile] = match; const [owner, repo] = REPOSITORY.split("/"); - if (ownerFromUrl !== owner || repoFromUrl !== repo) return null; + if (ownerFromUrl.toLowerCase() !== owner?.toLowerCase() || repoFromUrl.toLowerCase() !== repo?.toLowerCase()) return null; return workflowFile; } catch { @@ -230,13 +233,13 @@ function getWorkflowPathFromUrl(target) { } } -export async function validateLink(filePath, linkTarget) { +export async function validateLink(filePath, linkTarget, rootDir = getRootDir()) { if (!linkTarget || linkTarget.startsWith("#")) return null; if (/^(mailto:|tel:|data:)/i.test(linkTarget)) return null; const workflowFile = getWorkflowPathFromUrl(linkTarget); if (workflowFile) { - const workflowPath = path.join(ROOT, ".github", "workflows", workflowFile); + const workflowPath = path.join(rootDir, ".github", "workflows", workflowFile); if (await exists(workflowPath)) return null; return `Missing workflow referenced by GitHub Actions badge/link: ${workflowFile}`; } @@ -249,7 +252,7 @@ export async function validateLink(filePath, linkTarget) { if (!rawPath) return null; const resolvedPath = path.resolve(path.dirname(filePath), rawPath); - const relativeToRoot = path.relative(ROOT, resolvedPath); + const relativeToRoot = path.relative(rootDir, resolvedPath); if (relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot)) { return `Local target escapes repository root: ${rawPath}`; } @@ -259,8 +262,8 @@ export async function validateLink(filePath, linkTarget) { return `Missing local target: ${rawPath}`; } -async function main() { - const files = await collectMarkdownFiles(process.argv.slice(2)); +async function 
main(rootDir = getRootDir()) { + const files = await collectMarkdownFiles(process.argv.slice(2), rootDir); if (files.length === 0) { console.log("docs-check: no markdown files found"); return; @@ -273,9 +276,9 @@ async function main() { const links = extractMarkdownLinks(contents); for (const link of links) { - const error = await validateLink(filePath, link); + const error = await validateLink(filePath, link, rootDir); if (!error) continue; - failures.push(`${path.relative(ROOT, filePath).replace(/\\/g, "/")}: ${error} (${link})`); + failures.push(`${path.relative(rootDir, filePath).replace(/\\/g, "/")}: ${error} (${link})`); } } diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index acab98b5..91b1a90d 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,18 +1,31 @@ import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises"; import path from "node:path"; +import { setTimeout as delay } from "node:timers/promises"; import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; +const TEMP_CLEANUP_ATTEMPTS = 3; +const TEMP_CLEANUP_DELAY_MS = 100; + +async function cleanupTempRoot(root: string) { + for (let attempt = 1; attempt <= TEMP_CLEANUP_ATTEMPTS; attempt += 1) { + try { + await rm(root, { recursive: true, force: true }); + return; + } catch (error) { + if (attempt === TEMP_CLEANUP_ATTEMPTS) { + const message = error instanceof Error ? error.message : String(error); + console.warn(`[docs-check test] failed to clean up ${root} after ${TEMP_CLEANUP_ATTEMPTS} attempts: ${message}`); + return; + } + + await delay(TEMP_CLEANUP_DELAY_MS); + } + } +} afterEach(async () => { - await Promise.all( - tempRoots.splice(0).map((root) => - rm(root, { recursive: true, force: true }).catch((error) => { - const message = error instanceof Error ? 
error.message : String(error); - console.warn(`[docs-check test] failed to clean up ${root}: ${message}`); - }), - ), - ); + await Promise.all(tempRoots.splice(0).map((root) => cleanupTempRoot(root))); }); async function createDocsFixture(markdown = "# Guide\n") { @@ -34,7 +47,7 @@ async function createDocsFixture(markdown = "# Guide\n") { const existingTarget = path.join(targetsDir, "exists.md"); await writeFile(existingTarget, "# Target\n", "utf8"); - return { docsFile }; + return { docsFile, root }; } describe("docs-check script", () => { @@ -64,6 +77,12 @@ describe("docs-check script", () => { "https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/does-not-exist.yml/badge.svg", ), ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: does-not-exist.yml"); + await expect( + validateLink( + docsFile, + "https://github.com/NdyCode/OC-ChatGPT-Multi-Auth/actions/workflows/does-not-exist.yml/badge.svg", + ), + ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: does-not-exist.yml"); await expect( validateLink( docsFile, @@ -74,9 +93,10 @@ describe("docs-check script", () => { it("resolves relative local targets from the markdown file directory", async () => { const { validateLink } = await import("../scripts/ci/docs-check.js"); - const { docsFile } = await createDocsFixture(); + const { docsFile, root } = await createDocsFixture(); await expect(validateLink(docsFile, "./targets/exists.md")).resolves.toBeNull(); + await expect(validateLink(docsFile, "./targets/exists.md", root)).resolves.toBeNull(); await expect(validateLink(docsFile, "./targets/missing.md")).resolves.toBe("Missing local target: ./targets/missing.md"); await expect(validateLink(docsFile, "../../../../outside.md")).resolves.toBe( "Local target escapes repository root: ../../../../outside.md", From 4f2df69e97694ec51e4131402958264329142b87 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 11:45:15 +0800 Subject: [PATCH 13/38] fix: 
ignore html comment links in docs check --- scripts/ci/docs-check.js | 1 + test/docs-check.test.ts | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index e67a3f4c..4a8c2e85 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -184,6 +184,7 @@ function extractLinkTarget(markdown, startIndex) { export function extractMarkdownLinks(markdown) { const stripped = markdown + .replace(/<!--[\s\S]*?-->/g, "") .replace(/```[\s\S]*?```/g, "\n") .replace(/`[^`\n]+`/g, "`code`"); const openerPattern = /!?\[[^\]]*]\(/g; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 91b1a90d..2434fc32 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -123,6 +123,14 @@ await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); }); + it("ignores links that only appear inside HTML comments", async () => { + const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); + + const markdown = "<!-- [Hidden](./targets/missing.md) -->\n[Config Guide](./targets/exists.md)\n"; + + expect(extractMarkdownLinks(markdown)).toEqual(["./targets/exists.md"]); + }); + it("accepts angle-bracket targets that include an optional title", async () => { const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile } = await createDocsFixture('[Config Guide](<./targets/exists.md> "Config target")\n'); From ec4e8520855af484d6e3d575041ff411de1ea3f6 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 13:48:28 +0800 Subject: [PATCH 14/38] Add CI timeout and clarify advisory/test intent --- .github/workflows/ci.yml | 1 + .github/workflows/pr-advisory.yml | 2 +- test/docs-check.test.ts | 2 ++ 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab8479ab..0143aaee 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml
@@ -253,6 +253,7 @@ jobs: - actionlint if: always() runs-on: ubuntu-latest + timeout-minutes: 5 steps: - name: Evaluate required checks shell: bash diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index c0332789..919bb65b 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -152,7 +152,7 @@ jobs: - os: ubuntu-latest node: 22 - os: macos-latest - node: 20 + node: 20 # Intentionally pins the current default advisory lane; update when .nvmrc changes. steps: - name: Check out repository uses: actions/checkout@v6 diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 2434fc32..0380089b 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -31,6 +31,8 @@ afterEach(async () => { async function createDocsFixture(markdown = "# Guide\n") { // docs-check resolves local links against process.cwd(), so fixtures must live // under the repo root for relative-link validation to exercise real behavior. + // .gitignore excludes tmp/ and tmp* so a leftover retry-cleanup fixture does + // not pollute git status if Windows holds a transient lock on removal. 
const repoTempDir = path.join(process.cwd(), "tmp"); await mkdir(repoTempDir, { recursive: true }); From f3338dab1775606fcc5712369663352244e50d5c Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 13:57:54 +0800 Subject: [PATCH 15/38] Run docs-sanity when CI scripts change --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0143aaee..8dee5c0a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,6 +73,10 @@ jobs: fi case "${file}" in + scripts/ci/*) + docs_changed=true + code_changed=true + ;; .github/workflows/*) workflow_changed=true code_changed=true From b949784b10aa9cf4e32366b7ef91ca36774b7b9d Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:09:56 +0800 Subject: [PATCH 16/38] Harden docs-check link validation contract --- .github/workflows/pr-advisory.yml | 2 +- scripts/ci/docs-check.js | 4 ++++ test/docs-check.test.ts | 8 ++++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 919bb65b..8c90600d 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -56,7 +56,7 @@ jobs: while IFS= read -r file; do files+=("${file}") done < <(git diff --name-only "origin/${BASE_REF}...HEAD") - elif [[ -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then + elif [[ "${EVENT_NAME}" == "push" && -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then while IFS= read -r file; do files+=("${file}") done < <(git diff --name-only "${BEFORE_SHA}...HEAD") diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 4a8c2e85..fffb6339 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -235,6 +235,10 @@ function getWorkflowPathFromUrl(target) { } export async function validateLink(filePath, linkTarget, rootDir = 
getRootDir()) { + if (!path.isAbsolute(filePath)) { + throw new TypeError(`validateLink: filePath must be absolute, got "${filePath}"`); + } + if (!linkTarget || linkTarget.startsWith("#")) return null; if (/^(mailto:|tel:|data:)/i.test(linkTarget)) return null; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 0380089b..64069bea 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -69,6 +69,14 @@ describe("docs-check script", () => { await expect(validateLink(docsFile, "https://example.com/docs")).resolves.toBeNull(); }); + it("requires an absolute markdown file path", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + + await expect(validateLink("docs/guide.md", "./targets/exists.md", process.cwd())).rejects.toThrow( + 'validateLink: filePath must be absolute, got "docs/guide.md"', + ); + }); + it("reports missing workflow badge targets", async () => { const { validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile } = await createDocsFixture(); From df1bc6838d596d1c37fda01311fbc107b4b020bc Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:18:03 +0800 Subject: [PATCH 17/38] Add Windows advisory compat lane --- .github/workflows/pr-advisory.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 8c90600d..f15d557b 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -153,6 +153,8 @@ jobs: node: 22 - os: macos-latest node: 20 # Intentionally pins the current default advisory lane; update when .nvmrc changes. + - os: windows-latest + node: 18 # Verify Windows + LTS compatibility; antivirus locking is a known risk. 
steps: - name: Check out repository uses: actions/checkout@v6 From 38fe3c3b43ccc5cacab0427859113b1543f6ef78 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:29:11 +0800 Subject: [PATCH 18/38] Harden PR governance verification gating --- .github/workflows/pr-governance.yml | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 0da11b1d..01cb4a7d 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -77,13 +77,16 @@ jobs: const missingSections = requiredHeadings.filter((pattern) => !pattern.test(body)); const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(body); + const docsImpactChecked = + /- \[x\] README or docs updated/i.test(body) || + /- \[x\] No docs changes needed/i.test(body); const officialAuthChecked = /- \[x\] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior\./i.test(body); const testsDocsChecked = /- \[x\] I updated tests and documentation when the change affected users, maintainers, or repository behavior\./i.test(body); const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(body); const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT Plus\/Pro account\./i.test(body); const liveVerificationLabelName = "needs-live-verification"; const maintainerVerifiedLabelName = "maintainer-live-verified"; - const maintainerLiveVerified = currentLabels.has(maintainerVerifiedLabelName); + let maintainerLiveVerified = currentLabels.has(maintainerVerifiedLabelName); async function ensureLabel(name, color, description) { try { @@ -124,6 +127,20 @@ jobs: issue_number: issueNumber, labels: [liveVerificationLabelName], }); + + if (context.payload.action === "synchronize" && maintainerLiveVerified) { + 
try { + await github.rest.issues.removeLabel({ + owner, + repo, + issue_number: issueNumber, + name: maintainerVerifiedLabelName, + }); + } catch (error) { + if (error.status !== 404) throw error; + } + maintainerLiveVerified = false; + } } else { try { await github.rest.issues.removeLabel({ @@ -147,6 +164,10 @@ jobs: failures.push("The compliance checkbox must be checked."); } + if (!docsImpactChecked) { + failures.push("Check one of the Docs Impact options."); + } + if (!officialAuthChecked) { failures.push("The official-authentication checkbox must be checked."); } From e4e16fd90e8a158227e7cb73920fac89aa48099b Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:43:09 +0800 Subject: [PATCH 19/38] Cover docs-check discovery paths and clear stale live labels --- .github/workflows/pr-governance.yml | 14 ++++ scripts/ci/docs-check.js | 2 +- test/docs-check.test.ts | 107 +++++++++++++++++++++++++--- 3 files changed, 114 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 01cb4a7d..a10c4222 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -152,6 +152,20 @@ jobs: } catch (error) { if (error.status !== 404) throw error; } + + if (maintainerLiveVerified) { + try { + await github.rest.issues.removeLabel({ + owner, + repo, + issue_number: issueNumber, + name: maintainerVerifiedLabelName, + }); + } catch (error) { + if (error.status !== 404) throw error; + } + maintainerLiveVerified = false; + } } const failures = []; diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index fffb6339..8fe23f4b 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -89,7 +89,7 @@ async function walkMarkdownFiles(dirPath, rootDir = getRootDir()) { return files; } -async function collectMarkdownFiles(inputPaths, rootDir = getRootDir()) { +export async function collectMarkdownFiles(inputPaths, rootDir = getRootDir()) { const resolved = new 
Set(); if (inputPaths.length > 0) { diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 64069bea..f290b67b 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,11 +1,14 @@ +import { execFile } from "node:child_process"; import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises"; import path from "node:path"; import { setTimeout as delay } from "node:timers/promises"; +import { promisify } from "node:util"; import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; const TEMP_CLEANUP_ATTEMPTS = 3; const TEMP_CLEANUP_DELAY_MS = 100; +const execFileAsync = promisify(execFile); async function cleanupTempRoot(root: string) { for (let attempt = 1; attempt <= TEMP_CLEANUP_ATTEMPTS; attempt += 1) { @@ -28,7 +31,7 @@ afterEach(async () => { await Promise.all(tempRoots.splice(0).map((root) => cleanupTempRoot(root))); }); -async function createDocsFixture(markdown = "# Guide\n") { +async function createRepoFixture(files: Record<string, string>) { // docs-check resolves local links against process.cwd(), so fixtures must live
// .gitignore excludes tmp/ and tmp* so a leftover retry-cleanup fixture does @@ -39,15 +42,22 @@ async function createDocsFixture(markdown = "# Guide\n") { const root = await mkdtemp(path.join(repoTempDir, "docs-check-")); tempRoots.push(root); - const docsDir = path.join(root, "docs"); - const targetsDir = path.join(docsDir, "targets"); - await mkdir(targetsDir, { recursive: true }); + for (const [relativePath, contents] of Object.entries(files)) { + const absolutePath = path.join(root, relativePath); + await mkdir(path.dirname(absolutePath), { recursive: true }); + await writeFile(absolutePath, contents, "utf8"); + } - const docsFile = path.join(docsDir, "guide.md"); - await writeFile(docsFile, markdown, "utf8"); + return { root }; +} + +async function createDocsFixture(markdown = "# Guide\n") { + const { root } = await createRepoFixture({ + "docs/guide.md": markdown, + "docs/targets/exists.md": "# Target\n", + }); - const existingTarget = path.join(targetsDir, "exists.md"); - await writeFile(existingTarget, "# Target\n", "utf8"); + const docsFile = path.join(root, "docs", "guide.md"); return { docsFile, root }; } @@ -150,4 +160,85 @@ describe("docs-check script", () => { expect(linkTarget).toBe("./targets/exists.md"); await expect(validateLink(docsFile, linkTarget)).resolves.toBeNull(); }); + + it("discovers default markdown files and skips ignored directories", async () => { + const { collectMarkdownFiles } = await import("../scripts/ci/docs-check.js"); + const { root } = await createRepoFixture({ + "README.md": "# Root\n", + "CONTRIBUTING.md": "# Contributing\n", + "SECURITY.md": "# Security\n", + "CHANGELOG.md": "# Changelog\n", + ".github/pull_request_template.md": "# PR Template\n", + ".github/workflows/ignored.md": "# Ignored workflow doc\n", + "config/README.md": "# Config\n", + "docs/guide.md": "# Guide\n", + "docs/sub/nested.markdown": "# Nested\n", + "test/AGENTS.md": "# Test instructions\n", + "notes/outside.md": "# Outside default dirs\n", + 
"tmp/ignored.md": "# Ignored temp\n", + "dist/ignored.md": "# Ignored dist\n", + "node_modules/pkg/ignored.md": "# Ignored dependency\n", + }); + + const discoveredFiles = await collectMarkdownFiles([], root); + const relativeDiscoveredFiles = discoveredFiles.map((filePath: string) => + path.relative(root, filePath).replace(/\\/g, "/"), + ); + + expect(relativeDiscoveredFiles).toEqual([ + ".github/pull_request_template.md", + "CHANGELOG.md", + "CONTRIBUTING.md", + "README.md", + "SECURITY.md", + "config/README.md", + "docs/guide.md", + "docs/sub/nested.markdown", + "test/AGENTS.md", + ]); + }); + + it("collects only explicitly requested markdown files or directories", async () => { + const { collectMarkdownFiles } = await import("../scripts/ci/docs-check.js"); + const { root } = await createRepoFixture({ + "README.md": "# Root\n", + "docs/guide.md": "# Guide\n", + "docs/sub/nested.markdown": "# Nested\n", + "notes/extra.md": "# Extra\n", + }); + + const explicitFile = await collectMarkdownFiles(["README.md"], root); + const explicitDirectory = await collectMarkdownFiles(["docs"], root); + + expect(explicitFile.map((filePath: string) => path.relative(root, filePath).replace(/\\/g, "/"))).toEqual(["README.md"]); + expect(explicitDirectory.map((filePath: string) => path.relative(root, filePath).replace(/\\/g, "/"))).toEqual([ + "docs/guide.md", + "docs/sub/nested.markdown", + ]); + }); + + it("silently skips missing explicit paths", async () => { + const { collectMarkdownFiles } = await import("../scripts/ci/docs-check.js"); + const { root } = await createRepoFixture({ + "docs/guide.md": "# Guide\n", + }); + + await expect(collectMarkdownFiles(["missing.md", "missing-dir"], root)).resolves.toEqual([]); + }); + + it("runs the direct docs-check pipeline for an explicit fixture path", async () => { + const { root } = await createRepoFixture({ + "docs/guide.md": "[Target](./targets/exists.md)\n", + "docs/targets/exists.md": "# Target\n", + }); + const scriptPath = 
path.resolve(process.cwd(), "scripts/ci/docs-check.js"); + const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); + + const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + cwd: process.cwd(), + }); + + expect(stdout).toContain("docs-check: verified 2 markdown file(s)"); + expect(stderr).toBe(""); + }); }); From effb3a0eb104df78662365c868ac1d7dcc1bf6b1 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:47:28 +0800 Subject: [PATCH 20/38] Pin pr governance action and decode docs links --- .github/workflows/pr-governance.yml | 2 +- scripts/ci/docs-check.js | 9 ++++++++- test/docs-check.test.ts | 13 +++++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index a10c4222..8773fc17 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -30,7 +30,7 @@ jobs: timeout-minutes: 5 steps: - name: Validate PR template and live verification markers - uses: actions/github-script@v8 + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 with: script: | const owner = context.repo.owner; diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 8fe23f4b..b80c1133 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -256,7 +256,14 @@ export async function validateLink(filePath, linkTarget, rootDir = getRootDir()) const [rawPath] = linkTarget.split(/[?#]/, 1); if (!rawPath) return null; - const resolvedPath = path.resolve(path.dirname(filePath), rawPath); + let decodedPath = rawPath; + try { + decodedPath = decodeURIComponent(rawPath); + } catch { + decodedPath = rawPath; + } + + const resolvedPath = path.resolve(path.dirname(filePath), decodedPath); const relativeToRoot = path.relative(rootDir, resolvedPath); if (relativeToRoot.startsWith("..") || path.isAbsolute(relativeToRoot)) { return `Local target escapes 
repository root: ${rawPath}`; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index f290b67b..64814d63 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -123,6 +123,19 @@ describe("docs-check script", () => { ); }); + it("decodes URL-escaped local paths before checking the filesystem", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { root } = await createRepoFixture({ + "docs/guide.md": "[Space](./My%20Guide.md)\n[Literal](./bad%2Gname.md)\n", + "docs/My Guide.md": "# Decoded path target\n", + "docs/bad%2Gname.md": "# Literal percent target\n", + }); + const docsFile = path.join(root, "docs", "guide.md"); + + await expect(validateLink(docsFile, "./My%20Guide.md", root)).resolves.toBeNull(); + await expect(validateLink(docsFile, "./bad%2Gname.md", root)).resolves.toBeNull(); + }); + it("normalizes direct-run paths consistently for the current platform", async () => { const { normalizePathForCompare } = await import("../scripts/ci/docs-check.js"); From 98d09be7956c410476d95eec24c50d34b66e9fdc Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 15:49:22 +0800 Subject: [PATCH 21/38] Tighten stale live label cleanup --- .github/workflows/pr-governance.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 8773fc17..d9452f43 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -153,7 +153,7 @@ jobs: if (error.status !== 404) throw error; } - if (maintainerLiveVerified) { + if (context.payload.action === "synchronize" && maintainerLiveVerified) { try { await github.rest.issues.removeLabel({ owner, From 3b726f457b25c9f076065f4554931e45d87e778d Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 16:03:49 +0800 Subject: [PATCH 22/38] Pin workflow action tags and cover empty docs run --- .github/workflows/ci.yml | 28 
++++++++++++++-------------- .github/workflows/pr-advisory.yml | 14 +++++++------- test/docs-check.test.ts | 13 +++++++++++++ 3 files changed, 34 insertions(+), 21 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8dee5c0a..e77db966 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: workflow_changed: ${{ steps.detect.outputs.workflow_changed }} steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 @@ -108,10 +108,10 @@ jobs: timeout-minutes: 15 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -130,10 +130,10 @@ jobs: timeout-minutes: 15 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -152,10 +152,10 @@ jobs: timeout-minutes: 20 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -174,10 +174,10 @@ jobs: timeout-minutes: 25 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -196,10 
+196,10 @@ jobs: timeout-minutes: 30 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -218,10 +218,10 @@ jobs: timeout-minutes: 10 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc @@ -237,7 +237,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Lint GitHub Actions workflows uses: docker://rhysd/actionlint:1.7.11 diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index f15d557b..5b274116 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -30,7 +30,7 @@ jobs: code_changed: ${{ steps.detect.outputs.code_changed }} steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 @@ -95,10 +95,10 @@ jobs: timeout-minutes: 30 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm @@ -157,10 +157,10 @@ jobs: node: 18 # Verify Windows + LTS compatibility; antivirus locking is a known risk. 
steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version: ${{ matrix.node }} cache: npm @@ -182,10 +182,10 @@ jobs: timeout-minutes: 15 steps: - name: Check out repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 with: node-version-file: .nvmrc cache: npm diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 64814d63..21bdd811 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -254,4 +254,17 @@ describe("docs-check script", () => { expect(stdout).toContain("docs-check: verified 2 markdown file(s)"); expect(stderr).toBe(""); }); + + it("exits cleanly when no markdown files are found", async () => { + const { root } = await createRepoFixture({}); + const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); + const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); + + const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + cwd: process.cwd(), + }); + + expect(stdout).toContain("docs-check: no markdown files found"); + expect(stderr).toBe(""); + }); }); From 70247a98a1804aef6258cc185c1ce329a24d1026 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 16:10:29 +0800 Subject: [PATCH 23/38] Pin advisory artifact action and cover docs failures --- .github/workflows/pr-advisory.yml | 2 +- test/docs-check.test.ts | 26 ++++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 5b274116..39baf0fc 100644 --- 
a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -113,7 +113,7 @@ jobs: - name: Upload coverage artifacts if: always() - uses: actions/upload-artifact@v7 + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 with: name: coverage-report-${{ github.run_id }} path: coverage diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 21bdd811..80a99833 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -267,4 +267,30 @@ describe("docs-check script", () => { expect(stdout).toContain("docs-check: no markdown files found"); expect(stderr).toBe(""); }); + + it("exits with an error when the direct docs-check pipeline finds broken links", async () => { + const { root } = await createRepoFixture({ + "docs/guide.md": "[Missing](./targets/missing.md)\n", + }); + const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); + const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); + let failure: (Error & { code?: number; stderr?: string; stdout?: string }) | null = null; + + try { + await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + cwd: process.cwd(), + }); + } catch (error) { + if (error instanceof Error) { + failure = error as Error & { code?: number; stderr?: string; stdout?: string }; + } else { + throw error; + } + } + + expect(failure).not.toBeNull(); + expect(failure?.code).toBe(1); + expect(failure?.stderr).toContain("docs-check found broken documentation links:"); + expect(failure?.stderr).toContain("docs/guide.md: Missing local target: ./targets/missing.md (./targets/missing.md)"); + }); }); From 45f2bb20d6cf9e2587725d7ad55441097c327d13 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 17:09:03 +0800 Subject: [PATCH 24/38] Harden docs-check portability and CI routing --- .github/workflows/ci.yml | 28 ++++++++++++---- scripts/ci/docs-check.js | 71 +++++++++++++++++++++++++++++++++++++--- 
test/docs-check.test.ts | 14 ++++++++ 3 files changed, 102 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e77db966..64eba6ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,15 +48,29 @@ jobs: workflow_changed=false files=() + collect_changed_files() { + local range="$1" + + while IFS=$'\t' read -r status first_path second_path; do + [[ -z "${status}" ]] && continue + + case "${status}" in + R*|C*) + [[ -n "${first_path}" ]] && files+=("${first_path}") + [[ -n "${second_path}" ]] && files+=("${second_path}") + ;; + *) + [[ -n "${first_path}" ]] && files+=("${first_path}") + ;; + esac + done < <(git diff --find-renames --name-status "${range}") + } + if [[ "${EVENT_NAME}" == "pull_request" ]]; then git fetch --no-tags --depth=1 origin "${BASE_REF}" - while IFS= read -r file; do - files+=("${file}") - done < <(git diff --name-only "origin/${BASE_REF}...HEAD") + collect_changed_files "origin/${BASE_REF}...HEAD" elif [[ "${EVENT_NAME}" == "push" && -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then - while IFS= read -r file; do - files+=("${file}") - done < <(git diff --name-only "${BEFORE_SHA}...HEAD") + collect_changed_files "${BEFORE_SHA}...HEAD" else while IFS= read -r file; do files+=("${file}") @@ -240,7 +254,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Lint GitHub Actions workflows - uses: docker://rhysd/actionlint:1.7.11 + uses: docker://rhysd/actionlint@sha256:5457037ba91acd225478edac3d4b32e45cf6c10291e0dabbfd2491c63129afe1 # rhysd/actionlint:1.7.11 linux/amd64 with: args: -color diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index b80c1133..56594997 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -1,5 +1,7 @@ #!/usr/bin/env node +import { execFileSync } from "node:child_process"; +import { readFileSync } from "node:fs"; import { access, readdir, 
readFile, stat } from "node:fs/promises"; import path from "node:path"; import { fileURLToPath } from "node:url"; @@ -8,8 +10,8 @@ const DEFAULT_FILES = ["README.md", "CONTRIBUTING.md", "SECURITY.md", "CHANGELOG const DEFAULT_DIRS = [".github", "config", "docs", "test"]; const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); +const MARKDOWN_PATH_ESCAPE_PATTERN = /\\([\x20-\x2F\x3A-\x40\x5B-\x60\x7B-\x7E])/g; const __filename = fileURLToPath(import.meta.url); -const REPOSITORY = process.env.GITHUB_REPOSITORY ?? "ndycode/oc-chatgpt-multi-auth"; function getRootDir() { return process.cwd(); @@ -24,6 +26,62 @@ function normalizeReferenceLabel(label) { return label.trim().replace(/\s+/g, " ").toLowerCase(); } +function unescapeMarkdownPathTarget(target) { + return target.replace(MARKDOWN_PATH_ESCAPE_PATTERN, "$1"); +} + +function extractRepositorySlug(repositoryValue) { + if (!repositoryValue) return null; + + const normalizedValue = repositoryValue + .trim() + .replace(/^git\+/, "") + .replace(/^git@github\.com:/i, "https://github.com/") + .replace(/^ssh:\/\/git@github\.com\//i, "https://github.com/") + .replace(/\.git$/i, ""); + + try { + const url = new URL(normalizedValue); + if (!["github.com", "www.github.com"].includes(url.hostname)) return null; + + const match = url.pathname.match(/^\/([^/]+)\/([^/]+?)\/?$/); + if (!match) return null; + return `${match[1]}/${match[2]}`; + } catch { + const match = normalizedValue.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\/)?$/i); + if (!match) return null; + return `${match[1]}/${match[2]}`; + } +} + +function getRepositorySlug(rootDir = getRootDir()) { + const githubRepository = process.env.GITHUB_REPOSITORY?.trim(); + if (githubRepository && /^[^/]+\/[^/]+$/.test(githubRepository)) { + return githubRepository; + } + + try { + const packageJson = JSON.parse(readFileSync(path.join(rootDir, "package.json"), "utf8")); + const 
repositoryField = + typeof packageJson.repository === "string" ? packageJson.repository : packageJson.repository?.url; + const repositoryFromPackage = extractRepositorySlug(repositoryField); + if (repositoryFromPackage) return repositoryFromPackage; + } catch { + // Ignore package.json lookup failures and fall back to git metadata. + } + + try { + const remoteUrl = execFileSync("git", ["config", "--get", "remote.origin.url"], { + cwd: rootDir, + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + }).trim(); + return extractRepositorySlug(remoteUrl); + } catch { + return null; + } +} + function normalizeLinkTarget(rawTarget) { if (!rawTarget) return null; @@ -44,6 +102,7 @@ function normalizeLinkTarget(rawTarget) { } } + target = unescapeMarkdownPathTarget(target); return target || null; } @@ -217,7 +276,7 @@ export function extractMarkdownLinks(markdown) { return links; } -function getWorkflowPathFromUrl(target) { +function getWorkflowPathFromUrl(target, rootDir = getRootDir()) { try { const url = new URL(target); if (!["github.com", "www.github.com"].includes(url.hostname)) return null; @@ -225,7 +284,11 @@ function getWorkflowPathFromUrl(target) { if (!match) return null; const [, ownerFromUrl, repoFromUrl, workflowFile] = match; - const [owner, repo] = REPOSITORY.split("/"); + const repositorySlug = getRepositorySlug(rootDir); + // Local clones and forks may not expose GitHub context; skip badge validation if the repo slug is unknown. 
+ if (!repositorySlug) return null; + + const [owner, repo] = repositorySlug.split("/"); if (ownerFromUrl.toLowerCase() !== owner?.toLowerCase() || repoFromUrl.toLowerCase() !== repo?.toLowerCase()) return null; return workflowFile; @@ -242,7 +305,7 @@ export async function validateLink(filePath, linkTarget, rootDir = getRootDir()) if (!linkTarget || linkTarget.startsWith("#")) return null; if (/^(mailto:|tel:|data:)/i.test(linkTarget)) return null; - const workflowFile = getWorkflowPathFromUrl(linkTarget); + const workflowFile = getWorkflowPathFromUrl(linkTarget, rootDir); if (workflowFile) { const workflowPath = path.join(rootDir, ".github", "workflows", workflowFile); if (await exists(workflowPath)) return null; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 80a99833..7e0d2c61 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -136,6 +136,20 @@ describe("docs-check script", () => { await expect(validateLink(docsFile, "./bad%2Gname.md", root)).resolves.toBeNull(); }); + it("unescapes markdown-escaped local targets before checking the filesystem", async () => { + const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); + const { root } = await createRepoFixture({ + "docs/guide.md": "[Escaped](./array\\[1\\]\\ \\(v2\\).md)\n", + "docs/array[1] (v2).md": "# Escaped target\n", + }); + const docsFile = path.join(root, "docs", "guide.md"); + const markdown = await readFile(docsFile, "utf8"); + const [linkTarget] = extractMarkdownLinks(markdown); + + expect(linkTarget).toBe("./array[1] (v2).md"); + await expect(validateLink(docsFile, linkTarget, root)).resolves.toBeNull(); + }); + it("normalizes direct-run paths consistently for the current platform", async () => { const { normalizePathForCompare } = await import("../scripts/ci/docs-check.js"); From 66bf42ce1fb302ec5f3f79c3e35c54abc6015f4f Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 17:21:09 +0800 Subject: [PATCH 25/38] Tighten 
CI review follow-ups --- .github/workflows/ci.yml | 2 + .github/workflows/pr-advisory.yml | 2 +- .github/workflows/pr-governance.yml | 10 +-- test/docs-check.test.ts | 103 +++++++++++++++++++++++++--- 4 files changed, 102 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 64eba6ec..ea304768 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -104,6 +104,8 @@ jobs: done if [[ "${EVENT_NAME}" != "pull_request" ]]; then + # Run full docs/code lanes on non-PR events, but keep workflow_changed + # diff-based so actionlint only runs when workflow files actually change. docs_changed=true code_changed=true fi diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index 39baf0fc..c9a916e5 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -69,7 +69,7 @@ jobs: [[ -z "${file}" ]] && continue is_docs_markdown=false - if [[ "${file}" =~ ^[^/]+\.(md|markdown)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then + if [[ "${file}" =~ ^(README\.md|CONTRIBUTING\.md|CHANGELOG\.md|SECURITY\.md)$ ]] || [[ "${file}" =~ ^(\.github|config|docs|test)/.+\.(md|markdown)$ ]]; then is_docs_markdown=true fi diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index d9452f43..fe01522e 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -68,11 +68,11 @@ jobs: const liveVerificationRequired = riskyPaths.length > 0; const requiredHeadings = [ - /^## Summary$/m, - /^## Testing$/m, - /^## Docs Impact$/m, - /^## Compliance Confirmation$/m, - /^## Notes$/m, + /^## Summary\s*$/m, + /^## Testing\s*$/m, + /^## Docs Impact\s*$/m, + /^## Compliance Confirmation\s*$/m, + /^## Notes\s*$/m, ]; const missingSections = requiredHeadings.filter((pattern) => !pattern.test(body)); diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 7e0d2c61..a16f877c 100644 --- 
a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -1,13 +1,14 @@ import { execFile } from "node:child_process"; import { mkdir, mkdtemp, readFile, rm, writeFile } from "node:fs/promises"; +import { tmpdir } from "node:os"; import path from "node:path"; import { setTimeout as delay } from "node:timers/promises"; import { promisify } from "node:util"; import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; -const TEMP_CLEANUP_ATTEMPTS = 3; -const TEMP_CLEANUP_DELAY_MS = 100; +const TEMP_CLEANUP_DELAYS_MS = [100, 500, 2000]; +const TEMP_CLEANUP_ATTEMPTS = TEMP_CLEANUP_DELAYS_MS.length + 1; const execFileAsync = promisify(execFile); async function cleanupTempRoot(root: string) { @@ -22,7 +23,7 @@ async function cleanupTempRoot(root: string) { return; } - await delay(TEMP_CLEANUP_DELAY_MS); + await delay(TEMP_CLEANUP_DELAYS_MS[attempt - 1] ?? TEMP_CLEANUP_DELAYS_MS.at(-1) ?? 100); } } } @@ -31,6 +32,16 @@ afterEach(async () => { await Promise.all(tempRoots.splice(0).map((root) => cleanupTempRoot(root))); }); +async function writeFixtureFiles(root: string, files: Record) { + tempRoots.push(root); + + for (const [relativePath, contents] of Object.entries(files)) { + const absolutePath = path.join(root, relativePath); + await mkdir(path.dirname(absolutePath), { recursive: true }); + await writeFile(absolutePath, contents, "utf8"); + } +} + async function createRepoFixture(files: Record) { // docs-check resolves local links against process.cwd(), so fixtures must live // under the repo root for relative-link validation to exercise real behavior. 
@@ -40,13 +51,16 @@ async function createRepoFixture(files: Record) { await mkdir(repoTempDir, { recursive: true }); const root = await mkdtemp(path.join(repoTempDir, "docs-check-")); - tempRoots.push(root); + await writeFixtureFiles(root, files); - for (const [relativePath, contents] of Object.entries(files)) { - const absolutePath = path.join(root, relativePath); - await mkdir(path.dirname(absolutePath), { recursive: true }); - await writeFile(absolutePath, contents, "utf8"); - } + return { root }; +} + +async function createExternalFixture(files: Record) { + // Workflow badge fallback tests need a directory outside the repo so the git + // remote lookup can cleanly fail when package metadata is absent. + const root = await mkdtemp(path.join(tmpdir(), "docs-check-external-")); + await writeFixtureFiles(root, files); return { root }; } @@ -111,6 +125,77 @@ describe("docs-check script", () => { ).resolves.toBeNull(); }); + it("uses package metadata to validate workflow badge targets when GitHub Actions context is unavailable", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { root } = await createExternalFixture({ + "package.json": JSON.stringify( + { + name: "fixture-docs-check", + repository: { + type: "git", + url: "git+https://github.com/example/docs-fixture.git", + }, + }, + null, + 2, + ), + "docs/guide.md": "# Guide\n", + ".github/workflows/ci.yml": "name: CI\non: push\n", + }); + const docsFile = path.join(root, "docs", "guide.md"); + const originalRepository = process.env.GITHUB_REPOSITORY; + delete process.env.GITHUB_REPOSITORY; + + try { + await expect( + validateLink( + docsFile, + "https://github.com/example/docs-fixture/actions/workflows/ci.yml/badge.svg", + root, + ), + ).resolves.toBeNull(); + await expect( + validateLink( + docsFile, + "https://github.com/example/docs-fixture/actions/workflows/missing.yml/badge.svg", + root, + ), + ).resolves.toBe("Missing workflow referenced by GitHub Actions 
badge/link: missing.yml"); + } finally { + if (originalRepository === undefined) { + delete process.env.GITHUB_REPOSITORY; + } else { + process.env.GITHUB_REPOSITORY = originalRepository; + } + } + }); + + it("skips workflow badge validation when repository metadata cannot be resolved", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { root } = await createExternalFixture({ + "docs/guide.md": "# Guide\n", + }); + const docsFile = path.join(root, "docs", "guide.md"); + const originalRepository = process.env.GITHUB_REPOSITORY; + delete process.env.GITHUB_REPOSITORY; + + try { + await expect( + validateLink( + docsFile, + "https://github.com/example/docs-fixture/actions/workflows/ci.yml/badge.svg", + root, + ), + ).resolves.toBeNull(); + } finally { + if (originalRepository === undefined) { + delete process.env.GITHUB_REPOSITORY; + } else { + process.env.GITHUB_REPOSITORY = originalRepository; + } + } + }); + it("resolves relative local targets from the markdown file directory", async () => { const { validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile, root } = await createDocsFixture(); From e02bb4bf5f8894e2ae02a3054e59227b837141a6 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 17:35:31 +0800 Subject: [PATCH 26/38] chore: tighten workflow review edge cases --- .github/workflows/ci.yml | 7 ++++--- .github/workflows/pr-advisory.yml | 26 ++++++++++++++++++++------ .github/workflows/pr-governance.yml | 6 ++++-- test/docs-check.test.ts | 3 ++- 4 files changed, 30 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ea304768..d053d894 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,9 +72,10 @@ jobs: elif [[ "${EVENT_NAME}" == "push" && -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then collect_changed_files "${BEFORE_SHA}...HEAD" else - while IFS= read -r file; do - 
files+=("${file}") - done < <(git ls-files) + # No reliable diff range is available on this event. docs_changed and + # code_changed are forced below, and workflow_changed intentionally + # stays false unless an actual diff classified workflow files. + : fi for file in "${files[@]}"; do diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index c9a916e5..a4a9fb1e 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -48,18 +48,32 @@ jobs: code_changed=false files=() + collect_changed_files() { + local range="$1" + + while IFS=$'\t' read -r status first_path second_path; do + [[ -z "${status}" ]] && continue + + case "${status}" in + R*|C*) + [[ -n "${first_path}" ]] && files+=("${first_path}") + [[ -n "${second_path}" ]] && files+=("${second_path}") + ;; + *) + [[ -n "${first_path}" ]] && files+=("${first_path}") + ;; + esac + done < <(git diff --find-renames --name-status "${range}") + } + if [[ "${EVENT_NAME}" == "schedule" || "${EVENT_NAME}" == "workflow_dispatch" ]]; then dependency_changed=true code_changed=true elif [[ "${EVENT_NAME}" == "pull_request" ]]; then git fetch --no-tags --depth=1 origin "${BASE_REF}" - while IFS= read -r file; do - files+=("${file}") - done < <(git diff --name-only "origin/${BASE_REF}...HEAD") + collect_changed_files "origin/${BASE_REF}...HEAD" elif [[ "${EVENT_NAME}" == "push" && -n "${BEFORE_SHA}" && "${BEFORE_SHA}" != "0000000000000000000000000000000000000000" ]]; then - while IFS= read -r file; do - files+=("${file}") - done < <(git diff --name-only "${BEFORE_SHA}...HEAD") + collect_changed_files "${BEFORE_SHA}...HEAD" else dependency_changed=true code_changed=true diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index fe01522e..88e890e0 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -86,6 +86,8 @@ jobs: const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT 
Plus\/Pro account\./i.test(body); const liveVerificationLabelName = "needs-live-verification"; const maintainerVerifiedLabelName = "maintainer-live-verified"; + const invalidateMaintainerVerification = + context.payload.action === "synchronize" || context.payload.action === "reopened"; let maintainerLiveVerified = currentLabels.has(maintainerVerifiedLabelName); async function ensureLabel(name, color, description) { @@ -128,7 +130,7 @@ jobs: labels: [liveVerificationLabelName], }); - if (context.payload.action === "synchronize" && maintainerLiveVerified) { + if (invalidateMaintainerVerification && maintainerLiveVerified) { try { await github.rest.issues.removeLabel({ owner, @@ -153,7 +155,7 @@ jobs: if (error.status !== 404) throw error; } - if (context.payload.action === "synchronize" && maintainerLiveVerified) { + if (invalidateMaintainerVerification && maintainerLiveVerified) { try { await github.rest.issues.removeLabel({ owner, diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index a16f877c..a08b0ebe 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -85,12 +85,13 @@ describe("docs-check script", () => { expect(extractMarkdownLinks(markdown)).toEqual(["docs/guides/config(v2).md"]); }); - it("skips anchor-only and external links", async () => { + it("skips anchor-only, external, and site-root-prefixed links", async () => { const { validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile } = await createDocsFixture(); await expect(validateLink(docsFile, "#section")).resolves.toBeNull(); await expect(validateLink(docsFile, "https://example.com/docs")).resolves.toBeNull(); + await expect(validateLink(docsFile, "/docs/development/CONFIG_FIELDS.md")).resolves.toBeNull(); }); it("requires an absolute markdown file path", async () => { From c16622f7af2d83c9b593379a7e7119e4d81621f3 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 17:48:26 +0800 Subject: [PATCH 27/38] chore: isolate governance event 
concurrency --- .github/workflows/pr-governance.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 88e890e0..08ac6b2d 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -17,7 +17,7 @@ permissions: pull-requests: read concurrency: - group: governance-${{ github.event.pull_request.number }} + group: governance-${{ github.event.pull_request.number }}-${{ github.event.action }} cancel-in-progress: true jobs: From 8288daf646ebc92cda63713bd54fca85c78b7674 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 18:06:28 +0800 Subject: [PATCH 28/38] ci: harden PR review follow-ups --- .github/workflows/ci.yml | 2 +- .github/workflows/pr-governance.yml | 8 +++++++- test/docs-check.test.ts | 4 ++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d053d894..5cd32fd6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -225,7 +225,7 @@ jobs: run: npm ci - name: Run tests - run: npm test + run: npm test -- --reporter=verbose docs-sanity: name: docs-sanity diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 08ac6b2d..b619d328 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -37,8 +37,14 @@ jobs: const repo = context.repo.repo; const issueNumber = context.payload.pull_request.number; const body = context.payload.pull_request.body || ""; + const liveLabels = await github.paginate(github.rest.issues.listLabelsOnIssue, { + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); const currentLabels = new Set( - (context.payload.pull_request.labels ?? 
[]) + liveLabels .map((label) => label?.name) .filter((name) => typeof name === "string"), ); diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index a08b0ebe..a6191dc7 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -9,6 +9,7 @@ import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; const TEMP_CLEANUP_DELAYS_MS = [100, 500, 2000]; const TEMP_CLEANUP_ATTEMPTS = TEMP_CLEANUP_DELAYS_MS.length + 1; +const DOCS_CHECK_SUBPROCESS_TIMEOUT_MS = 15_000; const execFileAsync = promisify(execFile); async function cleanupTempRoot(root: string) { @@ -349,6 +350,7 @@ describe("docs-check script", () => { const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { cwd: process.cwd(), + timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); expect(stdout).toContain("docs-check: verified 2 markdown file(s)"); @@ -362,6 +364,7 @@ describe("docs-check script", () => { const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { cwd: process.cwd(), + timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); expect(stdout).toContain("docs-check: no markdown files found"); @@ -379,6 +382,7 @@ describe("docs-check script", () => { try { await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { cwd: process.cwd(), + timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); } catch (error) { if (error instanceof Error) { From 85895c4da1d4d6628bc579ee6b868734c767d929 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 18:21:24 +0800 Subject: [PATCH 29/38] ci: summarize advisory compat failures --- .github/workflows/pr-advisory.yml | 16 ++++++++++++++++ test/docs-check.test.ts | 17 +++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/.github/workflows/pr-advisory.yml b/.github/workflows/pr-advisory.yml index a4a9fb1e..e4d64901 100644 --- a/.github/workflows/pr-advisory.yml +++ b/.github/workflows/pr-advisory.yml @@ -188,6 +188,22 @@ 
jobs: - name: Run tests run: npm test + - name: Write compat summary + if: always() + shell: bash + env: + COMPAT_OUTCOME: ${{ job.status }} + run: | + { + echo "## Compat matrix (${{ matrix.os }}, Node ${{ matrix.node }})" + echo "" + if [[ "${COMPAT_OUTCOME}" == "success" ]]; then + echo "- build + tests passed." + else + echo "- build or tests failed. Review logs before promoting this lane to required." + fi + } >> "${GITHUB_STEP_SUMMARY}" + dependency-audit: name: dependency-audit needs: detect-dependency-change diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index a6191dc7..1508b821 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -371,6 +371,23 @@ describe("docs-check script", () => { expect(stderr).toBe(""); }); + it("runs the direct docs-check pipeline in default-scan mode", async () => { + const { root } = await createRepoFixture({ + "README.md": "# Root\n", + "docs/guide.md": "[Target](./targets/exists.md)\n", + "docs/targets/exists.md": "# Target\n", + }); + const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); + + const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath], { + cwd: root, + timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, + }); + + expect(stdout).toContain("docs-check: verified 3 markdown file(s)"); + expect(stderr).toBe(""); + }); + it("exits with an error when the direct docs-check pipeline finds broken links", async () => { const { root } = await createRepoFixture({ "docs/guide.md": "[Missing](./targets/missing.md)\n", From 1e9699219443d5dc4db4d9d1fd993d6367b5921e Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 18:33:44 +0800 Subject: [PATCH 30/38] ci: harden governance body parsing --- .github/workflows/pr-governance.yml | 19 +++++++++++-------- test/docs-check.test.ts | 28 ++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 
b619d328..7035c9a0 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -37,6 +37,9 @@ jobs: const repo = context.repo.repo; const issueNumber = context.payload.pull_request.number; const body = context.payload.pull_request.body || ""; + const strippedBody = body + .replace(/```[\s\S]*?```/g, "") + .replace(/`[^`\n]+`/g, ""); const liveLabels = await github.paginate(github.rest.issues.listLabelsOnIssue, { owner, repo, @@ -81,15 +84,15 @@ jobs: /^## Notes\s*$/m, ]; - const missingSections = requiredHeadings.filter((pattern) => !pattern.test(body)); - const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(body); + const missingSections = requiredHeadings.filter((pattern) => !pattern.test(strippedBody)); + const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(strippedBody); const docsImpactChecked = - /- \[x\] README or docs updated/i.test(body) || - /- \[x\] No docs changes needed/i.test(body); - const officialAuthChecked = /- \[x\] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior\./i.test(body); - const testsDocsChecked = /- \[x\] I updated tests and documentation when the change affected users, maintainers, or repository behavior\./i.test(body); - const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(body); - const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT Plus\/Pro account\./i.test(body); + /- \[x\] README or docs updated/i.test(strippedBody) || + /- \[x\] No docs changes needed/i.test(strippedBody); + const officialAuthChecked = /- \[x\] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior\./i.test(strippedBody); + const testsDocsChecked = /- \[x\] I updated tests and 
documentation when the change affected users, maintainers, or repository behavior\./i.test(strippedBody); + const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(strippedBody); + const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT Plus\/Pro account\./i.test(strippedBody); const liveVerificationLabelName = "needs-live-verification"; const maintainerVerifiedLabelName = "maintainer-live-verified"; const invalidateMaintainerVerification = diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 1508b821..53be1db9 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -414,4 +414,32 @@ describe("docs-check script", () => { expect(failure?.stderr).toContain("docs-check found broken documentation links:"); expect(failure?.stderr).toContain("docs/guide.md: Missing local target: ./targets/missing.md (./targets/missing.md)"); }); + + it("exits with an error when the direct docs-check pipeline finds a broken workflow badge", async () => { + const { root } = await createRepoFixture({ + "docs/guide.md": + "[CI](https://github.com/ndycode/oc-chatgpt-multi-auth/actions/workflows/does-not-exist.yml/badge.svg)\n", + }); + const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); + const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); + let failure: (Error & { code?: number; stderr?: string; stdout?: string }) | null = null; + + try { + await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + cwd: process.cwd(), + timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, + }); + } catch (error) { + if (error instanceof Error) { + failure = error as Error & { code?: number; stderr?: string; stdout?: string }; + } else { + throw error; + } + } + + expect(failure).not.toBeNull(); + expect(failure?.code).toBe(1); + expect(failure?.stderr).toContain("docs-check found broken documentation links:"); + expect(failure?.stderr).toContain("Missing 
workflow referenced by GitHub Actions badge/link: does-not-exist.yml"); + }); }); From f3052212f5afe78c4dcaad770bd51281b38ad5c9 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 20:10:53 +0800 Subject: [PATCH 31/38] ci: harden governance body parsing --- .github/workflows/pr-governance.yml | 5 +++-- docs/development/TESTING.md | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 7035c9a0..0554d060 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -38,8 +38,9 @@ jobs: const issueNumber = context.payload.pull_request.number; const body = context.payload.pull_request.body || ""; const strippedBody = body - .replace(/```[\s\S]*?```/g, "") - .replace(/`[^`\n]+`/g, ""); + .replace(//g, "") + .replace(/(^|\n)(?: {0,3})(`{3,}|~{3,})[^\n]*\n[\s\S]*?\n(?: {0,3})\2[^\n]*(?=\n|$)/g, "$1") + .replace(/(`+)([^`\n]|`(?!\1))*\1/g, ""); const liveLabels = await github.paginate(github.rest.issues.listLabelsOnIssue, { owner, repo, diff --git a/docs/development/TESTING.md b/docs/development/TESTING.md index 4d07589c..829559f6 100644 --- a/docs/development/TESTING.md +++ b/docs/development/TESTING.md @@ -39,7 +39,7 @@ Current PR automation is split into required and advisory lanes: - Required `required-pr`: aggregates `lint`, `typecheck`, `build`, `unit (linux)`, and `unit (windows)` for the default Node version, plus `docs-sanity` for Markdown changes and `actionlint` when workflows change. - Required `pr-governance`: enforces the pull request template, compliance checkbox, and a completed live-verification marker for auth/request/storage changes. -- Advisory `PR Advisory`: runs `npm run test:coverage`, a wider compatibility matrix (Ubuntu Node 18 and 22 plus macOS on the default Node version), and `npm run audit:ci`. 
+- Advisory `PR Advisory`: runs `npm run test:coverage`, a wider compatibility matrix (Ubuntu Node 18 and 22, macOS on the default Node version, and Windows Node 18), and `npm run audit:ci`. Notes on the advisory lane: From d16b76742e4d6f265e4c4c746423f4683f93a374 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 20:25:34 +0800 Subject: [PATCH 32/38] test: cover shortcut docs references --- scripts/ci/docs-check.js | 6 ++++++ test/docs-check.test.ts | 10 ++++++++++ 2 files changed, 16 insertions(+) diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 56594997..0b61fa9e 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -248,6 +248,7 @@ export function extractMarkdownLinks(markdown) { .replace(/`[^`\n]+`/g, "`code`"); const openerPattern = /!?\[[^\]]*]\(/g; const referencePattern = /!?\[([^\]]+)]\[([^\]]*)]/g; + const shortcutReferencePattern = /!?\[([^\]]+)](?![\[(]:)/g; const referenceDefinitionPattern = /^\s{0,3}\[([^\]]+)]:\s+(.+)$/gm; const links = []; const referenceDefinitions = new Map(); @@ -273,6 +274,11 @@ export function extractMarkdownLinks(markdown) { if (referenceTarget) links.push(referenceTarget); } + for (const match of stripped.matchAll(shortcutReferencePattern)) { + const referenceTarget = referenceDefinitions.get(normalizeReferenceLabel(match[1] ?? 
"")); + if (referenceTarget) links.push(referenceTarget); + } + return links; } diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 53be1db9..b80b5d4f 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -257,6 +257,16 @@ describe("docs-check script", () => { await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); }); + it("extracts shortcut reference links so missing targets are still caught", async () => { + const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); + const { docsFile } = await createDocsFixture("[config]\n\n[config]: ./targets/missing.md\n"); + const markdown = await readFile(docsFile, "utf8"); + const [referenceTarget] = extractMarkdownLinks(markdown); + + expect(referenceTarget).toBe("./targets/missing.md"); + await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); + }); + it("ignores links that only appear inside HTML comments", async () => { const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); From 64afc50a691bbba70403f104ab2cc6dfeaac1a91 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 20:49:52 +0800 Subject: [PATCH 33/38] test: tighten shortcut reference parsing --- scripts/ci/docs-check.js | 2 +- test/docs-check.test.ts | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index 0b61fa9e..b514f170 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -248,7 +248,7 @@ export function extractMarkdownLinks(markdown) { .replace(/`[^`\n]+`/g, "`code`"); const openerPattern = /!?\[[^\]]*]\(/g; const referencePattern = /!?\[([^\]]+)]\[([^\]]*)]/g; - const shortcutReferencePattern = /!?\[([^\]]+)](?![\[(]:)/g; + const shortcutReferencePattern = /(? 
{ await expect(validateLink(docsFile, referenceTarget)).resolves.toBe("Missing local target: ./targets/missing.md"); }); + it("does not treat inline or full reference links as shortcut references", async () => { + const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); + + const markdown = [ + "[Inline](./targets/inline.md)", + "[Config][cfg]", + "", + "[inline]: ./targets/inline-shortcut.md", + "[cfg]: ./targets/full.md", + "[config]: ./targets/full-shortcut.md", + "", + ].join("\n"); + + expect(extractMarkdownLinks(markdown)).toEqual(["./targets/inline.md", "./targets/full.md"]); + }); + it("ignores links that only appear inside HTML comments", async () => { const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); From 2681483fb8a9717331679bb6f2906f51a287fe74 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 20:50:00 +0800 Subject: [PATCH 34/38] ci: serialize PR governance runs --- .github/workflows/pr-governance.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 0554d060..15148bf4 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -17,8 +17,10 @@ permissions: pull-requests: read concurrency: - group: governance-${{ github.event.pull_request.number }}-${{ github.event.action }} - cancel-in-progress: true + # Serialize governance per PR so label/state mutations observe live state and + # synchronize runs are not canceled before invalidation logic completes. 
+ group: governance-${{ github.event.pull_request.number }} + cancel-in-progress: false jobs: pr-governance: From 4d1be96d506f433e8a6aa9e0b711cbbc92870deb Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 2026 22:02:58 +0800 Subject: [PATCH 35/38] ci: harden governance fence parsing --- .github/workflows/pr-governance.yml | 40 ++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 15148bf4..27af6eaa 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -39,10 +39,42 @@ jobs: const repo = context.repo.repo; const issueNumber = context.payload.pull_request.number; const body = context.payload.pull_request.body || ""; - const strippedBody = body - .replace(//g, "") - .replace(/(^|\n)(?: {0,3})(`{3,}|~{3,})[^\n]*\n[\s\S]*?\n(?: {0,3})\2[^\n]*(?=\n|$)/g, "$1") - .replace(/(`+)([^`\n]|`(?!\1))*\1/g, ""); + function stripMarkdownCode(markdown) { + const output = []; + const lines = markdown.split(/\r?\n/); + let inFence = false; + let fenceChar = ""; + let fenceLength = 0; + + for (const line of lines) { + if (!inFence) { + const openingFence = line.match(/^(?: {0,3})(`{3,}|~{3,})[^\n]*$/); + if (openingFence) { + inFence = true; + fenceChar = openingFence[1][0]; + fenceLength = openingFence[1].length; + output.push(""); + continue; + } + + output.push(line); + continue; + } + + const closingFence = new RegExp(`^(?: {0,3})${fenceChar}{${fenceLength},}[^\\n]*$`); + if (closingFence.test(line)) { + inFence = false; + fenceChar = ""; + fenceLength = 0; + } + + output.push(""); + } + + return output.join("\n").replace(/(`+)([^`\n]|`(?!\1))*\1/g, ""); + } + + const strippedBody = stripMarkdownCode(body.replace(//g, "")); const liveLabels = await github.paginate(github.rest.issues.listLabelsOnIssue, { owner, repo, From 19dabaf44705804cd8a89319c638e80cbcb557e5 Mon Sep 17 00:00:00 2001 From: ndycode Date: Sun, 15 Mar 
2026 23:31:49 +0800 Subject: [PATCH 36/38] ci: fix governance and docs-check edge cases --- .github/workflows/pr-governance.yml | 47 ++++++++++++++++++++++++----- scripts/ci/docs-check.js | 7 ++--- test/docs-check.test.ts | 12 ++++++++ 3 files changed, 55 insertions(+), 11 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 27af6eaa..4d2acf4f 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -120,14 +120,47 @@ jobs: ]; const missingSections = requiredHeadings.filter((pattern) => !pattern.test(strippedBody)); - const complianceChecked = /- \[x\] This change stays within the repository scope and OpenAI Terms of Service expectations\./i.test(strippedBody); + function getSectionContent(markdown, heading) { + const headingPattern = new RegExp(`^## ${heading}\\s*$`, "m"); + const match = headingPattern.exec(markdown); + if (!match) return ""; + + const sectionStart = match.index + match[0].length; + const nextHeadingMatch = /^##\s+/m.exec(markdown.slice(sectionStart)); + const sectionEnd = nextHeadingMatch ? 
sectionStart + nextHeadingMatch.index : markdown.length; + return markdown.slice(sectionStart, sectionEnd); + } + + function hasCheckedChecklistLine(section, itemText) { + const escapedText = itemText.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + return new RegExp(`^\\s*-\\s*\\[x\\]\\s+${escapedText}\\s*$`, "im").test(section); + } + + const docsImpactSection = getSectionContent(strippedBody, "Docs Impact"); + const complianceSection = getSectionContent(strippedBody, "Compliance Confirmation"); + const complianceChecked = hasCheckedChecklistLine( + complianceSection, + "This change stays within the repository scope and OpenAI Terms of Service expectations.", + ); const docsImpactChecked = - /- \[x\] README or docs updated/i.test(strippedBody) || - /- \[x\] No docs changes needed/i.test(strippedBody); - const officialAuthChecked = /- \[x\] This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior\./i.test(strippedBody); - const testsDocsChecked = /- \[x\] I updated tests and documentation when the change affected users, maintainers, or repository behavior\./i.test(strippedBody); - const noLiveRequiredChecked = /- \[x\] No auth, request-routing, or storage paths changed\./i.test(strippedBody); - const manualLiveChecked = /- \[x\] I manually tested with a real ChatGPT Plus\/Pro account\./i.test(strippedBody); + hasCheckedChecklistLine(docsImpactSection, "README or docs updated") || + hasCheckedChecklistLine(docsImpactSection, "No docs changes needed"); + const officialAuthChecked = hasCheckedChecklistLine( + complianceSection, + "This change uses official authentication flows only and does not add bypass, scraping, or credential-sharing behavior.", + ); + const testsDocsChecked = hasCheckedChecklistLine( + complianceSection, + "I updated tests and documentation when the change affected users, maintainers, or repository behavior.", + ); + const noLiveRequiredChecked = hasCheckedChecklistLine( + complianceSection, 
+ "No auth, request-routing, or storage paths changed.", + ); + const manualLiveChecked = hasCheckedChecklistLine( + complianceSection, + "I manually tested with a real ChatGPT Plus/Pro account.", + ); const liveVerificationLabelName = "needs-live-verification"; const maintainerVerifiedLabelName = "maintainer-live-verified"; const invalidateMaintainerVerification = diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index b514f170..a34445a2 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -1,12 +1,10 @@ -#!/usr/bin/env node - import { execFileSync } from "node:child_process"; import { readFileSync } from "node:fs"; import { access, readdir, readFile, stat } from "node:fs/promises"; import path from "node:path"; import { fileURLToPath } from "node:url"; -const DEFAULT_FILES = ["README.md", "CONTRIBUTING.md", "SECURITY.md", "CHANGELOG.md"]; +const DEFAULT_FILES = ["AGENTS.md", "CHANGELOG.md", "CODE_OF_CONDUCT.md", "CONTRIBUTING.md", "README.md", "SECURITY.md"]; const DEFAULT_DIRS = [".github", "config", "docs", "test"]; const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); @@ -242,9 +240,10 @@ function extractLinkTarget(markdown, startIndex) { } export function extractMarkdownLinks(markdown) { + const fencedCodePattern = new RegExp("(?:`{3}|~{3})[\\s\\S]*?(?:`{3}|~{3})", "g"); const stripped = markdown .replace(//g, "") - .replace(/```[\s\S]*?```/g, "\n") + .replace(fencedCodePattern, "\n") .replace(/`[^`\n]+`/g, "`code`"); const openerPattern = /!?\[[^\]]*]\(/g; const referencePattern = /!?\[([^\]]+)]\[([^\]]*)]/g; diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index e9fa12c1..2f47ac41 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -291,6 +291,14 @@ describe("docs-check script", () => { expect(extractMarkdownLinks(markdown)).toEqual(["./targets/exists.md"]); }); + it("ignores links that 
only appear inside tilde-fenced code blocks", async () => { + const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); + + const markdown = "~~~bash\n[missing](./targets/missing.md)\n~~~\n[Config Guide](./targets/exists.md)\n"; + + expect(extractMarkdownLinks(markdown)).toEqual(["./targets/exists.md"]); + }); + it("accepts angle-bracket targets that include an optional title", async () => { const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile } = await createDocsFixture('[Config Guide](<./targets/exists.md> "Config target")\n'); @@ -304,7 +312,9 @@ describe("docs-check script", () => { it("discovers default markdown files and skips ignored directories", async () => { const { collectMarkdownFiles } = await import("../scripts/ci/docs-check.js"); const { root } = await createRepoFixture({ + "AGENTS.md": "# Instructions\n", "README.md": "# Root\n", + "CODE_OF_CONDUCT.md": "# Code of Conduct\n", "CONTRIBUTING.md": "# Contributing\n", "SECURITY.md": "# Security\n", "CHANGELOG.md": "# Changelog\n", @@ -327,7 +337,9 @@ describe("docs-check script", () => { expect(relativeDiscoveredFiles).toEqual([ ".github/pull_request_template.md", + "AGENTS.md", "CHANGELOG.md", + "CODE_OF_CONDUCT.md", "CONTRIBUTING.md", "README.md", "SECURITY.md", From 3a967e1b37b589008607bca3e843efaabbf573b3 Mon Sep 17 00:00:00 2001 From: ndycode Date: Mon, 16 Mar 2026 00:23:48 +0800 Subject: [PATCH 37/38] fix: harden governance summary and docs-check retries --- .github/workflows/pr-governance.yml | 9 ++++- test/docs-check.test.ts | 56 ++++++++++++++++++++++++++--- 2 files changed, 59 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 4d2acf4f..7fce985d 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -289,7 +289,14 @@ jobs: summaryLines.push(`- Risky paths: ${riskyPaths.join(", ")}`); } - await 
core.summary.addRaw(summaryLines.join("\n")).write(); + try { + await core.summary.addRaw(summaryLines.join("\n")).write(); + } catch (summaryError) { + const message = + summaryError instanceof Error ? summaryError.message : String(summaryError); + core.warning(`Failed to write PR governance step summary: ${message}`); + console.log(summaryLines.join("\n")); + } if (failures.length > 0) { core.setFailed(failures.join("\n")); diff --git a/test/docs-check.test.ts b/test/docs-check.test.ts index 2f47ac41..2a8deb63 100644 --- a/test/docs-check.test.ts +++ b/test/docs-check.test.ts @@ -9,6 +9,9 @@ import { afterEach, describe, expect, it } from "vitest"; const tempRoots: string[] = []; const TEMP_CLEANUP_DELAYS_MS = [100, 500, 2000]; const TEMP_CLEANUP_ATTEMPTS = TEMP_CLEANUP_DELAYS_MS.length + 1; +const DOCS_CHECK_SUBPROCESS_RETRY_DELAYS_MS = [100, 500, 2000]; +const DOCS_CHECK_SUBPROCESS_ATTEMPTS = + DOCS_CHECK_SUBPROCESS_RETRY_DELAYS_MS.length + 1; const DOCS_CHECK_SUBPROCESS_TIMEOUT_MS = 15_000; const execFileAsync = promisify(execFile); @@ -43,6 +46,49 @@ async function writeFixtureFiles(root: string, files: Record) { } } +function isTransientDocsCheckSubprocessError(error: unknown) { + const details = [error instanceof Error ? 
error.message : String(error)]; + if (error && typeof error === "object") { + const typedError = error as { stderr?: string; stdout?: string }; + if (typedError.stderr) details.push(typedError.stderr); + if (typedError.stdout) details.push(typedError.stdout); + } + + return /\b(EPERM|EBUSY|EACCES)\b/i.test(details.join("\n")); +} + +async function runDocsCheckSubprocess( + scriptPath: string, + args: string[], + options: Parameters[2], +) { + for ( + let attempt = 1; + attempt <= DOCS_CHECK_SUBPROCESS_ATTEMPTS; + attempt += 1 + ) { + try { + return await execFileAsync(process.execPath, [scriptPath, ...args], options); + } catch (error) { + if ( + process.platform !== "win32" || + attempt === DOCS_CHECK_SUBPROCESS_ATTEMPTS || + !isTransientDocsCheckSubprocessError(error) + ) { + throw error; + } + + await delay( + DOCS_CHECK_SUBPROCESS_RETRY_DELAYS_MS[attempt - 1] ?? + DOCS_CHECK_SUBPROCESS_RETRY_DELAYS_MS.at(-1) ?? + 100, + ); + } + } + + throw new Error("docs-check subprocess retry loop exhausted unexpectedly"); +} + async function createRepoFixture(files: Record) { // docs-check resolves local links against process.cwd(), so fixtures must live // under the repo root for relative-link validation to exercise real behavior. 
@@ -386,7 +432,7 @@ describe("docs-check script", () => { const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); - const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + const { stdout, stderr } = await runDocsCheckSubprocess(scriptPath, [relativeFixtureRoot], { cwd: process.cwd(), timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); @@ -400,7 +446,7 @@ describe("docs-check script", () => { const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); const relativeFixtureRoot = path.relative(process.cwd(), root).replace(/\\/g, "/"); - const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + const { stdout, stderr } = await runDocsCheckSubprocess(scriptPath, [relativeFixtureRoot], { cwd: process.cwd(), timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); @@ -417,7 +463,7 @@ describe("docs-check script", () => { }); const scriptPath = path.resolve(process.cwd(), "scripts/ci/docs-check.js"); - const { stdout, stderr } = await execFileAsync(process.execPath, [scriptPath], { + const { stdout, stderr } = await runDocsCheckSubprocess(scriptPath, [], { cwd: root, timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); @@ -435,7 +481,7 @@ describe("docs-check script", () => { let failure: (Error & { code?: number; stderr?: string; stdout?: string }) | null = null; try { - await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + await runDocsCheckSubprocess(scriptPath, [relativeFixtureRoot], { cwd: process.cwd(), timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); @@ -463,7 +509,7 @@ describe("docs-check script", () => { let failure: (Error & { code?: number; stderr?: string; stdout?: string }) | null = null; try { - await execFileAsync(process.execPath, [scriptPath, relativeFixtureRoot], { + await runDocsCheckSubprocess(scriptPath, [relativeFixtureRoot], { cwd: 
process.cwd(), timeout: DOCS_CHECK_SUBPROCESS_TIMEOUT_MS, }); From d4974b533254e0cd8f5746b81fc65df3732aef44 Mon Sep 17 00:00:00 2001 From: ndycode Date: Mon, 16 Mar 2026 05:53:35 +0800 Subject: [PATCH 38/38] Harden docs-check fence parsing --- .github/workflows/pr-governance.yml | 11 ++-- scripts/ci/docs-check.js | 92 +++++++++++++++++++++-------- test/docs-check.test.ts | 67 +++++++++++++++++++++ 3 files changed, 139 insertions(+), 31 deletions(-) diff --git a/.github/workflows/pr-governance.yml b/.github/workflows/pr-governance.yml index 7fce985d..6ca8f3fc 100644 --- a/.github/workflows/pr-governance.yml +++ b/.github/workflows/pr-governance.yml @@ -188,13 +188,12 @@ jobs: } } - await ensureLabel( - maintainerVerifiedLabelName, - "0e8a16", - "Maintainer completed independent live ChatGPT Plus/Pro verification", - ); - if (liveVerificationRequired) { + await ensureLabel( + maintainerVerifiedLabelName, + "0e8a16", + "Maintainer completed independent live ChatGPT Plus/Pro verification", + ); await ensureLabel( liveVerificationLabelName, "b60205", diff --git a/scripts/ci/docs-check.js b/scripts/ci/docs-check.js index a34445a2..c5b8ef1d 100644 --- a/scripts/ci/docs-check.js +++ b/scripts/ci/docs-check.js @@ -10,6 +10,7 @@ const MARKDOWN_EXTENSIONS = new Set([".md", ".markdown"]); const IGNORED_DIRS = new Set([".git", ".github/workflows", ".omx", "dist", "node_modules", "tmp"]); const MARKDOWN_PATH_ESCAPE_PATTERN = /\\([\x20-\x2F\x3A-\x40\x5B-\x60\x7B-\x7E])/g; const __filename = fileURLToPath(import.meta.url); +const repositorySlugCache = new Map(); function getRootDir() { return process.cwd(); @@ -54,30 +55,40 @@ function extractRepositorySlug(repositoryValue) { function getRepositorySlug(rootDir = getRootDir()) { const githubRepository = process.env.GITHUB_REPOSITORY?.trim(); - if (githubRepository && /^[^/]+\/[^/]+$/.test(githubRepository)) { - return githubRepository; + const cacheKey = `${normalizePathForCompare(rootDir)}::${githubRepository ?? 
""}`; + if (repositorySlugCache.has(cacheKey)) { + return repositorySlugCache.get(cacheKey); } - try { - const packageJson = JSON.parse(readFileSync(path.join(rootDir, "package.json"), "utf8")); - const repositoryField = - typeof packageJson.repository === "string" ? packageJson.repository : packageJson.repository?.url; - const repositoryFromPackage = extractRepositorySlug(repositoryField); - if (repositoryFromPackage) return repositoryFromPackage; - } catch { - // Ignore package.json lookup failures and fall back to git metadata. - } + let repositorySlug = null; + if (githubRepository && /^[^/]+\/[^/]+$/.test(githubRepository)) { + repositorySlug = githubRepository; + } else { + try { + const packageJson = JSON.parse(readFileSync(path.join(rootDir, "package.json"), "utf8")); + const repositoryField = + typeof packageJson.repository === "string" ? packageJson.repository : packageJson.repository?.url; + repositorySlug = extractRepositorySlug(repositoryField); + } catch { + // Ignore package.json lookup failures and fall back to git metadata. 
+ } - try { - const remoteUrl = execFileSync("git", ["config", "--get", "remote.origin.url"], { - cwd: rootDir, - encoding: "utf8", - stdio: ["ignore", "pipe", "ignore"], - }).trim(); - return extractRepositorySlug(remoteUrl); - } catch { - return null; + if (!repositorySlug) { + try { + const remoteUrl = execFileSync("git", ["config", "--get", "remote.origin.url"], { + cwd: rootDir, + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + }).trim(); + repositorySlug = extractRepositorySlug(remoteUrl); + } catch { + repositorySlug = null; + } + } } + + repositorySlugCache.set(cacheKey, repositorySlug); + return repositorySlug; } function normalizeLinkTarget(rawTarget) { @@ -240,11 +251,42 @@ function extractLinkTarget(markdown, startIndex) { } export function extractMarkdownLinks(markdown) { - const fencedCodePattern = new RegExp("(?:`{3}|~{3})[\\s\\S]*?(?:`{3}|~{3})", "g"); - const stripped = markdown - .replace(//g, "") - .replace(fencedCodePattern, "\n") - .replace(/`[^`\n]+`/g, "`code`"); + const stripMarkdownCode = (source) => { + const output = []; + const lines = source.split(/\r?\n/); + let inFence = false; + let fenceChar = ""; + let fenceLength = 0; + + for (const line of lines) { + if (!inFence) { + const openingFence = line.match(/^(?: {0,3})(`{3,}|~{3,})[^\n]*$/); + if (openingFence) { + inFence = true; + fenceChar = openingFence[1][0]; + fenceLength = openingFence[1].length; + output.push(""); + continue; + } + + output.push(line); + continue; + } + + const closingFence = new RegExp(`^(?: {0,3})${fenceChar}{${fenceLength},}[^\\n]*$`); + if (closingFence.test(line)) { + inFence = false; + fenceChar = ""; + fenceLength = 0; + } + + output.push(""); + } + + return output.join("\n").replace(/(`+)([^`\n]|`(?!\1))*\1/g, ""); + }; + + const stripped = stripMarkdownCode(markdown.replace(//g, "")); const openerPattern = /!?\[[^\]]*]\(/g; const referencePattern = /!?\[([^\]]+)]\[([^\]]*)]/g; const shortcutReferencePattern = /(? 
{ } }); + it("memoizes repository metadata per root during workflow badge validation", async () => { + const { validateLink } = await import("../scripts/ci/docs-check.js"); + const { root } = await createExternalFixture({ + "package.json": JSON.stringify( + { + name: "fixture-docs-check", + repository: { + type: "git", + url: "git+https://github.com/example/docs-fixture.git", + }, + }, + null, + 2, + ), + "docs/guide.md": "# Guide\n", + }); + const docsFile = path.join(root, "docs", "guide.md"); + const originalRepository = process.env.GITHUB_REPOSITORY; + delete process.env.GITHUB_REPOSITORY; + + try { + await expect( + validateLink( + docsFile, + "https://github.com/example/docs-fixture/actions/workflows/missing.yml/badge.svg", + root, + ), + ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: missing.yml"); + + await rm(path.join(root, "package.json")); + + await expect( + validateLink( + docsFile, + "https://github.com/example/docs-fixture/actions/workflows/missing.yml/badge.svg", + root, + ), + ).resolves.toBe("Missing workflow referenced by GitHub Actions badge/link: missing.yml"); + } finally { + if (originalRepository === undefined) { + delete process.env.GITHUB_REPOSITORY; + } else { + process.env.GITHUB_REPOSITORY = originalRepository; + } + } + }); + it("skips workflow badge validation when repository metadata cannot be resolved", async () => { const { validateLink } = await import("../scripts/ci/docs-check.js"); const { root } = await createExternalFixture({ @@ -345,6 +392,26 @@ describe("docs-check script", () => { expect(extractMarkdownLinks(markdown)).toEqual(["./targets/exists.md"]); }); + it("ignores links after inner fences that appear inside a larger fenced code block", async () => { + const { extractMarkdownLinks } = await import("../scripts/ci/docs-check.js"); + + const markdown = [ + "````markdown", + "```", + "Use fenced blocks like:", + "```yaml", + "key: value", + "```", + "The link [guide](./targets/missing.md) is 
here.", + "```", + "````", + "[Config Guide](./targets/exists.md)", + "", + ].join("\n"); + + expect(extractMarkdownLinks(markdown)).toEqual(["./targets/exists.md"]); + }); + it("accepts angle-bracket targets that include an optional title", async () => { const { extractMarkdownLinks, validateLink } = await import("../scripts/ci/docs-check.js"); const { docsFile } = await createDocsFixture('[Config Guide](<./targets/exists.md> "Config target")\n');