From 491543be1dc9c8550d387d248e728a0e4cf51bba Mon Sep 17 00:00:00 2001 From: Kenichi Kamiya Date: Wed, 5 Jun 2024 15:05:51 +0900 Subject: [PATCH] Dump resources, especially checks, for debugging (#832) Struggled to settle the design of this feature; see GH-832 for the discussion. Finally I **removed** the following ideas - Output only in debug mode.\ Debug mode is not helpful because it also dumps the internals of `setOutput`.\ However, now this action only outputs the filepath, so we can reconsider this option. - Provide a flag to enable the dump.\ I don't want to add many options like this, so instead I reduced the recorded checks to only the first and last polling to keep the filesize small. - Output the detail instead of the filepath.\ Multiline text is not useful in GITHUB_OUTPUT, and it has a small size limit. - Include the webhook payload.\ I hesitated to include the payload even though GitHub states that this schema does not include tokens. - Upload the artifact in this action and remove the dump file.\ To keep the action focused on its job, I want to leave artifact handling out of this action. 
--- .github/workflows/outputs.yml | 56 +++++++++++++++++++++++++ README.md | 8 ++++ action.yml | 6 +++ dist/index.js | 66 +++++++++++++++++------------- snapshots/README.md | 4 ++ snapshots/run-9281068681/README.md | 2 + src/input.ts | 17 ++++---- src/main.ts | 48 +++++++++++++++++----- src/schema.ts | 2 + 9 files changed, 163 insertions(+), 46 deletions(-) create mode 100644 .github/workflows/outputs.yml create mode 100644 snapshots/README.md diff --git a/.github/workflows/outputs.yml b/.github/workflows/outputs.yml new file mode 100644 index 00000000..f158c020 --- /dev/null +++ b/.github/workflows/outputs.yml @@ -0,0 +1,56 @@ +name: Outputs +on: + push: + branches: [main] + paths: + - '.github/workflows/outputs.yml' + - 'action.yml' + - 'dist/**' + pull_request: + paths: + - '.github/workflows/outputs.yml' + - 'action.yml' + - 'dist/**' + workflow_dispatch: + +# Disable all permissions in workflow global as to setup clean room +# However PRs will have read permissions because this project is on a public repository +permissions: {} + +jobs: + echo: + runs-on: ubuntu-24.04 + timeout-minutes: 5 + steps: + - run: echo ':)' + # Wait longer than min-interval-seconds + - run: sleep 8 + - run: echo '🎉' + dump: + runs-on: ubuntu-24.04 + timeout-minutes: 5 + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - uses: ./ + id: wait + with: + retry-method: 'equal_intervals' + wait-seconds-before-first-polling: '1' + min-interval-seconds: '3' + skip-same-workflow: 'false' + wait-list: | + [ + { + "workflowFile": "outputs.yml", + "jobName": "echo", + "optional": false + } + ] + - name: Make sure there is the file + run: | + ls -alh '${{ steps.wait.outputs.dump }}' + - name: Upload dumps as an artifact + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + with: + name: 'outputs-${{ github.job }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}' + path: '${{ steps.wait.outputs.dump }}' diff 
--git a/README.md b/README.md index b4ef9675..451b65b1 100644 --- a/README.md +++ b/README.md @@ -89,6 +89,14 @@ permissions: actions: read ``` +## outputs. + +(Since v3.4.0) + +- `dump`\ + A file path for collected resources which keeps fields than logged.\ + This data is only provided for debugging purposes, so the schema is not defined. + ## Examples I'm using this action for auto-merging bot PRs and wait for deploy.\ diff --git a/action.yml b/action.yml index 3f983bb9..3caf6267 100644 --- a/action.yml +++ b/action.yml @@ -45,6 +45,12 @@ inputs: description: 'Avoid http requests for tests' required: false default: 'false' +outputs: + dump: + description: | + A file path for collected resources which keeps fields than logged. + This data is only provided for debugging purposes, so the schema is not defined. + runs: using: 'node20' main: 'dist/index.js' diff --git a/dist/index.js b/dist/index.js index 14876255..98c962cb 100644 --- a/dist/index.js +++ b/dist/index.js @@ -726,7 +726,7 @@ var require_tunnel = __commonJS({ connectOptions.headers = connectOptions.headers || {}; connectOptions.headers["Proxy-Authorization"] = "Basic " + new Buffer(connectOptions.proxyAuth).toString("base64"); } - debug3("making CONNECT request"); + debug("making CONNECT request"); var connectReq = self.request(connectOptions); connectReq.useChunkedEncodingByDefault = false; connectReq.once("response", onResponse); @@ -746,7 +746,7 @@ var require_tunnel = __commonJS({ connectReq.removeAllListeners(); socket.removeAllListeners(); if (res.statusCode !== 200) { - debug3( + debug( "tunneling socket could not be established, statusCode=%d", res.statusCode ); @@ -758,7 +758,7 @@ var require_tunnel = __commonJS({ return; } if (head.length > 0) { - debug3("got illegal response body from proxy"); + debug("got illegal response body from proxy"); socket.destroy(); var error2 = new Error("got illegal response body from proxy"); error2.code = "ECONNRESET"; @@ -766,13 +766,13 @@ var require_tunnel = 
__commonJS({ self.removeSocket(placeholder); return; } - debug3("tunneling connection has established"); + debug("tunneling connection has established"); self.sockets[self.sockets.indexOf(placeholder)] = socket; return cb(socket); } function onError(cause) { connectReq.removeAllListeners(); - debug3( + debug( "tunneling socket could not be established, cause=%s\n", cause.message, cause.stack @@ -834,9 +834,9 @@ var require_tunnel = __commonJS({ } return target; } - var debug3; + var debug; if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug3 = function() { + debug = function() { var args = Array.prototype.slice.call(arguments); if (typeof args[0] === "string") { args[0] = "TUNNEL: " + args[0]; @@ -846,10 +846,10 @@ var require_tunnel = __commonJS({ console.error.apply(console, args); }; } else { - debug3 = function() { + debug = function() { }; } - exports.debug = debug3; + exports.debug = debug; } }); @@ -18878,7 +18878,7 @@ var require_core = __commonJS({ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); } exports.getBooleanInput = getBooleanInput2; - function setOutput(name, value) { + function setOutput2(name, value) { const filePath = process.env["GITHUB_OUTPUT"] || ""; if (filePath) { return file_command_1.issueFileCommand("OUTPUT", file_command_1.prepareKeyValueMessage(name, value)); @@ -18886,7 +18886,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(os2.EOL); command_1.issueCommand("set-output", { name }, utils_1.toCommandValue(value)); } - exports.setOutput = setOutput; + exports.setOutput = setOutput2; function setCommandEcho(enabled) { command_1.issue("echo", enabled ? 
"on" : "off"); } @@ -18896,14 +18896,14 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); error2(message); } exports.setFailed = setFailed2; - function isDebug3() { + function isDebug() { return process.env["RUNNER_DEBUG"] === "1"; } - exports.isDebug = isDebug3; - function debug3(message) { + exports.isDebug = isDebug; + function debug(message) { command_1.issueCommand("debug", {}, message); } - exports.debug = debug3; + exports.debug = debug; function error2(message, properties = {}) { command_1.issueCommand("error", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } @@ -31107,8 +31107,11 @@ var Options = z2.object({ path: ["initialDuration", "waitList"] } ); +var Path = z2.string().min(1); // src/input.ts +import { mkdtempSync } from "fs"; +import { join } from "path"; function parseInput() { const { repo, @@ -31125,12 +31128,11 @@ function parseInput() { if (typeof prSha === "string") { commitSha = prSha; } else { - if ((0, import_core.isDebug)()) { - (0, import_core.debug)(JSON.stringify({ label: "PullRequestContext", pr: pr2 }, null, 2)); - } (0, import_core.error)("github context has unexpected format: missing context.payload.pull_request.head.sha"); } } + const tempRoot = Path.parse(process.env["RUNNER_TEMP"]); + const tempDir = mkdtempSync(join(tempRoot, "wait-other-jobs-")); const waitSecondsBeforeFirstPolling = parseInt( (0, import_core.getInput)("wait-seconds-before-first-polling", { required: true, trimWhitespace: true }), 10 @@ -31161,7 +31163,7 @@ function parseInput() { const trigger = { ...repo, ref: commitSha, runId, jobName: job, eventName }; const githubToken = (0, import_core.getInput)("github-token", { required: true, trimWhitespace: false }); (0, import_core.setSecret)(githubToken); - return { trigger, options, githubToken }; + return { trigger, options, githubToken, tempDir }; } // 
node_modules/.pnpm/universal-user-agent@7.0.2/node_modules/universal-user-agent/index.js @@ -32346,7 +32348,7 @@ async function fetchChecks(token, trigger) { } // src/report.ts -import { join, relative } from "path"; +import { join as join2, relative } from "path"; // src/util.ts function groupBy(items, callback) { @@ -32400,7 +32402,7 @@ function summarize(check, trigger) { } function getSummaries(checks, trigger) { return checks.map((check) => summarize(check, trigger)).toSorted( - (a2, b2) => join(a2.workflowBasename, a2.jobName).localeCompare(join(b2.workflowBasename, b2.jobName)) + (a2, b2) => join2(a2.workflowBasename, a2.jobName).localeCompare(join2(b2.workflowBasename, b2.jobName)) ); } function seekWaitList(summaries, waitList, elapsed) { @@ -32532,6 +32534,8 @@ function getInterval(method, leastInterval, attempts) { } // src/main.ts +import { join as join3 } from "path"; +import { writeFileSync } from "fs"; function colorize(severity, message) { switch (severity) { case "error": { @@ -32555,8 +32559,9 @@ function colorize(severity, message) { async function run() { const startedAt = performance.now(); (0, import_core3.startGroup)("Parameters"); - const { trigger, options, githubToken } = parseInput(); + const { trigger, options, githubToken, tempDir } = parseInput(); (0, import_core3.info)(JSON.stringify( + // Do NOT include payload { trigger, startedAt, @@ -32572,6 +32577,8 @@ async function run() { if (options.isDryRun) { return; } + const dumper = { trigger, options, results: {} }; + const dumpFile = join3(tempDir, "dump.json"); for (; ; ) { attempts += 1; if (attempts > options.attemptLimits) { @@ -32589,15 +32596,15 @@ async function run() { const elapsed = mr.Duration.from({ milliseconds: Math.ceil(performance.now() - startedAt) }); (0, import_core3.startGroup)(`Polling ${attempts}: ${(/* @__PURE__ */ new Date()).toISOString()} # total elapsed ${readableDuration(elapsed)}`); const checks = await fetchChecks(githubToken, trigger); - if ((0, 
import_core3.isDebug)()) { - (0, import_core3.debug)(JSON.stringify({ label: "rawdata", checks, elapsed }, null, 2)); - } const report = generateReport( getSummaries(checks, trigger), trigger, elapsed, options ); + if (attempts === 1) { + dumper.results[attempts] = { elapsed, checks, report }; + } for (const summary of report.summaries) { const { runStatus, @@ -32613,9 +32620,6 @@ async function run() { `${workflowBasename}(${colorize(severity, jobName)}): [eventName: ${eventName}][runStatus: ${runStatus}][runConclusion: ${runConclusion ?? nullStr}][runURL: ${checkRunUrl}]` ); } - if ((0, import_core3.isDebug)()) { - (0, import_core3.debug)(JSON.stringify({ label: "filtered", report }, null, 2)); - } const { ok, done, logs } = report; for (const { severity, message, resource } of logs) { (0, import_core3.info)(colorize(severity, message)); @@ -32637,6 +32641,9 @@ async function run() { } (0, import_core3.endGroup)(); if (shouldStop) { + if (attempts !== 1) { + dumper.results[attempts] = { elapsed, checks, report }; + } if (ok) { (0, import_core3.info)(colorize("notice", "all jobs passed")); } else { @@ -32645,6 +32652,9 @@ async function run() { break; } } + writeFileSync(dumpFile, JSON.stringify(dumper, null, 2)); + (0, import_core3.setOutput)("dump", dumpFile); + (0, import_core3.info)(colorize("info", `Resources are saved in ${dumpFile}`)); } void run(); /*! 
Bundled license information: diff --git a/snapshots/README.md b/snapshots/README.md new file mode 100644 index 00000000..ed309f6a --- /dev/null +++ b/snapshots/README.md @@ -0,0 +1,4 @@ +# How to get snapshot of resources + +Since v3.4.0, it dumps data and outputs the path\ +See the section in [README](../README.md#outputsoutput_id) diff --git a/snapshots/run-9281068681/README.md b/snapshots/run-9281068681/README.md index 680ec0a7..17b30837 100644 --- a/snapshots/run-9281068681/README.md +++ b/snapshots/run-9281068681/README.md @@ -1,5 +1,7 @@ # How to extract checks from debug log +(This note is outdated) + Source: https://github.com/kachick/wait-other-jobs/actions/runs/9281068681 This file recorded snapshot checks for GH-820 in the debug log diff --git a/src/input.ts b/src/input.ts index ac5e0305..ae89ba67 100644 --- a/src/input.ts +++ b/src/input.ts @@ -1,9 +1,11 @@ -import { debug, getInput, getBooleanInput, setSecret, isDebug, error } from '@actions/core'; +import { getInput, getBooleanInput, setSecret, error } from '@actions/core'; import { context } from '@actions/github'; -import { Durationable, Options, Trigger } from './schema.ts'; +import { Durationable, Options, Path, Trigger } from './schema.ts'; +import { mkdtempSync } from 'fs'; +import { join } from 'path'; -export function parseInput(): { trigger: Trigger; options: Options; githubToken: string } { +export function parseInput(): { trigger: Trigger; options: Options; githubToken: string; tempDir: string } { const { repo, payload, @@ -19,13 +21,12 @@ export function parseInput(): { trigger: Trigger; options: Options; githubToken: if (typeof prSha === 'string') { commitSha = prSha; } else { - if (isDebug()) { - // Do not print secret even for debug code - debug(JSON.stringify({ label: 'PullRequestContext', pr }, null, 2)); - } error('github context has unexpected format: missing context.payload.pull_request.head.sha'); } } + // Do not use `tmpdir` from `node:os` in action: See 
https://github.com/actions/toolkit/issues/518 + const tempRoot = Path.parse(process.env['RUNNER_TEMP']); + const tempDir = mkdtempSync(join(tempRoot, 'wait-other-jobs-')); const waitSecondsBeforeFirstPolling = parseInt( getInput('wait-seconds-before-first-polling', { required: true, trimWhitespace: true }), @@ -62,5 +63,5 @@ export function parseInput(): { trigger: Trigger; options: Options; githubToken: const githubToken = getInput('github-token', { required: true, trimWhitespace: false }); setSecret(githubToken); - return { trigger, options, githubToken }; + return { trigger, options, githubToken, tempDir }; } diff --git a/src/main.ts b/src/main.ts index 80df1a16..26fa0921 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,4 +1,4 @@ -import { debug, info, setFailed, isDebug, startGroup, endGroup } from '@actions/core'; +import { info, setFailed, startGroup, endGroup, setOutput } from '@actions/core'; import styles from 'ansi-styles'; function colorize(severity: Severity, message: string): string { @@ -24,15 +24,33 @@ function colorize(severity: Severity, message: string): string { import { parseInput } from './input.ts'; import { fetchChecks } from './github-api.ts'; -import { Severity, generateReport, getSummaries, readableDuration } from './report.ts'; +import { Report, Severity, generateReport, getSummaries, readableDuration } from './report.ts'; import { getInterval, wait } from './wait.ts'; import { Temporal } from 'temporal-polyfill'; +import { Check, Options, Trigger } from './schema.ts'; +import { join } from 'path'; +import { writeFileSync } from 'fs'; + +interface Result { + elapsed: Temporal.Duration; + checks: Check[]; + report: Report; +} + +// `payload` is intentionally omitted for now: https://github.com/kachick/wait-other-jobs/pull/832#discussion_r1625952633 +interface Dumper { + trigger: Trigger; + options: Options; + // - Do not include all pollings in one file, it might be large size + results: Record; +} async function run(): Promise { const 
startedAt = performance.now(); startGroup('Parameters'); - const { trigger, options, githubToken } = parseInput(); + const { trigger, options, githubToken, tempDir } = parseInput(); info(JSON.stringify( + // Do NOT include payload { trigger, startedAt, @@ -50,6 +68,10 @@ async function run(): Promise { return; } + // - Do not include secret even in debug mode + const dumper: Dumper = { trigger, options, results: {} }; + const dumpFile = join(tempDir, 'dump.json'); + for (;;) { attempts += 1; if (attempts > options.attemptLimits) { @@ -70,9 +92,7 @@ async function run(): Promise { const elapsed = Temporal.Duration.from({ milliseconds: Math.ceil(performance.now() - startedAt) }); startGroup(`Polling ${attempts}: ${(new Date()).toISOString()} # total elapsed ${readableDuration(elapsed)}`); const checks = await fetchChecks(githubToken, trigger); - if (isDebug()) { - debug(JSON.stringify({ label: 'rawdata', checks, elapsed }, null, 2)); - } + const report = generateReport( getSummaries(checks, trigger), trigger, @@ -80,6 +100,10 @@ async function run(): Promise { options, ); + if (attempts === 1) { + dumper.results[attempts] = { elapsed: elapsed, checks, report }; + } + for (const summary of report.summaries) { const { runStatus, @@ -101,10 +125,6 @@ async function run(): Promise { ); } - if (isDebug()) { - debug(JSON.stringify({ label: 'filtered', report }, null, 2)); - } - const { ok, done, logs } = report; for (const { severity, message, resource } of logs) { @@ -130,6 +150,10 @@ async function run(): Promise { endGroup(); if (shouldStop) { + if (attempts !== 1) { + dumper.results[attempts] = { elapsed, checks, report }; + } + if (ok) { info(colorize('notice', 'all jobs passed')); } else { @@ -139,6 +163,10 @@ async function run(): Promise { break; } } + + writeFileSync(dumpFile, JSON.stringify(dumper, null, 2)); + setOutput('dump', dumpFile); + info(colorize('info', `Resources are saved in ${dumpFile}`)); } void run(); diff --git a/src/schema.ts b/src/schema.ts 
index 8857ad79..e3d4172c 100644 --- a/src/schema.ts +++ b/src/schema.ts @@ -122,6 +122,8 @@ export const Options = z.object({ }, ); +export const Path = z.string().min(1); + export type Options = z.infer; export interface Trigger {