Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ci(update): port update.sh to nodejs #1368

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 0 additions & 126 deletions functions.sh
Original file line number Diff line number Diff line change
@@ -153,44 +153,6 @@ function get_versions() {
fi
}

# Succeeds (0) when the variant name starts with "alpine", fails (1) otherwise.
function is_alpine() {
	local variant
	variant=${1}
	shift

	case "${variant}" in
		alpine*)
			return 0
			;;
		*)
			return 1
			;;
	esac
}

# Succeeds (0) when the variant exactly matches one of the configured
# debian codenames (read via get_config), fails (1) otherwise.
function is_debian() {
	local variant
	variant=$1
	shift

	# debian_versions is a space-separated list in the repo config.
	IFS=' ' read -ra debianVersions <<< "$(get_config "./" "debian_versions")"
	for d in "${debianVersions[@]}"; do
		if [ "${variant}" = "${d}" ]; then
			return 0
		fi
	done
	return 1
}

# Succeeds (0) when the variant is "<debian-codename>-slim" for one of the
# configured debian codenames, fails (1) otherwise.
function is_debian_slim() {
	local variant
	variant=$1
	shift

	IFS=' ' read -ra debianVersions <<< "$(get_config "./" "debian_versions")"
	for d in "${debianVersions[@]}"; do
		if [ "${variant}" = "${d}-slim" ]; then
			return 0
		fi
	done
	return 1
}

function get_fork_name() {
local version
version=$1
@@ -202,24 +164,6 @@ function get_fork_name() {
fi
}

# Prints the full image tag for a (variant, tag) pair.
# An empty or "default" variant yields the bare tag; any other variant is
# appended as a "-<variant>" suffix.
function get_full_tag() {
	local variant
	local tag
	local full_tag
	variant="$1"
	shift
	tag="$1"
	shift

	case "${variant}" in
		"" | default)
			full_tag="${tag}"
			;;
		*)
			full_tag="${tag}-${variant}"
			;;
	esac
	echo "${full_tag}"
}

function get_full_version() {
local version
version=$1
@@ -246,25 +190,6 @@ function get_major_minor_version() {
echo "$(echo "${fullversion}" | cut -d'.' -f1).$(echo "${fullversion}" | cut -d'.' -f2)"
}

# Prints the directory path for a (version, variant) pair.
# Only the "default" variant maps to the bare version directory; everything
# else (including an empty variant) becomes "<version>/<variant>".
# NOTE(review): an empty variant therefore yields "<version>/" with a
# trailing slash, exactly as the original did — confirm that is intended.
function get_path() {
	local version
	local variant
	local path
	version="$1"
	shift
	variant="$1"
	shift

	if [ "${variant}" = "default" ]; then
		path="${version}"
	else
		path="${version}/${variant}"
	fi
	echo "${path}"
}

function get_tag() {
local version
version=$1
@@ -308,54 +233,3 @@ function sort_versions() {

echo "${sorted[@]}"
}

# Prints a git commit range built from up to two arguments:
#   no args            -> "HEAD~1..HEAD"
#   one range "a..b"   -> passed through unchanged
#   one commit "c"     -> "c~1..c"
#   two commits "a b"  -> "a..b"
function commit_range() {
	local commit_id_end=${1}
	shift
	local commit_id_start=${1}

	if [ -z "${commit_id_start}" ]; then
		if [ -z "${commit_id_end}" ]; then
			echo "HEAD~1..HEAD"
		elif [[ "${commit_id_end}" == *..* ]]; then
			# BUGFIX: the original used [[ ... =~ .. ]], where ".." is a regex
			# matching ANY two characters, so every id of length >= 2 was
			# mistaken for a range. Use a literal glob match instead.
			echo "${commit_id_end}"
		else
			echo "${commit_id_end}~1..${commit_id_end}"
		fi
	else
		echo "${commit_id_end}..${commit_id_start}"
	fi
}

# Returns 0 when any file under a version directory changed in the given
# commit range (arguments are forwarded to commit_range), 1 otherwise.
function images_updated() {
	local commit_range
	local versions
	local images_changed

	commit_range="$(commit_range "$@")"

	# Split get_versions output into an array.
	# NOTE(review): the inner IFS=',' only affects the subshell running
	# get_versions — presumably its output is comma-joined; verify against
	# the get_versions definition.
	IFS=' ' read -ra versions <<< "$(
		IFS=','
		get_versions
	)"
	# Limit the diff to the version directories themselves.
	images_changed=$(git diff --name-only "${commit_range}" "${versions[@]}")

	if [ -z "${images_changed}" ]; then
		return 1
	fi
	return 0
}

# Returns 0 when any file matching test* changed in the given commit range
# (arguments are forwarded to commit_range), 1 otherwise.
function tests_updated() {
	local commit_range
	local test_changed

	commit_range="$(commit_range "$@")"

	test_changed=$(git diff --name-only "${commit_range}" test*)

	# Exit status of the test doubles as the function's return value.
	[ -n "${test_changed}" ]
}
23 changes: 2 additions & 21 deletions genMatrix.js
Original file line number Diff line number Diff line change
@@ -1,31 +1,15 @@
'use strict';
const path = require('path');
const fs = require('fs');
const { getAllDockerfiles, getDockerfileNodeVersion } = require('./utils');

const testFiles = [
'genMatrix.js',
'.github/workflows/build-test.yml',
];

const nodeDirRegex = /^\d+$/;

// True when any of the changed files is one of the CI-critical test files.
const areTestFilesChanged = (changedFiles) => {
  return changedFiles.some((file) => testFiles.includes(file));
};

// Returns the absolute paths of the immediate subdirectories of `parent`.
const getChildDirectories = (parent) => {
  const entries = fs.readdirSync(parent, { withFileTypes: true });
  return entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => path.resolve(parent, entry.name));
};

// Keeps only the child directories whose basename is a bare major version
// number (e.g. "18"). The name keeps the original "Verion" typo so existing
// callers continue to resolve.
const getNodeVerionDirs = (base) => {
  const children = getChildDirectories(base);
  return children.filter((childPath) => nodeDirRegex.test(path.basename(childPath)));
};

// Returns the paths of Dockerfiles that are at: base/*/Dockerfile
const getDockerfilesInChildDirs = (base) => {
  return getChildDirectories(base).map((dir) => path.resolve(dir, 'Dockerfile'));
};

const getAllDockerfiles = (base) => getNodeVerionDirs(base).flatMap(getDockerfilesInChildDirs);

const getAffectedDockerfiles = (filesAdded, filesModified, filesRenamed) => {
const files = [
...filesAdded,
@@ -52,13 +36,10 @@ const getAffectedDockerfiles = (filesAdded, filesModified, filesRenamed) => {
];
};

// Reads the full node version (e.g. "18.19.0") from a Dockerfile's
// "ENV NODE_VERSION x.y.z" line.
const getFullNodeVersionFromDockerfile = (file) => {
  const contents = fs.readFileSync(file, 'utf8');
  return contents.match(/^ENV NODE_VERSION (\d*\.*\d*\.\d*)/m)[1];
};

const getDockerfileMatrixEntry = (file) => {
const [variant] = path.dirname(file).split(path.sep).slice(-1);

const version = getFullNodeVersionFromDockerfile(file);
const version = getDockerfileNodeVersion(file);

return {
version,
62 changes: 62 additions & 0 deletions update.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
#!/usr/bin/env node
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does this work on Windows? Requiring users to run it as `node update.js` would probably be more portable.

'use strict';
const update = require('./updateLib');

const usage = `
Update the node docker images.
Usage:
./update.js [ OPTIONS ]
OPTIONS:
-h, --help\tthis message
-a, --all\tupdate all images even if no node version update`;

// Prints the CLI usage text to stdout.
const printUsage = () => console.log(usage);

// Runs the update and reports each rewritten Dockerfile, or a "Nothing
// updated" notice when no image needed a change.
const runUpdate = async (updateAll) => {
  const updated = await update(updateAll);

  if (updated.length === 0) {
    console.log('Nothing updated');
    return;
  }

  for (const { file } of updated) {
    console.log('Updated', file);
  }
};

// CLI entry point: no flag updates only outdated images, -a/--all forces a
// full regeneration, -h/--help prints usage; anything else is an error.
const main = async () => {
  const args = process.argv.slice(2);

  if (args.length > 1) {
    printUsage();
    process.exit(1);
  }

  if (args.length === 0) {
    await runUpdate(false);
    return;
  }

  const flag = args[0];
  if (flag === '-a' || flag === '--all') {
    await runUpdate(true);
  } else if (flag === '-h' || flag === '--help') {
    printUsage();
  } else {
    printUsage();
    process.exit(1);
  }
};

// Script entry point: any unhandled error is printed and aborts the
// process with a non-zero exit code.
main().catch((e) => {
  console.error(e);
  process.exit(1);
});
235 changes: 0 additions & 235 deletions update.sh

This file was deleted.

177 changes: 177 additions & 0 deletions updateLib.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
'use strict';
const https = require('https');
const path = require('path');
const { readFileSync, writeFileSync } = require('fs');
const { getAllDockerfiles, getDockerfileNodeVersion } = require('./utils');

// Node release index (used to look up the latest version per major line).
const releaseUrl = 'https://nodejs.org/dist/index.json';
// Plain-text endpoint returning the latest Yarn classic version.
const yarnVersionUrl = 'https://classic.yarnpkg.com/latest-version';

// Enum of the three Dockerfile template flavors.
const templates = Object.freeze({
  alpine: 1,
  debian: 2,
  debianSlim: 3,
});

// Template file used to render each flavor.
const templateFileMap = Object.freeze({
  [templates.alpine]: 'Dockerfile-alpine.template',
  [templates.debian]: 'Dockerfile-debian.template',
  [templates.debianSlim]: 'Dockerfile-slim.template',
});

// Docker Hub repository used for each flavor's FROM line.
const templateRepoMap = Object.freeze({
  [templates.alpine]: 'alpine',
  [templates.debian]: 'buildpack-deps',
  [templates.debianSlim]: 'debian',
});

// Fetches a URL over HTTPS and resolves with the full response body as a
// UTF-8 string. Rejects on network errors and on any non-2xx status code.
const fetchText = (url) => new Promise((resolve, reject) => {
  https.get(url, (res) => {
    const { statusCode } = res;

    if (statusCode < 200 || statusCode >= 300) {
      // Consume response data to free up memory
      res.resume();
      reject(new Error(`Request Failed.\nStatus Code: ${statusCode}`));
      return;
    }

    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => {
      rawData += chunk;
    });

    res.on('end', () => {
      resolve(rawData);
    });
  }).on('error', (e) => {
    reject(e);
  });
  // BUGFIX: the original chained .end() here, but https.get() already calls
  // req.end() automatically; ending the request a second time is redundant
  // and can emit a spurious stream error on newer Node versions.
});

// Fetches a URL and parses the body as JSON.
const fetchJson = (url) => fetchText(url).then((body) => JSON.parse(body));

// nodeVersions is sorted newest-first, so the first entry with a matching
// "<major>." prefix is the latest release of that major line.
const getLatestNodeVersion = (nodeVersions, majorVersion) => {
  const prefix = `${majorVersion}.`;
  return nodeVersions.find((version) => version.startsWith(prefix));
};

// Maps a variant directory name onto one of the three template kinds:
// "alpine*" -> alpine, "*-slim" -> debianSlim, anything else -> debian.
const getTemplate = (variant) => {
  if (variant.startsWith('alpine')) {
    return templates.alpine;
  }
  return variant.endsWith('-slim') ? templates.debianSlim : templates.debian;
};

// Builds the per-Dockerfile metadata record used to decide whether (and
// how) the file should be regenerated. Path layout is <major>/<variant>/Dockerfile.
const getDockerfileMetadata = (nodeVersions, file) => {
  const segments = path.dirname(file).split(path.sep);
  const [nodeMajorVersion, variant] = segments.slice(-2);
  const fileNodeVersion = getDockerfileNodeVersion(file);

  return {
    file,
    variant,
    fileNodeVersion,
    nodeMajorVersion,
    latestVersion: getLatestNodeVersion(nodeVersions, nodeMajorVersion),
    template: getTemplate(variant),
  };
};

// A Dockerfile is outdated when its pinned version differs from the latest.
const isDockerfileOutdated = (metadata) => metadata.fileNodeVersion !== metadata.latestVersion;

// Downloads the nodejs.org release index and returns the version strings
// with the leading "v" stripped (e.g. "18.19.0").
const fetchLatestNodeVersions = async () => {
  const releases = await fetchJson(releaseUrl);
  return releases.map((release) => release.version.slice(1));
};

// Collects metadata for every Dockerfile in the repo; returns all of them
// when updateAll is set, otherwise only the outdated ones.
const findOutdated = async (updateAll) => {
  const nodeVersions = await fetchLatestNodeVersions();

  const metadatas = getAllDockerfiles(__dirname)
    .map((file) => getDockerfileMetadata(nodeVersions, file));

  if (updateAll) {
    return metadatas;
  }
  return metadatas.filter(isDockerfileOutdated);
};

// Reads a newline-separated GPG key list from keys/<basename>.
const getKeys = (basename) => {
  const contents = readFileSync(path.resolve(__dirname, 'keys', basename)).toString();
  return contents.trim().split('\n');
};

// Reads the template file contents for the given template kind.
const readTemplate = (template) => {
  const templatePath = path.resolve(__dirname, templateFileMap[template]);
  return readFileSync(templatePath).toString();
};

// Resolves the FROM image for a variant. Alpine variants drop the "alpine"
// prefix to form the tag (e.g. "alpine3.18" -> "alpine:3.18"); other
// variants use the variant name as the tag directly.
const getBaseImage = ({ template, variant }) => {
  const repo = templateRepoMap[template];
  const tag = template === templates.alpine ? variant.replace(/alpine/, '') : variant;
  return `${repo}:${tag}`;
};

const formatKeys = (keys) => keys.map((key) => `$1${key} \\`).join('\n');

// Renders a Dockerfile from its template text.
//   base      raw template contents
//   metadata  per-Dockerfile record (see getDockerfileMetadata)
// Rewrites the FROM / ENV NODE_VERSION / ENV YARN_VERSION lines, then
// expands the NODE_KEYS / YARN_KEYS placeholder lines — the "$1" in
// formatKeys output resolves to the captured leading whitespace.
const formatTemplate = (yarnVersion, nodeKeys, yarnKeys, muslChecksum, base, metadata) => {
  const { latestVersion, template, nodeMajorVersion } = metadata;
  const baseImage = getBaseImage(metadata);
  const initialFormat = base.replace(/^FROM.+$/m, `FROM ${baseImage}`)
    .replace(/^ENV NODE_VERSION .+$/m, `ENV NODE_VERSION ${latestVersion}`)
    .replace(/^ENV YARN_VERSION .+$/m, `ENV YARN_VERSION ${yarnVersion}`)
    .replace(/^(\s*)"\${NODE_KEYS\[@]}".*$/m, formatKeys(nodeKeys))
    .replace(/^(\s*)"\${YARN_KEYS\[@]}".*$/m, formatKeys(yarnKeys));

  // Only alpine templates carry PYTHON_VERSION / CHECKSUM placeholders.
  if (template !== templates.alpine) {
    return initialFormat;
  }

  // Majors below 14 build against python2; later majors use python3.
  const pythonVersion = parseInt(nodeMajorVersion, 10) < 14
    ? 'python2'
    : 'python3';

  return initialFormat.replace(/\${PYTHON_VERSION}/m, pythonVersion)
    .replace(/CHECKSUM=CHECKSUM_x64/m, `CHECKSUM="${muslChecksum}"`);
};

// Fetches the linux-x64-musl tarball checksum for a node version from the
// unofficial-builds SHASUMS256.txt.
// NOTE: if the checksum is not yet published, fetchText rejects on the
// non-2xx status and the caller fails — there is no polling/retry here.
const fetchMuslChecksum = async (nodeVersion) => {
  const shasumsUrl = `https://unofficial-builds.nodejs.org/download/release/v${nodeVersion}/SHASUMS256.txt`;
  const checksums = await fetchText(shasumsUrl);

  const [, checksum] = checksums.match(/(\S+)\s+\S+-linux-x64-musl.tar.xz/m);
  return checksum;
};

// Regenerates one Dockerfile from its template and writes it in place.
const updateDockerfile = async (yarnVersion, nodeKeys, yarnKeys, metadata) => {
  const { file, template, latestVersion } = metadata;

  const base = readTemplate(template);
  const muslChecksum = await fetchMuslChecksum(latestVersion);

  const rendered = formatTemplate(yarnVersion, nodeKeys, yarnKeys, muslChecksum, base, metadata);
  writeFileSync(file, rendered);
};

// Rewrites every Dockerfile in `outdated`. The Yarn version and the signing
// key lists are shared across all files, so they are fetched/read once.
const updateDockerfiles = async (outdated) => {
  const yarnVersion = await fetchText(yarnVersionUrl);
  const nodeKeys = getKeys('node.keys');
  const yarnKeys = getKeys('yarn.keys');

  const updates = outdated.map(
    (metadata) => updateDockerfile(yarnVersion, nodeKeys, yarnKeys, metadata),
  );
  await Promise.all(updates);
};

// Updates every outdated Dockerfile (or all of them when updateAll is set)
// and returns the metadata records of the files that were rewritten.
const update = async (updateAll) => {
  const targets = await findOutdated(updateAll);
  await updateDockerfiles(targets);
  return targets;
};

module.exports = update;
27 changes: 27 additions & 0 deletions utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
'use strict';
const path = require('path');
const { readFileSync, readdirSync } = require('fs');

const nodeDirRegex = /^\d+$/;

// Returns the absolute paths of the immediate subdirectories of `parent`.
const getChildDirectories = (parent) => {
  const entries = readdirSync(parent, { withFileTypes: true });
  return entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => path.resolve(parent, entry.name));
};

// Keeps only child directories whose basename is a bare major version
// number (e.g. "18").
const getNodeVersionDirs = (base) => {
  const children = getChildDirectories(base);
  return children.filter((childPath) => nodeDirRegex.test(path.basename(childPath)));
};

// Returns the paths of Dockerfiles that are at: base/*/Dockerfile
const getDockerfilesInChildDirs = (base) => {
  return getChildDirectories(base).map((dir) => path.resolve(dir, 'Dockerfile'));
};

const getAllDockerfiles = (base) => getNodeVersionDirs(base).flatMap(getDockerfilesInChildDirs);

// Extracts the full node version (e.g. "18.19.0") from a Dockerfile's
// "ENV NODE_VERSION x.y.z" line. Throws a descriptive Error when the line
// is absent or malformed.
const getDockerfileNodeVersion = (file) => {
  // BUGFIX: the original pattern (\d*\.*\d*\.\d*) accepted malformed
  // strings (every part optional); require a strict x.y.z triple, and fail
  // with a clear message instead of a TypeError on a null match.
  const match = readFileSync(file, 'utf8').match(/^ENV NODE_VERSION (\d+\.\d+\.\d+)/m);
  if (!match) {
    throw new Error(`No ENV NODE_VERSION line found in ${file}`);
  }
  return match[1];
};

module.exports = {
getAllDockerfiles,
getDockerfileNodeVersion,
};