// Main
|
|
|
|
|
|
|
|
import * as fs from 'node:fs/promises';
|
|
|
|
import * as os from 'node:os';
|
|
|
|
import * as path from 'node:path';
|
|
|
|
import { spawn } from 'node:child_process';
|
2023-12-04 22:47:15 +01:00
|
|
|
import { createWriteStream, openSync } from 'node:fs';
|
2023-06-26 05:18:41 +02:00
|
|
|
import { pipeline } from 'node:stream/promises';
|
|
|
|
import { setTimeout } from 'timers/promises';
|
2023-06-27 18:22:21 +02:00
|
|
|
import { inspect } from 'node:util';
|
2023-06-26 05:18:41 +02:00
|
|
|
|
|
|
|
import * as core from '@actions/core';
|
|
|
|
import { Tail } from 'tail';
|
|
|
|
import got from "got";
|
|
|
|
|
|
|
|
// Environment variable used to hand the daemon's state directory from the
// main step to the post step of the action (exported in setUpAutoCache,
// read back in notifyAutoCache/tearDownAutoCache).
const ENV_CACHE_DAEMONDIR = 'MAGIC_NIX_CACHE_DAEMONDIR';
// Shared HTTP client used both to download the daemon binary and to call
// the local daemon's API, with retries and retry logging.
// NOTE(review): non-idempotent POST is in the retry list — presumably the
// daemon's endpoints tolerate replayed requests; confirm before changing.
const gotClient = got.extend({
  retry: {
    limit: 5,
    methods: [ 'POST', 'GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE' ],
  },
  hooks: {
    beforeRetry: [
      (error, retryCount) => {
        // Surface transient failures in the workflow log.
        core.info(`Retrying after error ${error.code}, retry #: ${retryCount}`);
      }
    ],
  },
});
|
|
|
|
|
|
|
|
|
2023-06-26 05:18:41 +02:00
|
|
|
function getCacherUrl() : string {
|
|
|
|
const runnerArch = process.env.RUNNER_ARCH;
|
|
|
|
const runnerOs = process.env.RUNNER_OS;
|
|
|
|
const binarySuffix = `${runnerArch}-${runnerOs}`;
|
2023-12-11 15:29:18 +01:00
|
|
|
const urlPrefix = `https://magic-nix-cache-priv20231208150408868500000001.s3.us-east-2.amazonaws.com`;
|
2023-06-26 05:18:41 +02:00
|
|
|
|
|
|
|
if (core.getInput('source-url')) {
|
|
|
|
return core.getInput('source-url');
|
|
|
|
}
|
|
|
|
|
|
|
|
if (core.getInput('source-tag')) {
|
|
|
|
return `${urlPrefix}/tag/${core.getInput('source-tag')}/${binarySuffix}`;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (core.getInput('source-pr')) {
|
2023-12-11 15:29:18 +01:00
|
|
|
return `${urlPrefix}/pr_${core.getInput('source-pr')}/magic-nix-cache-${binarySuffix}`;
|
2023-06-26 05:18:41 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if (core.getInput('source-branch')) {
|
|
|
|
return `${urlPrefix}/branch/${core.getInput('source-branch')}/${binarySuffix}`;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (core.getInput('source-revision')) {
|
|
|
|
return `${urlPrefix}/rev/${core.getInput('source-revision')}/${binarySuffix}`;
|
|
|
|
}
|
|
|
|
|
|
|
|
return `${urlPrefix}/latest/${binarySuffix}`;
|
|
|
|
}
|
|
|
|
|
|
|
|
async function fetchAutoCacher(destination: string) {
|
|
|
|
const stream = createWriteStream(destination, {
|
|
|
|
encoding: "binary",
|
|
|
|
mode: 0o755,
|
|
|
|
});
|
|
|
|
|
|
|
|
const binary_url = getCacherUrl();
|
2023-12-11 15:29:18 +01:00
|
|
|
core.info(`Fetching the Magic Nix Cache from ${binary_url}`);
|
2023-06-26 05:18:41 +02:00
|
|
|
|
|
|
|
return pipeline(
|
2023-06-27 18:22:21 +02:00
|
|
|
gotClient.stream(binary_url),
|
2023-06-26 05:18:41 +02:00
|
|
|
stream
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async function setUpAutoCache() {
|
|
|
|
const tmpdir = process.env['RUNNER_TEMP'] || os.tmpdir();
|
|
|
|
const required_env = ['ACTIONS_CACHE_URL', 'ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN'];
|
|
|
|
|
|
|
|
var anyMissing = false;
|
|
|
|
for (const n of required_env) {
|
|
|
|
if (!process.env.hasOwnProperty(n)) {
|
|
|
|
anyMissing = true;
|
|
|
|
core.warning(`Disabling automatic caching since required environment ${n} isn't available`);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (anyMissing) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
core.debug(`GitHub Action Cache URL: ${process.env['ACTIONS_CACHE_URL']}`);
|
|
|
|
|
|
|
|
const daemonDir = await fs.mkdtemp(path.join(tmpdir, 'magic-nix-cache-'));
|
|
|
|
|
2023-12-11 15:29:18 +01:00
|
|
|
var daemonBin: string;
|
|
|
|
if (core.getInput('source-binary')) {
|
2023-06-26 05:18:41 +02:00
|
|
|
daemonBin = core.getInput('source-binary');
|
|
|
|
} else {
|
|
|
|
daemonBin = `${daemonDir}/magic-nix-cache`;
|
|
|
|
await fetchAutoCacher(daemonBin);
|
|
|
|
}
|
|
|
|
|
|
|
|
var runEnv;
|
|
|
|
if (core.isDebug()) {
|
|
|
|
runEnv = {
|
2023-06-26 18:27:45 +02:00
|
|
|
RUST_LOG: "trace,magic_nix_cache=debug,gha_cache=debug",
|
2023-06-26 05:18:41 +02:00
|
|
|
RUST_BACKTRACE: "full",
|
|
|
|
...process.env
|
|
|
|
};
|
|
|
|
} else {
|
|
|
|
runEnv = process.env;
|
|
|
|
}
|
|
|
|
|
2023-12-04 22:47:15 +01:00
|
|
|
const outputPath = `${daemonDir}/parent.log`;
|
|
|
|
const output = openSync(outputPath, 'a');
|
2023-06-26 05:18:41 +02:00
|
|
|
const launch = spawn(
|
|
|
|
daemonBin,
|
|
|
|
[
|
|
|
|
'--daemon-dir', daemonDir,
|
|
|
|
'--listen', core.getInput('listen'),
|
|
|
|
'--upstream', core.getInput('upstream-cache'),
|
2023-12-04 22:47:15 +01:00
|
|
|
'--diagnostic-endpoint', core.getInput('diagnostic-endpoint'),
|
|
|
|
'--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
|
|
|
|
].concat(
|
|
|
|
core.getInput('use-flakehub') === 'true' ? [
|
|
|
|
'--use-flakehub',
|
2023-12-11 13:46:39 +01:00
|
|
|
'--flakehub-cache-server', core.getInput('flakehub-cache-server'),
|
2023-12-04 22:47:15 +01:00
|
|
|
'--flakehub-api-server', core.getInput('flakehub-api-server'),
|
|
|
|
'--flakehub-api-server-netrc', path.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc'),
|
|
|
|
] : []).concat(
|
|
|
|
core.getInput('use-gha-cache') === 'true' ? [
|
|
|
|
'--use-gha-cache'
|
|
|
|
] : []),
|
2023-06-26 05:18:41 +02:00
|
|
|
{
|
|
|
|
stdio: ['ignore', output, output],
|
|
|
|
env: runEnv
|
|
|
|
}
|
|
|
|
);
|
|
|
|
|
|
|
|
await new Promise<void>((resolve, reject) => {
|
2023-12-04 22:47:15 +01:00
|
|
|
launch.on('exit', async (code, signal) => {
|
|
|
|
const log: string = await fs.readFile(outputPath, 'utf-8');
|
|
|
|
console.log(log);
|
2023-06-26 05:18:41 +02:00
|
|
|
if (signal) {
|
2023-12-04 22:47:15 +01:00
|
|
|
reject(new Error(`Daemon was killed by signal ${signal}: ${log}`));
|
2023-06-26 05:18:41 +02:00
|
|
|
} else if (code) {
|
2023-12-04 22:47:15 +01:00
|
|
|
reject(new Error(`Daemon exited with code ${code}: ${log}`));
|
2023-06-26 05:18:41 +02:00
|
|
|
} else {
|
|
|
|
resolve();
|
|
|
|
}
|
|
|
|
});
|
|
|
|
});
|
|
|
|
|
2023-12-04 22:47:15 +01:00
|
|
|
core.info('Launched Magic Nix Cache');
|
2023-06-26 05:18:41 +02:00
|
|
|
core.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
|
|
|
|
}
|
|
|
|
|
|
|
|
async function notifyAutoCache() {
|
|
|
|
const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
|
|
|
|
|
|
|
|
if (!daemonDir) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2023-06-27 18:22:21 +02:00
|
|
|
try {
|
|
|
|
core.debug(`Indicating workflow start`);
|
|
|
|
const res: any = await gotClient.post(`http://${core.getInput('listen')}/api/workflow-start`).json();
|
|
|
|
core.debug(`back from post`);
|
|
|
|
core.debug(res);
|
|
|
|
} catch (e) {
|
|
|
|
core.info(`Error marking the workflow as started:`);
|
|
|
|
core.info(inspect(e));
|
|
|
|
core.info(`Magic Nix Cache may not be running for this workflow.`);
|
|
|
|
}
|
2023-06-26 05:18:41 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
async function tearDownAutoCache() {
|
|
|
|
const daemonDir = process.env[ENV_CACHE_DAEMONDIR];
|
|
|
|
|
|
|
|
if (!daemonDir) {
|
|
|
|
core.debug('magic-nix-cache not started - Skipping');
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const pidFile = path.join(daemonDir, 'daemon.pid');
|
|
|
|
const pid = parseInt(await fs.readFile(pidFile, { encoding: 'ascii' }));
|
|
|
|
core.debug(`found daemon pid: ${pid}`);
|
|
|
|
if (!pid) {
|
|
|
|
throw new Error("magic-nix-cache did not start successfully");
|
|
|
|
}
|
|
|
|
|
|
|
|
const log = new Tail(path.join(daemonDir, 'daemon.log'));
|
|
|
|
core.debug(`tailing daemon.log...`);
|
|
|
|
log.on('line', (line) => {
|
|
|
|
core.debug(`got a log line`);
|
|
|
|
core.info(line);
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
core.debug(`about to post to localhost`);
|
2023-06-27 18:22:21 +02:00
|
|
|
const res: any = await gotClient.post(`http://${core.getInput('listen')}/api/workflow-finish`).json();
|
2023-06-26 05:18:41 +02:00
|
|
|
core.debug(`back from post`);
|
|
|
|
core.debug(res);
|
|
|
|
} finally {
|
|
|
|
await setTimeout(5000);
|
|
|
|
|
|
|
|
core.debug(`unwatching the daemon log`);
|
|
|
|
log.unwatch();
|
|
|
|
}
|
|
|
|
|
|
|
|
core.debug(`killing`);
|
|
|
|
try {
|
|
|
|
process.kill(pid, 'SIGTERM');
|
|
|
|
} catch (e) {
|
|
|
|
if (e.code !== 'ESRCH') {
|
|
|
|
throw e;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const isPost = !!process.env['STATE_isPost'];
|
|
|
|
|
|
|
|
try {
|
|
|
|
if (!isPost) {
|
|
|
|
core.saveState('isPost', 'true');
|
|
|
|
await setUpAutoCache();
|
|
|
|
await notifyAutoCache();
|
|
|
|
} else {
|
|
|
|
await tearDownAutoCache();
|
|
|
|
}
|
|
|
|
} catch (e) {
|
|
|
|
core.info(`got an exception:`);
|
|
|
|
core.info(e);
|
|
|
|
|
|
|
|
if (!isPost) {
|
|
|
|
core.setFailed(e.message);
|
|
|
|
throw e;
|
|
|
|
} else {
|
|
|
|
core.info("not considering this a failure: finishing the upload is optional, anyway.");
|
|
|
|
process.exit();
|
|
|
|
}}
|
|
|
|
|
|
|
|
core.debug(`rip`);
|
|
|
|
|