Support uploading to FlakeHub

Eelco Dolstra 2023-12-04 22:47:15 +01:00
parent a04e6275a6
commit e27fd4f93e
5 changed files with 85 additions and 32 deletions


@@ -4,6 +4,10 @@ branding:
   color: "purple"
 description: "Free, no-configuration Nix cache. Cut CI time by 50% or more by caching to GitHub Actions' cache."
 inputs:
+  use-gha-cache:
+    description: "Whether to upload build results to the GitHub Actions cache."
+    default: true
+    required: false
   listen:
     description: The host and port to listen on.
     default: 127.0.0.1:37515
@@ -31,6 +35,16 @@ inputs:
   diagnostic-endpoint:
     description: "Diagnostic endpoint url where diagnostics and performance data is sent. To disable set this to an empty string."
     default: "https://install.determinate.systems/magic-nix-cache/perf"
+  use-flakehub:
+    description: "Whether to upload build results to FlakeHub."
+    default: true
+    required: false
+  attic-server:
+    description: "The Attic binary cache server."
+    default: "https://attic-test.fly.dev"
+  flakehub-api-server:
+    description: "The FlakeHub API server."
+    default: "https://api.flakehub.com"
 runs:
   using: "node16"
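
For context, a minimal workflow step exercising the new inputs might look like the sketch below. The action reference DeterminateSystems/magic-nix-cache-action@main is an assumption for illustration, not something this commit specifies; the values shown simply repeat the defaults declared above.

    - uses: DeterminateSystems/magic-nix-cache-action@main   # assumed reference, not from this commit
      with:
        # New in this commit: upload build results to FlakeHub (defaults to true).
        use-flakehub: true
        # Also new: keep uploading to the GitHub Actions cache (defaults to true).
        use-gha-cache: true
        # Optional overrides for the Attic binary cache and the FlakeHub API server.
        attic-server: "https://attic-test.fly.dev"
        flakehub-api-server: "https://api.flakehub.com"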

bin/X64-Linux: new executable file (binary not shown)

bun.lockb: binary file not shown

dist/index.js (generated, vendored): 50 lines changed

@@ -3,10 +3,11 @@ import * as os$2 from 'node:os';
 import os__default from 'node:os';
 import * as path$1 from 'node:path';
 import { spawn } from 'node:child_process';
-import { openSync, writeSync, close, createWriteStream } from 'node:fs';
+import { openSync, createWriteStream } from 'node:fs';
 import { pipeline } from 'node:stream/promises';
 import { setTimeout as setTimeout$1 } from 'timers/promises';
 import { promisify as promisify$1, inspect } from 'node:util';
+import require$$0$5, { fileURLToPath } from 'url';
 import require$$0 from 'os';
 import require$$1 from 'fs';
 import crypto from 'crypto';
@@ -33,7 +34,6 @@ import { checkServerIdentity } from 'node:tls';
 import https$4 from 'node:https';
 import { lookup, V4MAPPED, ALL, ADDRCONFIG, promises } from 'node:dns';
 import require$$3 from 'http2';
-import require$$0$5 from 'url';
 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
@@ -12147,6 +12147,15 @@ async function fetchAutoCacher(destination) {
     coreExports.debug(`Fetching the Magic Nix Cache from ${binary_url}`);
     return pipeline(gotClient.stream(binary_url), stream);
 }
+async function fileExists(path) {
+    try {
+        await fs$2.access(path, fs$2.constants.F_OK);
+        return true;
+    }
+    catch (err) {
+        return false;
+    }
+}
 async function setUpAutoCache() {
     const tmpdir = process.env['RUNNER_TEMP'] || os$2.tmpdir();
     const required_env = ['ACTIONS_CACHE_URL', 'ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN'];
@@ -12162,8 +12171,11 @@ async function setUpAutoCache() {
     }
     coreExports.debug(`GitHub Action Cache URL: ${process.env['ACTIONS_CACHE_URL']}`);
     const daemonDir = await fs$2.mkdtemp(path$1.join(tmpdir, 'magic-nix-cache-'));
-    var daemonBin;
-    if (coreExports.getInput('source-binary')) {
+    const __filename = fileURLToPath(import.meta.url);
+    const __dirname = path$1.dirname(__filename);
+    var daemonBin = path$1.join(__dirname, "../bin/X64-Linux");
+    if (await fileExists(daemonBin)) ;
+    else if (coreExports.getInput('source-binary')) {
         daemonBin = coreExports.getInput('source-binary');
     }
     else {
@@ -12181,35 +12193,41 @@ async function setUpAutoCache() {
     else {
         runEnv = process.env;
     }
-    const output = openSync(`${daemonDir}/parent.log`, 'a');
+    const outputPath = `${daemonDir}/parent.log`;
+    const output = openSync(outputPath, 'a');
     const launch = spawn(daemonBin, [
         '--daemon-dir', daemonDir,
         '--listen', coreExports.getInput('listen'),
         '--upstream', coreExports.getInput('upstream-cache'),
-        '--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint')
-    ], {
+        '--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint'),
+        '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
+    ].concat(coreExports.getInput('use-flakehub') === 'true' ? [
+        '--use-flakehub',
+        '--attic-server', coreExports.getInput('attic-server'),
+        '--flakehub-api-server', coreExports.getInput('flakehub-api-server'),
+        '--flakehub-api-server-netrc', path$1.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc'),
+    ] : []).concat(coreExports.getInput('use-gha-cache') === 'true' ? [
+        '--use-gha-cache'
+    ] : []), {
         stdio: ['ignore', output, output],
         env: runEnv
     });
     await new Promise((resolve, reject) => {
-        launch.on('exit', (code, signal) => {
+        launch.on('exit', async (code, signal) => {
+            const log = await fs$2.readFile(outputPath, 'utf-8');
+            console.log(log);
             if (signal) {
-                reject(new Error(`Daemon was killed by signal ${signal}`));
+                reject(new Error(`Daemon was killed by signal ${signal}: ${log}`));
             }
             else if (code) {
-                reject(new Error(`Daemon exited with code ${code}`));
+                reject(new Error(`Daemon exited with code ${code}: ${log}`));
             }
             else {
                 resolve();
             }
         });
     });
-    await fs$2.mkdir(`${process.env["HOME"]}/.config/nix`, { recursive: true });
-    const nixConf = openSync(`${process.env["HOME"]}/.config/nix/nix.conf`, 'a');
-    writeSync(nixConf, `${"\n"}extra-substituters = http://${coreExports.getInput('listen')}/?trusted=1&compression=zstd&parallel-compression=true${"\n"}`);
-    writeSync(nixConf, `fallback = true${"\n"}`);
-    close(nixConf);
-    coreExports.debug('Launched Magic Nix Cache');
+    coreExports.info('Launched Magic Nix Cache');
     coreExports.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
 }
 async function notifyAutoCache() {


@@ -4,10 +4,11 @@ import * as fs from 'node:fs/promises';
 import * as os from 'node:os';
 import * as path from 'node:path';
 import { spawn } from 'node:child_process';
-import { createWriteStream, openSync, writeSync, close } from 'node:fs';
+import { createWriteStream, openSync } from 'node:fs';
 import { pipeline } from 'node:stream/promises';
 import { setTimeout } from 'timers/promises';
 import { inspect } from 'node:util';
+import { fileURLToPath } from 'url';
 import * as core from '@actions/core';
 import { Tail } from 'tail';
@@ -74,6 +75,15 @@ async function fetchAutoCacher(destination: string) {
   );
 }
+async function fileExists(path: string) {
+  try {
+    await fs.access(path, fs.constants.F_OK);
+    return true;
+  } catch (err) {
+    return false;
+  }
+}
 async function setUpAutoCache() {
   const tmpdir = process.env['RUNNER_TEMP'] || os.tmpdir();
   const required_env = ['ACTIONS_CACHE_URL', 'ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN'];
@@ -94,8 +104,12 @@ async function setUpAutoCache() {
   const daemonDir = await fs.mkdtemp(path.join(tmpdir, 'magic-nix-cache-'));
-  var daemonBin: string;
-  if (core.getInput('source-binary')) {
+  const __filename = fileURLToPath(import.meta.url);
+  const __dirname = path.dirname(__filename);
+  var daemonBin: string = path.join(__dirname, "../bin/X64-Linux");
+  if (await fileExists(daemonBin)) {
+  } else if (core.getInput('source-binary')) {
     daemonBin = core.getInput('source-binary');
   } else {
     daemonBin = `${daemonDir}/magic-nix-cache`;
@@ -113,15 +127,26 @@ async function setUpAutoCache() {
     runEnv = process.env;
   }
-  const output = openSync(`${daemonDir}/parent.log`, 'a');
+  const outputPath = `${daemonDir}/parent.log`;
+  const output = openSync(outputPath, 'a');
   const launch = spawn(
     daemonBin,
     [
       '--daemon-dir', daemonDir,
       '--listen', core.getInput('listen'),
       '--upstream', core.getInput('upstream-cache'),
-      '--diagnostic-endpoint', core.getInput('diagnostic-endpoint')
-    ],
+      '--diagnostic-endpoint', core.getInput('diagnostic-endpoint'),
+      '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
+    ].concat(
+      core.getInput('use-flakehub') === 'true' ? [
+        '--use-flakehub',
+        '--attic-server', core.getInput('attic-server'),
+        '--flakehub-api-server', core.getInput('flakehub-api-server'),
+        '--flakehub-api-server-netrc', path.join(process.env['RUNNER_TEMP'], 'determinate-nix-installer-netrc'),
+      ] : []).concat(
+      core.getInput('use-gha-cache') === 'true' ? [
+        '--use-gha-cache'
+      ] : []),
     {
       stdio: ['ignore', output, output],
       env: runEnv
@@ -129,24 +154,20 @@ async function setUpAutoCache() {
   );
   await new Promise<void>((resolve, reject) => {
-    launch.on('exit', (code, signal) => {
+    launch.on('exit', async (code, signal) => {
+      const log: string = await fs.readFile(outputPath, 'utf-8');
+      console.log(log);
       if (signal) {
-        reject(new Error(`Daemon was killed by signal ${signal}`));
+        reject(new Error(`Daemon was killed by signal ${signal}: ${log}`));
       } else if (code) {
-        reject(new Error(`Daemon exited with code ${code}`));
+        reject(new Error(`Daemon exited with code ${code}: ${log}`));
       } else {
         resolve();
       }
     });
   });
-  await fs.mkdir(`${process.env["HOME"]}/.config/nix`, { recursive: true });
-  const nixConf = openSync(`${process.env["HOME"]}/.config/nix/nix.conf`, 'a');
-  writeSync(nixConf, `${"\n"}extra-substituters = http://${core.getInput('listen')}/?trusted=1&compression=zstd&parallel-compression=true${"\n"}`);
-  writeSync(nixConf, `fallback = true${"\n"}`);
-  close(nixConf);
-  core.debug('Launched Magic Nix Cache');
+  core.info('Launched Magic Nix Cache');
   core.exportVariable(ENV_CACHE_DAEMONDIR, daemonDir);
 }