Mirror of https://github.com/DeterminateSystems/magic-nix-cache-action.git
Synced 2024-12-23 13:32:03 +01:00

Merge pull request #39 from DeterminateSystems/handle-boolean-values

Better Boolean handling and debug statement

Commit e734a0ac4a
3 changed files with 50 additions and 33 deletions
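The "Better Boolean handling" half of this change replaces string comparisons such as getInput('use-flakehub') === 'true' with core.getBooleanInput. A minimal sketch of the difference, assuming the standard @actions/core API (the input name is taken from the diff below; everything else is illustrative):

    import * as core from '@actions/core';

    // Old style: only the exact string 'true' enables the feature; any other
    // spelling is silently treated as false.
    const useFlakeHubOld: boolean = core.getInput('use-flakehub') === 'true';

    // New style: getBooleanInput accepts the YAML 1.2 boolean spellings
    // (true/True/TRUE, false/False/FALSE) and throws a TypeError for anything
    // else, so a mistyped value fails the step instead of silently disabling it.
    const useFlakeHub: boolean = core.getBooleanInput('use-flakehub');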
.github/workflows/ci.yml (vendored): 6 changes
@@ -41,6 +41,8 @@ jobs:
     permissions:
       id-token: "write"
       contents: "read"
+    env:
+      ACTIONS_STEP_DEBUG: true
     steps:
       - uses: actions/checkout@v3
       - name: Install Nix
@@ -61,6 +63,8 @@ jobs:
     permissions:
       id-token: "write"
       contents: "read"
+    env:
+      ACTIONS_STEP_DEBUG: true
     steps:
       - uses: actions/checkout@v3
       - name: Install Nix
@@ -82,6 +86,8 @@ jobs:
     permissions:
       id-token: "write"
       contents: "read"
+    env:
+      ACTIONS_STEP_DEBUG: true
     steps:
       - uses: actions/checkout@v3
       - name: Install Nix
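The env block added to each job above turns on step debug logging, which is what makes the ::debug:: lines emitted by the action visible in the workflow run logs. A minimal sketch of how such lines are produced on the action side, assuming the standard @actions/core API (the message text is taken from the diff below):

    import * as core from '@actions/core';

    // core.debug() emits a ::debug:: workflow command; the runner only renders
    // these lines when step debug logging (ACTIONS_STEP_DEBUG) is enabled.
    core.debug("Full daemon start command:");

    // core.isDebug() reports whether debug logging is active, so the action can
    // skip building expensive diagnostic output when nobody will see it.
    if (core.isDebug()) {
      core.debug("extra diagnostics only worth computing when debug logging is on");
    }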
dist/index.js (generated, vendored): 20 changes
@@ -12211,26 +12211,32 @@ async function setUpAutoCache() {
     const output = openSync(outputPath, 'a');
     const log = tailLog(daemonDir);
     const netrc = await netrcPath();
-    // Start the server. Once it is ready, it will notify us via the notification server.
-    const daemon = spawn(daemonBin, [
+    const nixConfPath = `${process.env["HOME"]}/.config/nix/nix.conf`;
+    const daemonCliFlags = [
         '--startup-notification-url', `http://127.0.0.1:${notifyPort}`,
         '--listen', coreExports.getInput('listen'),
         '--upstream', coreExports.getInput('upstream-cache'),
         '--diagnostic-endpoint', coreExports.getInput('diagnostic-endpoint'),
-        '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
-    ].concat(coreExports.getInput('use-flakehub') === 'true' ? [
+        '--nix-conf', nixConfPath
+    ].concat(coreExports.getBooleanInput('use-flakehub') ? [
         '--use-flakehub',
         '--flakehub-cache-server', coreExports.getInput('flakehub-cache-server'),
         '--flakehub-api-server', coreExports.getInput('flakehub-api-server'),
         '--flakehub-api-server-netrc', netrc,
         '--flakehub-flake-name', coreExports.getInput('flakehub-flake-name'),
-    ] : []).concat(coreExports.getInput('use-gha-cache') === 'true' ? [
+    ] : []).concat(coreExports.getBooleanInput('use-gha-cache') ? [
         '--use-gha-cache'
-    ] : []), {
+    ] : []);
+    const opts = {
         stdio: ['ignore', output, output],
         env: runEnv,
         detached: true
-    });
+    };
+    // Display the final command for debugging purposes
+    coreExports.debug("Full daemon start command:");
+    coreExports.debug(`${daemonBin} ${daemonCliFlags.join(" ")}`);
+    // Start the server. Once it is ready, it will notify us via the notification server.
+    const daemon = spawn(daemonBin, daemonCliFlags, opts);
     const pidFile = path$1.join(daemonDir, 'daemon.pid');
     await fs$2.writeFile(pidFile, `${daemon.pid}`);
     coreExports.info("Waiting for magic-nix-cache to start...");
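dist/index.js is the generated bundle of the TypeScript source shown next, so the two diffs describe the same refactor: the single large spawn(...) call is split into a daemonCliFlags array, a separate opts object, and debug lines that print the assembled command before the daemon starts. A sketch of that shape in plain TypeScript, assuming Node's child_process API (the flag and input names come from the diff; daemonBin and the stdio handling are simplified placeholders):

    import { spawn, SpawnOptions } from 'node:child_process';
    import * as core from '@actions/core';

    const daemonBin = '/path/to/magic-nix-cache';  // placeholder for illustration

    // Collect the CLI flags in one array so they can be logged as a single command line.
    const daemonCliFlags: string[] = [
      '--listen', core.getInput('listen'),
      '--upstream', core.getInput('upstream-cache'),
    ].concat(core.getBooleanInput('use-gha-cache') ? ['--use-gha-cache'] : []);

    // Keep the spawn options in their own object so the spawn call stays short.
    const opts: SpawnOptions = {
      stdio: 'ignore',   // simplified; the action wires stdout/stderr to a log file
      detached: true,    // the daemon must outlive the setup step
    };

    // Log the exact command for debugging, then start the daemon.
    core.debug(`${daemonBin} ${daemonCliFlags.join(' ')}`);
    const daemon = spawn(daemonBin, daemonCliFlags, opts);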
src/index.ts: 29 changes
@@ -3,7 +3,7 @@
 import * as fs from 'node:fs/promises';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { spawn, exec } from 'node:child_process';
+import { spawn, exec, SpawnOptions } from 'node:child_process';
 import { openSync, readFileSync } from 'node:fs';
 import { inspect, promisify } from 'node:util';
 import * as http from 'http';
@@ -145,33 +145,38 @@ async function setUpAutoCache() {
   const output = openSync(outputPath, 'a');
   const log = tailLog(daemonDir);
   const netrc = await netrcPath();
-  // Start the server. Once it is ready, it will notify us via the notification server.
-  const daemon = spawn(
-    daemonBin,
-    [
+  const nixConfPath = `${process.env["HOME"]}/.config/nix/nix.conf`;
+
+  const daemonCliFlags: string[] = [
     '--startup-notification-url', `http://127.0.0.1:${notifyPort}`,
     '--listen', core.getInput('listen'),
     '--upstream', core.getInput('upstream-cache'),
     '--diagnostic-endpoint', core.getInput('diagnostic-endpoint'),
-    '--nix-conf', `${process.env["HOME"]}/.config/nix/nix.conf`
+    '--nix-conf', nixConfPath
   ].concat(
-    core.getInput('use-flakehub') === 'true' ? [
+    core.getBooleanInput('use-flakehub') ? [
       '--use-flakehub',
       '--flakehub-cache-server', core.getInput('flakehub-cache-server'),
       '--flakehub-api-server', core.getInput('flakehub-api-server'),
       '--flakehub-api-server-netrc', netrc,
       '--flakehub-flake-name', core.getInput('flakehub-flake-name'),
     ] : []).concat(
-    core.getInput('use-gha-cache') === 'true' ? [
+    core.getBooleanInput('use-gha-cache') ? [
       '--use-gha-cache'
-    ] : []),
-  {
+    ] : []);
+
+  const opts: SpawnOptions = {
     stdio: ['ignore', output, output],
     env: runEnv,
     detached: true
-  }
-  );
+  };
+
+  // Display the final command for debugging purposes
+  core.debug("Full daemon start command:");
+  core.debug(`${daemonBin} ${daemonCliFlags.join(" ")}`);
+
+  // Start the server. Once it is ready, it will notify us via the notification server.
+  const daemon = spawn(daemonBin, daemonCliFlags, opts);
+
   const pidFile = path.join(daemonDir, 'daemon.pid');
   await fs.writeFile(pidFile, `${daemon.pid}`);