Compare commits
5 Commits
1346ae5405...a8f1c81b60

| Author | SHA1 | Date |
|---|---|---|
| | a8f1c81b60 | |
| | 249eaf3c11 | |
| | a79e20c417 | |
| | bd9eb638af | |
| | d09b84676c | |

11 .lazy.lua
@@ -3,16 +3,5 @@ vim.lsp.config("biome", {
     on_dir(vim.fn.getcwd())
   end
 })
-vim.lsp.config("rust_analyzer", {
-  settings = {
-    ["rust-analyzer"] = {
-      cargo = {
-        features = {
-          "daemon"
-        }
-      }
-    }
-  }
-})

 return {}
1 Cargo.lock (generated)

@@ -1956,6 +1956,7 @@ dependencies = [
  "nix-compat",
  "nix-js-macros",
  "nix-nar",
+ "num_enum",
  "petgraph",
  "regex",
  "reqwest",
nix-js/Cargo.toml

@@ -4,15 +4,11 @@ version = "0.1.0"
 edition = "2024"
 build = "build.rs"

-[features]
-default = ["daemon"]
-daemon = ["dep:tokio", "dep:nix-compat"]
-
 [dependencies]
 mimalloc = "0.1"

-tokio = { version = "1.41", features = ["rt-multi-thread", "sync", "net", "io-util"], optional = true }
-nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", features = ["wire", "async"], optional = true }
+tokio = { version = "1.41", features = ["rt-multi-thread", "sync", "net", "io-util"] }
+nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", features = ["wire", "async"] }

 # REPL
 anyhow = "1.0"

@@ -63,6 +59,7 @@ rowan = "0.15"

 nix-js-macros = { path = "../nix-js-macros" }
 ere = "0.2.4"
+num_enum = "0.7.5"

 [dev-dependencies]
 criterion = { version = "0.5", features = ["html_reports"] }
@@ -167,11 +167,16 @@ export const coerceToString = (
   if ("outPath" in v) {
     // Recursively coerce the outPath value
     const outPath = coerceToString(v.outPath, mode, copyToStore, outContext);
-    if ("type" in v && v.type === "derivation" && "drvPath" in v) {
-      const drvPath = force(v.drvPath);
-      if (typeof drvPath === "string" && outContext) {
+    if ("type" in v && v.type === "derivation" && "drvPath" in v && outContext) {
+      const drvPathValue = force(v.drvPath);
+      const drvPathStr = isStringWithContext(drvPathValue)
+        ? drvPathValue.value
+        : typeof drvPathValue === "string"
+          ? drvPathValue
+          : null;
+      if (drvPathStr) {
         const outputName = "outputName" in v ? String(force(v.outputName)) : "out";
-        addBuiltContext(outContext, drvPath, outputName);
+        addBuiltContext(outContext, drvPathStr, outputName);
       }
     }
     return outPath;
@@ -1,7 +1,12 @@
 import type { NixValue, NixAttrs } from "../types";
-import { forceStringValue, forceList } from "../type-assert";
+import { forceStringValue, forceList, forceStringNoCtx } from "../type-assert";
 import { force, createThunk } from "../thunk";
-import { type DerivationData, type OutputInfo, generateAterm } from "../derivation-helpers";
+import {
+  type DerivationData,
+  type OutputInfo,
+  generateAterm,
+  generateAtermModulo,
+} from "../derivation-helpers";
 import { coerceToString, StringCoercionMode } from "./conversion";
 import {
   type NixStringContext,
@@ -14,6 +19,8 @@ import {
 import { nixValueToJson } from "../conversion";
 import { isNixPath } from "../types";

+const drvHashCache = new Map<string, string>();
+
 const forceAttrs = (value: NixValue): NixAttrs => {
   const forced = force(value);
   if (
@@ -56,13 +63,7 @@ const validateSystem = (attrs: NixAttrs): string => {
   return forceStringValue(attrs.system);
 };

-const extractOutputs = (attrs: NixAttrs): string[] => {
-  if (!("outputs" in attrs)) {
-    return ["out"];
-  }
-  const outputsList = forceList(attrs.outputs);
-  const outputs = outputsList.map((o) => forceStringValue(o));
-
+const validateOutputs = (outputs: string[]): void => {
   if (outputs.length === 0) {
     throw new Error("derivation: outputs list cannot be empty");
   }
@@ -78,7 +79,34 @@ const extractOutputs = (attrs: NixAttrs): string[] => {
     }
     seen.add(output);
   }
 };

+const extractOutputs = (attrs: NixAttrs, structuredAttrs: boolean): string[] => {
+  if (!("outputs" in attrs)) {
+    return ["out"];
+  }
+
+  let outputs: string[];
+  if (structuredAttrs) {
+    const outputsList = forceList(attrs.outputs);
+    outputs = outputsList.map((o) => forceStringValue(o));
+  } else {
+    const outputsStr = coerceToString(attrs.outputs, StringCoercionMode.ToString, false, new Set());
+    outputs = outputsStr.split(/\s+/).filter((s) => s.length > 0);
+  }
+
+  validateOutputs(outputs);
+  return outputs;
+};
+
+const extractOutputsForWrapper = (attrs: NixAttrs): string[] => {
+  if (!("outputs" in attrs)) {
+    return ["out"];
+  }
+
+  // FIXME: trace context?
+  const outputs = forceList(attrs.outputs).map(forceStringValue);
+  validateOutputs(outputs);
+  return outputs;
+};

@@ -94,20 +122,15 @@ const structuredAttrsExcludedKeys = new Set([
  "__structuredAttrs",
  "__ignoreNulls",
  "__contentAddressed",
  "impure",
  "__impure",
  "args",
]);

const specialAttrs = new Set([
  "name",
  "builder",
  "system",
  "args",
  "outputs",
  "__structuredAttrs",
  "__ignoreNulls",
  "__contentAddressed",
  "impure",
  "__impure",
]);

const sortedJsonStringify = (obj: Record<string, any>): string => {
@@ -142,43 +165,43 @@ const extractEnv = (
   if (key === "allowedReferences") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'allowedReferences'; use ` +
-        `'outputChecks.<output>.allowedReferences' instead`
+        `the derivation attribute 'allowedReferences'; use ` +
+        `'outputChecks.<output>.allowedReferences' instead`,
     );
   }
   if (key === "allowedRequisites") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'allowedRequisites'; use ` +
-        `'outputChecks.<output>.allowedRequisites' instead`
+        `the derivation attribute 'allowedRequisites'; use ` +
+        `'outputChecks.<output>.allowedRequisites' instead`,
     );
   }
   if (key === "disallowedReferences") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'disallowedReferences'; use ` +
-        `'outputChecks.<output>.disallowedReferences' instead`
+        `the derivation attribute 'disallowedReferences'; use ` +
+        `'outputChecks.<output>.disallowedReferences' instead`,
     );
   }
   if (key === "disallowedRequisites") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'disallowedRequisites'; use ` +
-        `'outputChecks.<output>.disallowedRequisites' instead`
+        `the derivation attribute 'disallowedRequisites'; use ` +
+        `'outputChecks.<output>.disallowedRequisites' instead`,
     );
   }
   if (key === "maxSize") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'maxSize'; use ` +
-        `'outputChecks.<output>.maxSize' instead`
+        `the derivation attribute 'maxSize'; use ` +
+        `'outputChecks.<output>.maxSize' instead`,
     );
   }
   if (key === "maxClosureSize") {
     console.warn(
       `In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
-        `the derivation attribute 'maxClosureSize'; use ` +
-        `'outputChecks.<output>.maxClosureSize' instead`
+        `the derivation attribute 'maxClosureSize'; use ` +
+        `'outputChecks.<output>.maxClosureSize' instead`,
     );
   }
 }
@@ -213,13 +236,13 @@ const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOut
   if (ignoreNulls && hashValue === null) {
     return null;
   }
-  const hash = forceStringValue(attrs.outputHash);
+  const hashRaw = forceStringNoCtx(attrs.outputHash);

-  let hashAlgo = "sha256";
+  let hashAlgo = null;
   if ("outputHashAlgo" in attrs) {
     const algoValue = force(attrs.outputHashAlgo);
     if (!(ignoreNulls && algoValue === null)) {
-      hashAlgo = forceStringValue(attrs.outputHashAlgo);
+      hashAlgo = forceStringNoCtx(attrs.outputHashAlgo);
     }
   }
@@ -235,7 +258,9 @@ const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOut
     throw new Error(`derivation: invalid outputHashMode '${hashMode}' (must be 'flat' or 'recursive')`);
   }

-  return { hash, hashAlgo, hashMode };
+  const parsed = Deno.core.ops.op_parse_hash(hashRaw, hashAlgo);
+
+  return { hash: parsed.hex, hashAlgo: parsed.algo, hashMode };
 };

 const validateFixedOutputConstraints = (fixedOutput: FixedOutputInfo | null, outputs: string[]) => {
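Side note on the shapes op_parse_hash has to accept: an outputHash in Nix may be SRI (`sha256-<base64>`), `sha256:`-prefixed, bare hex, or nix-base32. A rough TypeScript sketch of that normalization, mirroring the decode_hash_to_hex helper removed later in this diff (the real op delegates to nix-compat's NixHash; Node's Buffer is assumed here, and decodeSha256ToHex is a hypothetical name):

    import { Buffer } from "node:buffer";

    // Illustrative only: the real parsing lives in Rust (nix-compat's NixHash).
    const decodeSha256ToHex = (h: string): string | null => {
      if (h.startsWith("sha256:")) return decodeSha256ToHex(h.slice("sha256:".length));
      if (h.startsWith("sha256-")) {
        // SRI form: base64 digest after the dash
        return Buffer.from(h.slice("sha256-".length), "base64").toString("hex");
      }
      if (/^[0-9a-f]{64}$/i.test(h)) return h; // already hex
      if (h.length === 52) return null; // nix-base32: needs the custom alphabet decoder
      return null;
    };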
@@ -255,7 +280,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
   const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false;
   const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;

-  const outputs = extractOutputs(attrs);
+  const outputs = extractOutputs(attrs, structuredAttrs);
   const fixedOutputInfo = extractFixedOutputInfo(attrs, ignoreNulls);
   validateFixedOutputConstraints(fixedOutputInfo, outputs);

@@ -263,21 +288,13 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
     throw new Error("ca derivations are not supported");
   }

-  if ("impure" in attrs && force(attrs.impure) === true) {
+  if ("__impure" in attrs && force(attrs.__impure) === true) {
     throw new Error("impure derivations are not supported");
   }

   const drvArgs = extractArgs(attrs, collectedContext);
   const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);

-  if (!structuredAttrs) {
-    env.set("name", drvName);
-    env.set("builder", builder);
-    env.set("system", platform);
-    if (outputs.length > 1 || outputs[0] !== "out") {
-      env.set("outputs", outputs.join(" "));
-    }
-  }
-
   const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext);

@@ -328,8 +345,11 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
       env,
     };
     const finalAterm = generateAterm(finalDrv);
-    const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
-    drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
+    drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
+
+    const fixedHashFingerprint = `fixed:out:${hashAlgoPrefix}${fixedOutputInfo.hashAlgo}:${fixedOutputInfo.hash}:${outPath}`;
+    const fixedModuloHash = Deno.core.ops.op_sha256_hex(fixedHashFingerprint);
+    drvHashCache.set(drvPath, fixedModuloHash);
   } else {
     const maskedOutputs = new Map<string, OutputInfo>(
       outputs.map((o) => [
@@ -357,7 +377,16 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
       env: maskedEnv,
     };

-    const maskedAterm = generateAterm(maskedDrv);
+    const inputDrvHashes = new Map<string, string>();
+    for (const [drvPath, outputNames] of inputDrvs) {
+      const cachedHash = drvHashCache.get(drvPath);
+      if (!cachedHash) {
+        throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
+      }
+      inputDrvHashes.set(cachedHash, Array.from(outputNames).join(","));
+    }
+
+    const maskedAterm = generateAtermModulo(maskedDrv, inputDrvHashes);
     const drvModuloHash = Deno.core.ops.op_sha256_hex(maskedAterm);

     outputInfos = new Map<string, OutputInfo>();
@@ -378,9 +407,11 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
       env,
     };
     const finalAterm = generateAterm(finalDrv);
-    const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
-    drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
+    drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
+
+    const finalAtermModulo = generateAtermModulo(finalDrv, inputDrvHashes);
+    const cachedModuloHash = Deno.core.ops.op_sha256_hex(finalAtermModulo);
+    drvHashCache.set(drvPath, cachedModuloHash);
   }

   const result: NixAttrs = {};
@@ -401,7 +432,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
 export const derivation = (args: NixValue): NixAttrs => {
   const attrs = forceAttrs(args);

-  const outputs: string[] = extractOutputs(attrs);
+  const outputs: string[] = extractOutputsForWrapper(attrs);

   const strictThunk = createThunk(() => derivationStrict(args), "derivationStrict");

@@ -426,10 +457,7 @@ export const derivation = (args: NixValue): NixAttrs => {
       `output_${outputName}`,
     );
   }
-  commonAttrs.all = createThunk(
-    () => outputsList.map((o) => o.value),
-    "all_outputs",
-  );
+  commonAttrs.all = createThunk(() => outputsList.map((o) => o.value), "all_outputs");
   commonAttrs.drvAttrs = attrs;

   for (const { value: outputObj } of outputsList) {
@@ -439,13 +467,9 @@ export const derivation = (args: NixValue): NixAttrs => {
       `output_${outputName}`,
     );
   }
-    outputObj.all = createThunk(
-      () => outputsList.map((o) => o.value),
-      "all_outputs",
-    );
+    outputObj.all = createThunk(() => outputsList.map((o) => o.value), "all_outputs");
     outputObj.drvAttrs = attrs;
   }

   return outputsList[0].value;
 };

@@ -2,10 +2,11 @@
 * Functional programming builtin functions
 */

-import { CatchableError, HAS_CONTEXT, type NixValue } from "../types";
+import { CatchableError, type NixValue } from "../types";
 import { force } from "../thunk";
 import { coerceToString, StringCoercionMode } from "./conversion";
 import { printValue } from "../print";
+import { isAttrs } from "./type-check";

 export const seq =
   (e1: NixValue) =>
@@ -17,16 +18,25 @@ export const seq =
 export const deepSeq =
   (e1: NixValue) =>
   (e2: NixValue): NixValue => {
-    const forced = force(e1);
-    if (Array.isArray(forced)) {
-      for (const val of forced) {
-        deepSeq(val);
-      }
-    } else if (typeof forced === "object" && forced !== null && !(HAS_CONTEXT in forced)) {
-      for (const [_, val] of Object.entries(forced)) {
-        deepSeq(val);
-      }
-    }
+    const seen: Set<NixValue> = new Set();
+    const recurse = (e: NixValue) => {
+      if (!seen.has(e)) {
+        seen.add(e);
+      } else {
+        return;
+      }
+      const forced = force(e);
+      if (Array.isArray(forced)) {
+        for (const val of forced) {
+          recurse(val);
+        }
+      } else if (isAttrs(forced)) {
+        for (const [_, val] of Object.entries(forced)) {
+          recurse(val);
+        }
+      }
+    };
+    recurse(e1);
     return e2;
   };

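The rewrite above makes deepSeq cycle-safe: each container is forced at most once, so self-referential values (e.g. `let a = { b = a; }; in builtins.deepSeq a a`) terminate instead of recursing forever. The same seen-set idiom in isolation, as a minimal sketch:

    // Generic cycle-safe deep traversal with a seen set, as in deepSeq above.
    const visitDeep = (root: unknown, visit: (v: unknown) => void): void => {
      const seen = new Set<unknown>();
      const recurse = (v: unknown) => {
        if (typeof v === "object" && v !== null) {
          if (seen.has(v)) return; // already visited: break the cycle
          seen.add(v);
          for (const child of Object.values(v)) recurse(child);
        }
        visit(v);
      };
      recurse(root);
    };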
@@ -193,7 +193,7 @@ export const builtins: any = {
   warn: mkPrimop(functional.warn, "warn", 2),
   break: mkPrimop(functional.breakFunc, "break", 1),

-  derivation: mkPrimop(derivation.derivation, "derivation", 1),
+  derivation: undefined as any,
   derivationStrict: mkPrimop(derivation.derivationStrict, "derivationStrict", 1),

   import: mkPrimop(io.importFunc, "import", 1),
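The `undefined as any` slot is deliberate: per the init_derivation change in context.rs further down, the Rust host compiles corepkgs/derivation.nix at startup and assigns the result over this placeholder, so `builtins.derivation` is now implemented in Nix on top of derivationStrict. A compressed sketch of that hand-off (names taken from this diff; installDerivation is hypothetical):

    type Builtins = { derivation?: unknown };
    const installDerivation = (builtins: Builtins, compiled: unknown): void => {
      // Rust side evaluates: Nix.builtins.derivation = <compiled derivation.nix>
      builtins.derivation = compiled;
    };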
@@ -266,7 +266,7 @@ export const builtins: any = {
   storeDir: "INVALID_PATH",

   __traceCaller: (e: NixValue) => {
-    console.log(`traceCaller: ${getTos()}`)
-    return e
+    console.log(`traceCaller: ${getTos()}`);
+    return e;
   },
 };

@@ -3,7 +3,14 @@
 * Implemented via Rust ops exposed through deno_core
 */

-import { forceAttrs, forceBool, forceFunction, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
+import {
+  forceAttrs,
+  forceBool,
+  forceFunction,
+  forceList,
+  forceStringNoCtx,
+  forceStringValue,
+} from "../type-assert";
 import type { NixValue, NixAttrs, NixPath } from "../types";
 import { isNixPath, IS_PATH, CatchableError } from "../types";
 import { force } from "../thunk";
@@ -11,7 +18,7 @@ import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
 import { getPathValue } from "../path";
 import type { NixStringContext, StringWithContext } from "../string-context";
 import { mkStringWithContext } from "../string-context";
-import { isPath } from "./type-check";
+import { isAttrs, isPath } from "./type-check";

 const importCache = new Map<string, NixValue>();

@@ -132,22 +139,19 @@ const normalizeUrlInput = (

 const normalizeTarballInput = (
   args: NixValue,
-): { url: string; hash?: string; narHash?: string; name?: string } => {
+): { url: string; sha256?: string; name?: string } => {
   const forced = force(args);
-  if (typeof forced === "string") {
-    return { url: forced };
-  }
-  const attrs = forceAttrs(args);
-  const url = forceStringValue(attrs.url);
-  const hash = "hash" in attrs ? forceStringValue(attrs.hash) : undefined;
-  const narHash =
-    "narHash" in attrs
-      ? forceStringValue(attrs.narHash)
-      : "sha256" in attrs
-        ? forceStringValue(attrs.sha256)
-        : undefined;
-  const name = "name" in attrs ? forceStringValue(attrs.name) : undefined;
-  return { url, hash, narHash, name };
+  if (isAttrs(forced)) {
+    const url = forceStringNoCtx(forced.url);
+    const sha256 =
+      "sha256" in forced
+        ? forceStringNoCtx(forced.sha256)
+        : undefined;
+    const name = "name" in forced ? forceStringNoCtx(forced.name) : undefined;
+    return { url, sha256, name };
+  } else {
+    return { url: forceStringNoCtx(forced) };
+  }
 };

 export const fetchurl = (args: NixValue): string => {
@@ -162,12 +166,11 @@ export const fetchurl = (args: NixValue): string => {
 };

 export const fetchTarball = (args: NixValue): string => {
-  const { url, hash, narHash, name } = normalizeTarballInput(args);
+  const { url, name, sha256 } = normalizeTarballInput(args);
   const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(
     url,
-    hash ?? null,
-    narHash ?? null,
     name ?? null,
+    sha256 ?? null,
   );
   return result.store_path;
 };
@@ -488,9 +491,8 @@ export const findFile =
   }

   const resolvedPath = Deno.core.ops.op_resolve_path(pathVal, "");
-  const candidatePath = suffix.length > 0
-    ? Deno.core.ops.op_resolve_path(suffix, resolvedPath)
-    : resolvedPath;
+  const candidatePath =
+    suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath;

   if (Deno.core.ops.op_path_exists(candidatePath)) {
     return { [IS_PATH]: true, value: candidatePath };
@@ -5,7 +5,14 @@
 import { force } from "../thunk";
 import { CatchableError, ATTR_POSITIONS } from "../types";
 import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types";
-import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert";
+import {
+  forceList,
+  forceAttrs,
+  forceFunction,
+  forceStringValue,
+  forceString,
+  forceStringNoCtx,
+} from "../type-assert";
 import * as context from "./context";
 import { compareValues } from "../operators";
 import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check";
@@ -242,7 +249,7 @@ export const parseDrvName = (s: NixValue): NixAttrs => {
   let name = fullName;
   let version = "";
   for (let i = 0; i < fullName.length; ++i) {
-    if (fullName[i] === '-' && i + 1 < fullName.length && !/[a-zA-Z]/.test(fullName[i + 1])) {
+    if (fullName[i] === "-" && i + 1 < fullName.length && !/[a-zA-Z]/.test(fullName[i + 1])) {
       name = fullName.substring(0, i);
       version = fullName.substring(i + 1);
       break;
@@ -250,8 +257,8 @@ export const parseDrvName = (s: NixValue): NixAttrs => {
   }
   return {
     name,
-    version
-  }
+    version,
+  };
 };

 export const parseFlakeName = (s: NixValue): never => {

@@ -43,9 +43,12 @@ export const escapeString = (s: string): string => {

 const quoteString = (s: string): string => `"${s}"`;

+const cmpByKey = <T>(a: [string, T], b: [string, T]): number =>
+  a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
+
 export const generateAterm = (drv: DerivationData): string => {
   const outputEntries: string[] = [];
-  const sortedOutputs = Array.from(drv.outputs.entries()).sort();
+  const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
   for (const [name, info] of sortedOutputs) {
     outputEntries.push(
       `(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
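cmpByKey is a correctness fix, not a style one: JavaScript's default Array.prototype.sort() compares elements by their string form, so for [key, value] entries the value leaks into the ordering. A self-contained demonstration:

    // Default sort stringifies tuples ("out!,a" < "out,b" since "!" < ","),
    // so the value influences the order; a key-only comparator does not.
    const entries: [string, string[]][] = [
      ["out", ["b"]],
      ["out!", ["a"]],
    ];
    console.log([...entries].sort());
    // -> [["out!", ["a"]], ["out", ["b"]]]  (value-dependent)
    console.log(
      [...entries].sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0)),
    );
    // -> [["out", ["b"]], ["out!", ["a"]]]  (stable on keys alone)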
@@ -54,18 +57,51 @@ export const generateAterm = (drv: DerivationData): string => {
   const outputs = outputEntries.join(",");

   const inputDrvEntries: string[] = [];
-  for (const [drvPath, outputs] of drv.inputDrvs) {
-    const outList = `[${Array.from(outputs).map(quoteString).join(",")}]`;
+  const sortedInputDrvs = Array.from(drv.inputDrvs.entries()).sort(cmpByKey);
+  for (const [drvPath, outputs] of sortedInputDrvs) {
+    const sortedOuts = Array.from(outputs).sort();
+    const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
     inputDrvEntries.push(`(${quoteString(drvPath)},${outList})`);
   }
   const inputDrvs = inputDrvEntries.join(",");

-  const inputSrcs = Array.from(drv.inputSrcs).map(quoteString).join(",");
+  const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
+  const inputSrcs = sortedInputSrcs.map(quoteString).join(",");

   const args = drv.args.map(escapeString).join(",");
   const envs = Array.from(drv.env.entries())
-    .sort()
+    .sort(cmpByKey)
     .map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);

-  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${quoteString(drv.builder)},[${args}],[${envs}])`;
+  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
 };
+
+export const generateAtermModulo = (drv: DerivationData, inputDrvHashes: Map<string, string>): string => {
+  const outputEntries: string[] = [];
+  const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
+  for (const [name, info] of sortedOutputs) {
+    outputEntries.push(
+      `(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
+    );
+  }
+  const outputs = outputEntries.join(",");
+
+  const inputDrvEntries: string[] = [];
+  const sortedInputDrvHashes = Array.from(inputDrvHashes.entries()).sort(cmpByKey);
+  for (const [drvHash, outputs] of sortedInputDrvHashes) {
+    const sortedOuts = outputs.split(",").sort();
+    const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
+    inputDrvEntries.push(`(${quoteString(drvHash)},${outList})`);
+  }
+  const inputDrvs = inputDrvEntries.join(",");
+
+  const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
+  const inputSrcs = sortedInputSrcs.map(quoteString).join(",");
+
+  const args = drv.args.map(escapeString).join(",");
+  const envs = Array.from(drv.env.entries())
+    .sort(cmpByKey)
+    .map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);
+
+  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
+};
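For context: generateAtermModulo mirrors Nix's hashDerivationModulo. Before hashing, every input `.drv` store path is replaced by the cached "modulo" hash of that input, so a derivation's hash depends on what its inputs compute rather than on the textual paths of their `.drv` files. Condensed from derivationStrict above (sha256Hex stands in for op_sha256_hex):

    declare const sha256Hex: (s: string) => string; // stand-in for op_sha256_hex
    declare const drvHashCache: Map<string, string>; // drvPath -> modulo hash

    // Re-key inputDrvs by modulo hash instead of .drv path before hashing.
    const moduloInputs = (inputDrvs: Map<string, Set<string>>): Map<string, string> => {
      const rekeyed = new Map<string, string>();
      for (const [drvPath, outputNames] of inputDrvs) {
        const cached = drvHashCache.get(drvPath);
        if (!cached) throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
        rekeyed.set(cached, Array.from(outputNames).join(","));
      }
      return rekeyed;
    };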
@@ -40,7 +40,7 @@ export const getTos = (): string => {
   const tos = callStack[callStack.length - 2];
   const { file, line, column } = Deno.core.ops.op_decode_span(tos.span);
   return `${tos.message} at ${file}:${line}:${column}`;
-  }
+  }
 };

 /**
  * Push an error context onto the stack
@@ -4,7 +4,16 @@
 * All functionality is exported via the global `Nix` object
 */

-import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS, forceDeep, IS_CYCLE, forceShallow } from "./thunk";
+import {
+  createThunk,
+  force,
+  isThunk,
+  IS_THUNK,
+  DEBUG_THUNKS,
+  forceDeep,
+  IS_CYCLE,
+  forceShallow,
+} from "./thunk";
 import {
   select,
   selectWithDefault,
@@ -183,7 +183,9 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
 *
 * Context type handling:
 * - Opaque: Added to inputSrcs
- * - DrvDeep: Added to inputSrcs (entire derivation + all outputs)
+ * - DrvDeep: Computes FS closure (like Nix's computeFSClosure) - adds all paths
+ *   in the dependency graph to inputSrcs, and all derivations with their
+ *   outputs to inputDrvs
 * - Built: Added to inputDrvs with specific output name
 */
 export const extractInputDrvsAndSrcs = (
@@ -198,9 +200,28 @@ export const extractInputDrvsAndSrcs = (
       case "opaque":
         inputSrcs.add(elem.path);
         break;
-      case "drvDeep":
-        inputSrcs.add(elem.drvPath);
+      case "drvDeep": {
+        const closure: {
+          input_drvs: [string, string[]][];
+          input_srcs: string[];
+        } = Deno.core.ops.op_compute_fs_closure(elem.drvPath);
+
+        for (const src of closure.input_srcs) {
+          inputSrcs.add(src);
+        }
+
+        for (const [drvPath, outputs] of closure.input_drvs) {
+          let existingOutputs = inputDrvs.get(drvPath);
+          if (!existingOutputs) {
+            existingOutputs = new Set<string>();
+            inputDrvs.set(drvPath, existingOutputs);
+          }
+          for (const output of outputs) {
+            existingOutputs.add(output);
+          }
+        }
         break;
+      }
       case "built": {
         let outputs = inputDrvs.get(elem.drvPath);
         if (!outputs) {
@@ -6,7 +6,7 @@
 import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
 import { HAS_CONTEXT } from "./string-context";
 import { IS_PATH } from "./types";
-import { isAttrs } from "./builtins/type-check";
+import { isAttrs, isList } from "./builtins/type-check";

 /**
  * Symbol used to mark objects as thunks
@@ -165,7 +165,9 @@ export const forceDeep = (value: NixValue, seen: WeakSet<object> = new WeakSet()
     }
     return CYCLE_MARKER;
   }
-  seen.add(forced);
+  if (isAttrs(forced) || isList(forced)) {
+    seen.add(forced);
+  }

   if (HAS_CONTEXT in forced || IS_PATH in forced) {
     return forced;
@@ -197,9 +199,9 @@ export const forceShallow = (value: NixValue): NixStrictValue => {
   return forced.map((item) => {
     const forcedItem = force(item);
     if (typeof forcedItem === "object" && forcedItem === forced) {
-      return CYCLE_MARKER
+      return CYCLE_MARKER;
     } else {
-      return forcedItem
+      return forcedItem;
     }
   });
 }

@@ -39,12 +39,7 @@ export const forceFunction = (value: NixValue): NixFunction => {
   if (isFunction(forced)) {
     return forced;
   }
-  if (
-    typeof forced === "object" &&
-    !Array.isArray(forced) &&
-    forced !== null &&
-    "__functor" in forced
-  ) {
+  if (typeof forced === "object" && !Array.isArray(forced) && forced !== null && "__functor" in forced) {
     const functorSet = forced as NixAttrs;
     const functor = forceFunction(functorSet.__functor);
     return (arg: NixValue) => forceFunction(functor(functorSet))(arg);
@@ -100,10 +95,10 @@ export const forceStringNoCtx = (value: NixValue): string => {
     return forced;
   }
   if (isStringWithContext(forced)) {
-    throw new TypeError(`the string '${forced.value}' is not allowed to refer to a store path`)
+    throw new TypeError(`the string '${forced.value}' is not allowed to refer to a store path`);
   }
   throw new TypeError(`Expected string, got ${typeOf(forced)}`);
 }
 };

 /**
  * Force a value and assert it's a boolean
@@ -95,7 +95,7 @@ const ATTR_POSITIONS = Symbol("attrPositions");
 export const mkAttrsWithPos = (
   attrs: NixAttrs,
   positions: Record<string, string>,
-  dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] }
+  dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] },
 ): NixAttrs => {
   if (dyns) {
     const len = dyns.dynKeys.length;

17 nix-js/runtime-ts/src/types/global.d.ts (vendored)

@@ -44,10 +44,14 @@ declare global {
     function op_path_exists(path: string): boolean;
     function op_sha256_hex(data: string): string;
     function op_make_placeholder(output: string): string;
-    function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null };
+    function op_decode_span(span: string): {
+      file: string | null;
+      line: number | null;
+      column: number | null;
+    };
     function op_make_store_path(ty: string, hash_hex: string, name: string): string;
     function op_make_text_store_path(hash_hex: string, name: string, references: string[]): string;
     function op_output_path_name(drv_name: string, output_name: string): string;
+    function op_parse_hash(hash_str: string, algo: string | null): { hex: string; algo: string };
     function op_make_fixed_output_path(
       hash_algo: string,
       hash: string,
@@ -62,9 +66,8 @@ declare global {
     ): FetchUrlResult;
     function op_fetch_tarball(
       url: string,
-      expected_hash: string | null,
-      expected_nar_hash: string | null,
       name: string | null,
+      sha256: string | null,
     ): FetchTarballResult;
     function op_fetch_git(
       url: string,
@@ -84,6 +87,12 @@ declare global {
     ): string;
     function op_store_path(path: string): string;
     function op_to_file(name: string, contents: string, references: string[]): string;
+    function op_write_derivation(drv_name: string, aterm: string, references: string[]): string;
+    function op_read_derivation_outputs(drv_path: string): string[];
+    function op_compute_fs_closure(drv_path: string): {
+      input_drvs: [string, string[]][];
+      input_srcs: string[];
+    };
     function op_copy_path_to_store(path: string): string;
     function op_get_env(key: string): string;
     function op_walk_dir(path: string): [string, string][];
@@ -14,7 +14,7 @@ use crate::ir::{
     ToIr as _,
 };
 use crate::runtime::{Runtime, RuntimeContext};
-use crate::store::{Store, StoreBackend, StoreConfig};
+use crate::store::{DaemonStore, Store, StoreConfig};
 use crate::value::{Symbol, Value};

 pub struct Context {
@@ -41,7 +41,22 @@ impl Context {
         let ctx = Ctx::new()?;
         let runtime = Runtime::new()?;

-        Ok(Self { ctx, runtime })
+        let mut context = Self { ctx, runtime };
+        context.init_derivation()?;
+
+        Ok(context)
+    }
+
+    fn init_derivation(&mut self) -> Result<()> {
+        const DERIVATION_NIX: &str = include_str!("runtime/corepkgs/derivation.nix");
+        let source = Source::new_virtual(
+            "<nix/derivation-internal.nix>".into(),
+            DERIVATION_NIX.to_string(),
+        );
+        let code = self.ctx.compile(source, None)?;
+        self.runtime
+            .eval(format!("Nix.builtins.derivation = {}", code), &mut self.ctx)?;
+        Ok(())
     }

     eval!(eval, "Nix.force({})");
@@ -93,7 +108,7 @@ pub(crate) struct Ctx {
     symbols: DefaultStringInterner,
     global: NonNull<HashMap<SymId, ExprId>>,
     sources: Vec<Source>,
-    store: StoreBackend,
+    store: DaemonStore,
 }

 impl Ctx {
@@ -182,7 +197,7 @@ impl Ctx {
     }

         let config = StoreConfig::from_env();
-        let store = StoreBackend::new(config)?;
+        let store = DaemonStore::connect(&config.daemon_socket)?;

         Ok(Self {
             symbols,
@@ -296,7 +311,7 @@ impl CodegenContext for Ctx {
         self.sources.last().expect("current_source not set").clone()
     }
     fn get_store_dir(&self) -> &str {
-        self.store.as_store().get_store_dir()
+        self.store.get_store_dir()
     }
 }

@@ -316,8 +331,8 @@ impl RuntimeContext for Ctx {
     fn get_source(&self, id: usize) -> Source {
         self.get_source(id)
     }
-    fn get_store(&self) -> &dyn Store {
-        self.store.as_store()
+    fn get_store(&self) -> &DaemonStore {
+        &self.store
     }
 }

@@ -518,7 +533,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
     fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
         use crate::ir::TopLevel;
         let body = root.downgrade(&mut self)?;
-        let thunks = self.pop_thunk_scope();
+        let thunks = self.thunk_scopes.pop().expect("no thunk scope left???");
         let span = self.get_ir(body).span();
         let top_level = self.new_expr(TopLevel { body, thunks, span }.to_ir());
         self.ctx.irs.extend(self.irs);
@@ -553,14 +568,16 @@ impl DowngradeContext for DowngradeCtx<'_> {
         f(guard.as_ctx())
     }

-    fn push_thunk_scope(&mut self) {
+    fn with_thunk_scope<F, R>(&mut self, f: F) -> (R, Vec<(ExprId, ExprId)>)
+    where
+        F: FnOnce(&mut Self) -> R,
+    {
         self.thunk_scopes.push(Vec::new());
-    }
-
-    fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)> {
-        self.thunk_scopes
-            .pop()
-            .expect("pop_thunk_scope without active scope")
+        let ret = f(self);
+        (
+            ret,
+            self.thunk_scopes.pop().expect("no thunk scope left???"),
+        )
     }

     fn register_thunk(&mut self, slot: ExprId, inner: ExprId) {

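Folding push_thunk_scope/pop_thunk_scope into a closure-taking with_thunk_scope makes unbalanced scopes unrepresentable at call sites: the pop runs exactly once per push, inside the wrapper. The shape of the API, transliterated to TypeScript:

    // Scoped variant: callers cannot forget (or double) the pop.
    const thunkScopes: Array<Array<[number, number]>> = [];

    const withThunkScope = <R>(f: () => R): [R, Array<[number, number]>] => {
      thunkScopes.push([]);
      const ret = f(); // like the Rust version, assumes f does not throw past us
      return [ret, thunkScopes.pop()!];
    };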
@@ -18,6 +18,7 @@ pub trait DowngradeContext {
     fn new_expr(&mut self, expr: Ir) -> ExprId;
     fn new_arg(&mut self, span: TextRange) -> ExprId;
     fn maybe_thunk(&mut self, id: ExprId) -> ExprId;
+    fn register_thunk(&mut self, slot: ExprId, inner: ExprId);

     fn new_sym(&mut self, sym: String) -> SymId;
     fn get_sym(&self, id: SymId) -> Symbol<'_>;
@@ -37,10 +38,9 @@ pub trait DowngradeContext {
     fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
     where
         F: FnOnce(&mut Self) -> R;

-    fn push_thunk_scope(&mut self);
-    fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)>;
-    fn register_thunk(&mut self, slot: ExprId, inner: ExprId);
+    fn with_thunk_scope<F, R>(&mut self, f: F) -> (R, Vec<(ExprId, ExprId)>)
+    where
+        F: FnOnce(&mut Self) -> R;
 }

 pub trait Downgrade<Ctx: DowngradeContext> {
@@ -419,48 +419,56 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
         let raw_param = self.param().unwrap();
         let arg = ctx.new_arg(raw_param.syntax().text_range());

-        ctx.push_thunk_scope();
-
-        let param;
-        let body;
-
-        match raw_param {
-            ast::Param::IdentParam(id) => {
-                // Simple case: `x: body`
-                let param_sym = ctx.new_sym(id.to_string());
-                param = None;
-
-                // Downgrade body in Param scope
-                body = ctx
-                    .with_param_scope(param_sym, arg, |ctx| self.body().unwrap().downgrade(ctx))?;
-            }
-            ast::Param::Pattern(pattern) => {
-                let alias = pattern
-                    .pat_bind()
-                    .map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
-
-                let ellipsis = pattern.ellipsis_token().is_some();
-                let pat_entries = pattern.pat_entries();
-
-                let PatternBindings {
-                    body: inner_body,
-                    required,
-                    optional,
-                } = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
-                    self.body().unwrap().downgrade(ctx)
-                })?;
-
-                param = Some(Param {
-                    required,
-                    optional,
-                    ellipsis,
-                });
-
-                body = inner_body;
-            }
-        }
-
-        let thunks = ctx.pop_thunk_scope();
+        struct Ret {
+            param: Option<Param>,
+            body: ExprId,
+        }
+
+        let (ret, thunks) = ctx.with_thunk_scope(|ctx| {
+            let param;
+            let body;
+
+            match raw_param {
+                ast::Param::IdentParam(id) => {
+                    // Simple case: `x: body`
+                    let param_sym = ctx.new_sym(id.to_string());
+                    param = None;
+
+                    // Downgrade body in Param scope
+                    body = ctx.with_param_scope(param_sym, arg, |ctx| {
+                        self.body().unwrap().downgrade(ctx)
+                    })?;
+                }
+                ast::Param::Pattern(pattern) => {
+                    let alias = pattern
+                        .pat_bind()
+                        .map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
+
+                    let ellipsis = pattern.ellipsis_token().is_some();
+                    let pat_entries = pattern.pat_entries();
+
+                    let PatternBindings {
+                        body: inner_body,
+                        required,
+                        optional,
+                    } = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
+                        self.body().unwrap().downgrade(ctx)
+                    })?;
+
+                    param = Some(Param {
+                        required,
+                        optional,
+                        ellipsis,
+                    });
+
+                    body = inner_body;
+                }
+            }
+
+            Result::Ok(Ret { param, body })
+        });
+        let Ret { param, body } = ret?;

         let span = self.syntax().text_range();
         Ok(ctx.new_expr(
             Func {
@@ -21,6 +21,8 @@ pub enum SourceType {
     Repl(Arc<PathBuf>),
     /// file
     File(Arc<PathBuf>),
+    /// virtual (name, no path)
+    Virtual(Arc<str>),
 }

 #[derive(Clone, Debug)]
@@ -71,6 +73,13 @@ impl Source {
         })
     }

+    pub fn new_virtual(name: Arc<str>, src: String) -> Self {
+        Self {
+            ty: SourceType::Virtual(name),
+            src: src.into(),
+        }
+    }
+
     pub fn get_dir(&self) -> &Path {
         use SourceType::*;
         match &self.ty {
@@ -79,6 +88,7 @@ impl Source {
                 .as_path()
                 .parent()
                 .expect("source file must have a parent dir"),
+            Virtual(_) => Path::new("/"),
         }
     }

@@ -87,6 +97,7 @@ impl Source {
             SourceType::Eval(_) => "«eval»".into(),
             SourceType::Repl(_) => "«repl»".into(),
             SourceType::File(path) => path.as_os_str().to_string_lossy().to_string(),
+            SourceType::Virtual(name) => name.to_string(),
         }
     }
 }

@@ -1,10 +1,13 @@
 use deno_core::OpState;
 use deno_core::op2;
+use nix_compat::nixhash::HashAlgo;
+use nix_compat::nixhash::NixHash;
 use serde::Serialize;
 use tracing::{debug, info, warn};

 use crate::runtime::OpStateExt;
 use crate::runtime::RuntimeContext;
+use crate::store::Store as _;

 mod archive;
 pub(crate) mod cache;
@@ -117,7 +120,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(

     info!(bytes = data.len(), "Download complete");

-    let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
+    let hash = crate::nix_utils::sha256_hex(&String::from_utf8_lossy(&data));

     if let Some(ref expected) = expected_hash {
         let normalized_expected = normalize_hash(expected);
@@ -164,9 +167,8 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
 pub fn op_fetch_tarball<Ctx: RuntimeContext>(
     state: &mut OpState,
     #[string] url: String,
-    #[string] expected_hash: Option<String>,
-    #[string] expected_nar_hash: Option<String>,
     #[string] name: Option<String>,
+    #[string] sha256: Option<String>,
 ) -> Result<FetchTarballResult, NixRuntimeError> {
     let _span = tracing::info_span!("op_fetch_tarball", url = %url).entered();
     info!("fetchTarball started");
@@ -181,6 +183,16 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
         "name": dir_name,
     });

+    let expected_sha256 = sha256
+        .map(
+            |ref sha256| match NixHash::from_str(sha256, Some(HashAlgo::Sha256)) {
+                Ok(NixHash::Sha256(digest)) => Ok(digest),
+                _ => Err(format!("fetchTarball: invalid sha256 '{sha256}'")),
+            },
+        )
+        .transpose()?;
+    let expected_hex = expected_sha256.map(hex::encode);
+
     if let Some(cached_entry) = metadata_cache
         .lookup(&input)
         .map_err(|e| NixRuntimeError::from(e.to_string()))?
@@ -196,9 +208,8 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
         .and_then(|v| v.as_str())
         .unwrap_or("");

-    if let Some(ref expected_nar) = expected_nar_hash {
-        let normalized_expected = normalize_hash(expected_nar);
-        if cached_nar_hash == normalized_expected {
+    if let Some(ref hex) = expected_hex {
+        if cached_nar_hash == hex {
             info!("Cache hit");
             return Ok(FetchTarballResult {
                 store_path: cached_entry.store_path.clone(),
@@ -224,16 +235,15 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(

     info!(bytes = data.len(), "Download complete");

-    let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
+    let tarball_hash = crate::nix_utils::sha256_hex(&String::from_utf8_lossy(&data));

-    if let Some(ref expected) = expected_hash {
-        let normalized_expected = normalize_hash(expected);
-        if tarball_hash != normalized_expected {
-            return Err(NixRuntimeError::from(format!(
-                "Tarball hash mismatch for '{}': expected {}, got {}",
-                url, normalized_expected, tarball_hash
-            )));
-        }
+    if let Some(ref expected) = expected_hex
+        && tarball_hash != *expected
+    {
+        return Err(NixRuntimeError::from(format!(
+            "Tarball hash mismatch for '{}': expected {}, got {}",
+            url, expected, tarball_hash
+        )));
     }

     info!("Extracting tarball");
@@ -245,21 +255,23 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
     info!("Computing NAR hash");
     let nar_hash =
         nar::compute_nar_hash(&extracted_path).map_err(|e| NixRuntimeError::from(e.to_string()))?;
+    let nar_hash_hex = hex::encode(nar_hash);

     debug!(
         tarball_hash = %tarball_hash,
-        nar_hash = %nar_hash,
+        nar_hash = %nar_hash_hex,
         "Hash computation complete"
     );

-    if let Some(ref expected) = expected_nar_hash {
-        let normalized_expected = normalize_hash(expected);
-        if nar_hash != normalized_expected {
-            return Err(NixRuntimeError::from(format!(
-                "NAR hash mismatch for '{}': expected {}, got {}",
-                url, normalized_expected, nar_hash
-            )));
-        }
+    if let Some(ref expected) = expected_sha256
+        && nar_hash != *expected
+    {
+        return Err(NixRuntimeError::from(format!(
+            "NAR hash mismatch for '{}': expected {}, got {}",
+            url,
+            expected_hex.expect("must be Some"),
+            nar_hash_hex
+        )));
     }

     info!("Adding to store");
@@ -277,7 +289,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
         "url": url,
     });

-    let immutable = expected_nar_hash.is_some();
+    let immutable = expected_sha256.is_some();
     metadata_cache
         .add(&input, &info, &store_path, immutable)
         .map_err(|e| NixRuntimeError::from(e.to_string()))?;
@@ -285,7 +297,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
     Ok(FetchTarballResult {
         store_path,
         hash: tarball_hash,
-        nar_hash,
+        nar_hash: nar_hash_hex,
     })
 }

@@ -1,5 +1,5 @@
 use std::fs;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;

 use super::archive::ArchiveError;

@@ -65,7 +65,7 @@ impl FetcherCache {
     }

     fn hash_key(url: &str) -> String {
-        crate::nix_hash::sha256_hex(url)
+        crate::nix_utils::sha256_hex(url)
     }

     pub fn get_git_bare(&self, url: &str) -> PathBuf {
@@ -87,21 +87,3 @@ impl FetcherCache {
         Ok((extracted_path, temp_dir))
     }
 }
-
-pub fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
-    fs::create_dir_all(dst)?;
-
-    for entry in fs::read_dir(src)? {
-        let entry = entry?;
-        let path = entry.path();
-        let dest_path = dst.join(entry.file_name());
-
-        if path.is_dir() {
-            copy_dir_recursive(&path, &dest_path)?;
-        } else {
-            fs::copy(&path, &dest_path)?;
-        }
-    }
-
-    Ok(())
-}
@@ -31,8 +31,10 @@ pub fn fetch_git(
     let temp_dir = tempfile::tempdir()?;
     let checkout_dir = checkout_rev_to_temp(&bare_repo, &target_rev, submodules, temp_dir.path())?;

-    let nar_hash = crate::nar::compute_nar_hash(&checkout_dir)
-        .map_err(|e| GitError::NarHashError(e.to_string()))?;
+    let nar_hash = hex::encode(
+        crate::nar::compute_nar_hash(&checkout_dir)
+            .map_err(|e| GitError::NarHashError(e.to_string()))?,
+    );

     let store_path = store
         .add_to_store_from_path(name, &checkout_dir, vec![])

@@ -124,7 +124,7 @@ fn checkout_rev(
     name: &str,
     cache: &FetcherCache,
 ) -> Result<PathBuf, HgError> {
-    let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
+    let hash = crate::nix_utils::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
     let checkout_dir = cache.make_store_path(&hash, name);

     if checkout_dir.exists() {
@@ -10,7 +10,7 @@ mod downgrade;
 mod fetcher;
 mod ir;
 mod nar;
-mod nix_hash;
+mod nix_utils;
 mod runtime;
 mod store;

@@ -5,14 +5,14 @@ use std::path::Path;

 use crate::error::{Error, Result};

-pub fn compute_nar_hash(path: &Path) -> Result<String> {
+pub fn compute_nar_hash(path: &Path) -> Result<[u8; 32]> {
     let mut hasher = Sha256::new();
     std::io::copy(
         &mut Encoder::new(path).map_err(|err| Error::internal(err.to_string()))?,
         &mut hasher,
     )
     .map_err(|err| Error::internal(err.to_string()))?;
-    Ok(hex::encode(hasher.finalize()))
+    Ok(hasher.finalize().into())
 }

 pub fn pack_nar(path: &Path) -> Result<Vec<u8>> {
@@ -37,7 +37,7 @@ mod tests {
         let file_path = temp.path().join("test.txt");
         fs::write(&file_path, "hello").unwrap();

-        let hash = compute_nar_hash(&file_path).unwrap();
+        let hash = hex::encode(compute_nar_hash(&file_path).unwrap());
         assert_eq!(
             hash,
             "0a430879c266f8b57f4092a0f935cf3facd48bbccde5760d4748ca405171e969"
@@ -52,7 +52,7 @@ mod tests {
         fs::write(temp.path().join("a.txt"), "aaa").unwrap();
         fs::write(temp.path().join("b.txt"), "bbb").unwrap();

-        let hash = compute_nar_hash(temp.path()).unwrap();
+        let hash = hex::encode(compute_nar_hash(temp.path()).unwrap());
        assert_eq!(
            hash,
            "0036c14209749bc9b9631e2077b108b701c322ab53853cd26f2746268a86fc0f"
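Returning the raw 32-byte digest from compute_nar_hash pushes string encoding to the call sites that actually need text, and lets byte consumers (such as store-path compression) skip a hex round-trip. The boundary, sketched in TypeScript (computeNarHash is a hypothetical analog, not part of this diff):

    import { Buffer } from "node:buffer";

    declare const computeNarHash: (path: string) => Uint8Array; // 32-byte digest

    // Keep digests as bytes internally; hex-encode once, at the edge.
    const narHashHexFor = (path: string): string =>
      Buffer.from(computeNarHash(path)).toString("hex");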
nix-js/src/nix_hash.rs (deleted file)

@@ -1,175 +0,0 @@
-use sha2::{Digest, Sha256};
-
-const NIX_BASE32_CHARS: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz";
-
-pub fn sha256_hex(data: &str) -> String {
-    let mut hasher = Sha256::new();
-    hasher.update(data.as_bytes());
-    hex::encode(hasher.finalize())
-}
-
-pub fn compress_hash(hash: &[u8; 32], new_size: usize) -> Vec<u8> {
-    let mut result = vec![0u8; new_size];
-    for i in 0..32 {
-        result[i % new_size] ^= hash[i];
-    }
-    result
-}
-
-pub fn nix_base32_encode(bytes: &[u8]) -> String {
-    let len = (bytes.len() * 8 - 1) / 5 + 1;
-    let mut result = String::with_capacity(len);
-
-    for n in (0..len).rev() {
-        let b = n * 5;
-        let i = b / 8;
-        let j = b % 8;
-
-        let c = if i >= bytes.len() {
-            0
-        } else {
-            let mut c = (bytes[i] as u16) >> j;
-            if j > 3 && i + 1 < bytes.len() {
-                c |= (bytes[i + 1] as u16) << (8 - j);
-            }
-            c
-        };
-
-        result.push(NIX_BASE32_CHARS[(c & 0x1f) as usize] as char);
-    }
-
-    result
-}
-
-pub fn nix_base32_decode(input: &str) -> Option<Vec<u8>> {
-    let len = input.len() * 5 / 8;
-    let mut bytes = vec![0u8; len];
-
-    for (n, ch) in input.chars().rev().enumerate() {
-        let digit = NIX_BASE32_CHARS.iter().position(|&c| c == ch as u8)? as u16;
-        let b = n * 5;
-        let i = b / 8;
-        let j = b % 8;
-        if i < len {
-            bytes[i] |= (digit << j) as u8;
-        }
-        if j > 3 && i + 1 < len {
-            bytes[i + 1] |= (digit >> (8 - j)) as u8;
-        }
-    }
-
-    Some(bytes)
-}
-
-pub fn decode_hash_to_hex(hash_str: &str) -> Option<String> {
-    if let Some(rest) = hash_str.strip_prefix("sha256:") {
-        return decode_hash_to_hex(rest);
-    }
-    if let Some(base64_str) = hash_str.strip_prefix("sha256-") {
-        use base64::{Engine, engine::general_purpose::STANDARD};
-        let bytes = STANDARD.decode(base64_str).ok()?;
-        return Some(hex::encode(bytes));
-    }
-    if hash_str.len() == 64 && hash_str.chars().all(|c| c.is_ascii_hexdigit()) {
-        return Some(hash_str.to_string());
-    }
-    if hash_str.len() == 52 {
-        let bytes = nix_base32_decode(hash_str)?;
-        return Some(hex::encode(bytes));
-    }
-    None
-}
-
-pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
-    let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);
-
-    let mut hasher = Sha256::new();
-    hasher.update(s.as_bytes());
-    let hash: [u8; 32] = hasher.finalize().into();
-
-    let compressed = compress_hash(&hash, 20);
-    let encoded = nix_base32_encode(&compressed);
-
-    format!("{}/{}-{}", store_dir, encoded, name)
-}
-
-pub fn make_text_store_path(
-    store_dir: &str,
-    hash_hex: &str,
-    name: &str,
-    references: &[String],
-) -> String {
-    let mut ty = String::from("text");
-    for reference in references {
-        ty.push(':');
-        ty.push_str(reference);
-    }
-    make_store_path(store_dir, &ty, hash_hex, name)
-}
-
-pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
-    if output_name == "out" {
-        drv_name.to_string()
-    } else {
-        format!("{}-{}", drv_name, output_name)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_nix_base32_encode() {
-        let bytes = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF];
-        let encoded = nix_base32_encode(&bytes);
-        assert_eq!(encoded.len(), 8);
-
-        let bytes_zero = [0u8; 20];
-        let encoded_zero = nix_base32_encode(&bytes_zero);
-        assert_eq!(encoded_zero.len(), 32);
-        assert!(encoded_zero.chars().all(|c| c == '0'));
-    }
-
-    #[test]
-    fn test_compress_hash() {
-        let hash = [0u8; 32];
-        let compressed = compress_hash(&hash, 20);
-        assert_eq!(compressed.len(), 20);
-        assert!(compressed.iter().all(|&b| b == 0));
-
-        let hash_ones = [0xFF; 32];
-        let compressed_ones = compress_hash(&hash_ones, 20);
-        assert_eq!(compressed_ones.len(), 20);
-    }
-
-    #[test]
-    fn test_sha256_hex() {
-        let data = "hello world";
-        let hash = sha256_hex(data);
-        assert_eq!(hash.len(), 64);
-        assert_eq!(
-            hash,
-            "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
-        );
-    }
-
-    #[test]
-    fn test_output_path_name() {
-        assert_eq!(output_path_name("hello", "out"), "hello");
-        assert_eq!(output_path_name("hello", "dev"), "hello-dev");
-        assert_eq!(output_path_name("hello", "doc"), "hello-doc");
-    }
-
-    #[test]
-    fn test_make_store_path() {
-        let path = make_store_path("/nix/store", "output:out", "abc123", "hello");
-        assert!(path.starts_with("/nix/store/"));
-        assert!(path.ends_with("-hello"));
-
-        let hash_parts: Vec<&str> = path.split('/').collect();
-        assert_eq!(hash_parts.len(), 4);
-        let name_part = hash_parts[3];
-        assert!(name_part.contains('-'));
-    }
-}
29 nix-js/src/nix_utils.rs (new file)

@@ -0,0 +1,29 @@
+use nix_compat::store_path::compress_hash;
+use sha2::{Digest as _, Sha256};
+
+pub fn sha256_hex(data: &str) -> String {
+    let mut hasher = Sha256::new();
+    hasher.update(data.as_bytes());
+    hex::encode(hasher.finalize())
+}
+
+pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
+    let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);
+
+    let mut hasher = Sha256::new();
+    hasher.update(s.as_bytes());
+    let hash: [u8; 32] = hasher.finalize().into();
+
+    let compressed = compress_hash::<20>(&hash);
+    let encoded = nix_compat::nixbase32::encode(&compressed);
+
+    format!("{}/{}-{}", store_dir, encoded, name)
+}
+
+pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
+    if output_name == "out" {
+        drv_name.to_string()
+    } else {
+        format!("{}-{}", drv_name, output_name)
+    }
+}
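The replacement module keeps only what nix-compat does not already provide. compress_hash::<20> is the XOR fold Nix uses to shrink a SHA-256 digest to the 20 bytes that become the base32 hash part of a store path; a sketch of the fold, mirroring the compress_hash deleted above:

    // XOR-fold a 32-byte digest down to n bytes (as in the removed nix_hash.rs
    // and nix_compat::store_path::compress_hash).
    const compressHash = (hash: Uint8Array, newSize: number): Uint8Array => {
      const result = new Uint8Array(newSize);
      for (let i = 0; i < hash.length; i++) {
        result[i % newSize] ^= hash[i];
      }
      return result;
    };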
@@ -5,7 +5,7 @@ use std::path::Path;
 use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};

 use crate::error::{Error, Result, Source};
-use crate::store::Store;
+use crate::store::DaemonStore;
 use crate::value::{AttrSet, List, Symbol, Value};

 mod ops;
@@ -21,7 +21,7 @@ pub(crate) trait RuntimeContext: 'static {
     fn compile(&mut self, source: Source) -> Result<String>;
     fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>;
     fn get_source(&self, id: usize) -> Source;
-    fn get_store(&self) -> &dyn Store;
+    fn get_store(&self) -> &DaemonStore;
 }

 pub(crate) trait OpStateExt<Ctx: RuntimeContext> {
@@ -55,12 +55,15 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
             op_make_placeholder(),
             op_decode_span::<Ctx>(),
             op_make_store_path::<Ctx>(),
-            op_make_text_store_path::<Ctx>(),
             op_output_path_name(),
+            op_parse_hash(),
             op_make_fixed_output_path::<Ctx>(),
             op_add_path::<Ctx>(),
             op_store_path::<Ctx>(),
             op_to_file::<Ctx>(),
+            op_write_derivation::<Ctx>(),
+            op_read_derivation_outputs(),
+            op_compute_fs_closure(),
             op_copy_path_to_store::<Ctx>(),
             op_get_env(),
             op_walk_dir(),
nix-js/src/runtime/corepkgs/derivation.nix (new file)
@@ -0,0 +1,31 @@
+drvAttrs@{
+  outputs ? [ "out" ],
+  ...
+}:
+
+let
+
+  strict = derivationStrict drvAttrs;
+
+  commonAttrs =
+    drvAttrs
+    // (builtins.listToAttrs outputsList)
+    // {
+      all = map (x: x.value) outputsList;
+      inherit drvAttrs;
+    };
+
+  outputToAttrListElement = outputName: {
+    name = outputName;
+    value = commonAttrs // {
+      outPath = builtins.getAttr outputName strict;
+      drvPath = strict.drvPath;
+      type = "derivation";
+      inherit outputName;
+    };
+  };
+
+  outputsList = map outputToAttrListElement outputs;
+
+in
+(builtins.head outputsList).value
@@ -1,5 +1,5 @@
 use std::path::{Component, Path, PathBuf};
-use std::sync::Arc;
+use std::str::FromStr;

 use hashbrown::hash_map::{Entry, HashMap};

@@ -7,9 +7,9 @@ use deno_core::OpState;
 use regex::Regex;
 use rust_embed::Embed;

-use crate::error::Source;
-
 use super::{NixRuntimeError, OpStateExt, RuntimeContext};
+use crate::error::Source;
+use crate::store::Store as _;

 #[derive(Debug, Default)]
 pub(super) struct RegexCache {

@@ -52,12 +52,12 @@ pub(super) fn op_import<Ctx: RuntimeContext>(
     let corepkg_name = &path[5..path.len() - 1];
     if let Some(file) = CorePkgs::get(corepkg_name) {
         tracing::info!("Importing corepkg: {}", corepkg_name);
-        let source = Source {
-            ty: crate::error::SourceType::Eval(Arc::new(ctx.get_current_dir().to_path_buf())),
-            src: str::from_utf8(&file.data)
-                .expect("corrupted corepkgs file")
-                .into(),
-        };
+        let source = Source::new_virtual(
+            path.into(),
+            str::from_utf8(&file.data)
+                .expect("corrupted corepkgs file")
+                .into(),
+        );
         ctx.add_source(source.clone());
         return Ok(ctx.compile(source).map_err(|err| err.to_string())?);
     } else {
@@ -246,7 +246,7 @@ pub(super) fn op_resolve_path(
 #[deno_core::op2]
 #[string]
 pub(super) fn op_sha256_hex(#[string] data: String) -> String {
-    crate::nix_hash::sha256_hex(&data)
+    crate::nix_utils::sha256_hex(&data)
 }

 #[deno_core::op2]

@@ -257,7 +257,7 @@ pub(super) fn op_make_placeholder(#[string] output: String) -> String {
     let mut hasher = Sha256::new();
     hasher.update(input.as_bytes());
     let hash: [u8; 32] = hasher.finalize().into();
-    let encoded = crate::nix_hash::nix_base32_encode(&hash);
+    let encoded = nix_compat::nixbase32::encode(&hash);
     format!("/{}", encoded)
 }
@@ -322,21 +322,7 @@ pub(super) fn op_make_store_path<Ctx: RuntimeContext>(
     let ctx: &Ctx = state.get_ctx();
     let store = ctx.get_store();
     let store_dir = store.get_store_dir();
-    crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
-}
-
-#[deno_core::op2]
-#[string]
-pub(super) fn op_make_text_store_path<Ctx: RuntimeContext>(
-    state: &mut OpState,
-    #[string] hash_hex: String,
-    #[string] name: String,
-    #[serde] references: Vec<String>,
-) -> String {
-    let ctx: &Ctx = state.get_ctx();
-    let store = ctx.get_store();
-    let store_dir = store.get_store_dir();
-    crate::nix_hash::make_text_store_path(store_dir, &hash_hex, &name, &references)
+    crate::nix_utils::make_store_path(store_dir, &ty, &hash_hex, &name)
 }

 #[deno_core::op2]
@@ -345,7 +331,40 @@ pub(super) fn op_output_path_name(
     #[string] drv_name: String,
     #[string] output_name: String,
 ) -> String {
-    crate::nix_hash::output_path_name(&drv_name, &output_name)
+    crate::nix_utils::output_path_name(&drv_name, &output_name)
 }

+#[derive(serde::Serialize)]
+pub(super) struct ParsedHash {
+    hex: String,
+    algo: String,
+}
+
+#[deno_core::op2]
+#[serde]
+pub(super) fn op_parse_hash(
+    #[string] hash_str: String,
+    #[string] algo: Option<String>,
+) -> std::result::Result<ParsedHash, NixRuntimeError> {
+    use nix_compat::nixhash::{HashAlgo, NixHash};
+
+    let hash_algo = algo
+        .as_deref()
+        .and_then(|algo| HashAlgo::from_str(algo).ok());
+
+    let hash = NixHash::from_str(&hash_str, hash_algo).map_err(|e| {
+        NixRuntimeError::from(format!(
+            "invalid hash '{}'{}: {}",
+            hash_str,
+            algo.map_or("".to_string(), |algo| format!(" for algorithm '{algo}'")),
+            e
+        ))
+    })?;
+
+    Ok(ParsedHash {
+        hex: hex::encode(hash.digest_as_bytes()),
+        algo: hash.algo().to_string(),
+    })
+}

 #[deno_core::op2]
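A sketch of what op_parse_hash accepts, mirroring the nix-compat calls above (the digest is illustrative):

    // NixHash::from_str handles bare hex, nixbase32, and SRI ("sha256-...")
    // encodings; passing Some(HashAlgo) pins the expected algorithm.
    let hash = NixHash::from_str(
        "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9",
        Some(HashAlgo::Sha256),
    )?;
    assert_eq!(hash.algo().to_string(), "sha256");
    assert_eq!(hex::encode(hash.digest_as_bytes()).len(), 64);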
@@ -364,7 +383,7 @@ pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
     let store_dir = store.get_store_dir();

     if hash_algo == "sha256" && hash_mode == "recursive" {
-        crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
+        crate::nix_utils::make_store_path(store_dir, "source", &hash, &name)
     } else {
         let prefix = if hash_mode == "recursive" { "r:" } else { "" };
         let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);

@@ -372,7 +391,7 @@ pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
         hasher.update(inner_input.as_bytes());
         let inner_hash = hex::encode(hasher.finalize());

-        crate::nix_hash::make_store_path(store_dir, "output:out", &inner_hash, &name)
+        crate::nix_utils::make_store_path(store_dir, "output:out", &inner_hash, &name)
     }
 }
@@ -385,6 +404,7 @@ pub(super) fn op_add_path<Ctx: RuntimeContext>(
     recursive: bool,
     #[string] sha256: Option<String>,
 ) -> std::result::Result<String, NixRuntimeError> {
+    use nix_compat::nixhash::{HashAlgo, NixHash};
     use sha2::{Digest, Sha256};
     use std::fs;
     use std::path::Path;

@@ -420,18 +440,18 @@ pub(super) fn op_add_path<Ctx: RuntimeContext>(

         let mut hasher = Sha256::new();
         hasher.update(&contents);
-        hex::encode(hasher.finalize())
+        hasher.finalize().into()
     };

-    if let Some(expected_hash) = sha256 {
-        let expected_hex =
-            crate::nix_hash::decode_hash_to_hex(&expected_hash).ok_or_else(|| {
-                NixRuntimeError::from(format!("invalid hash format: {}", expected_hash))
-            })?;
-        if computed_hash != expected_hex {
+    if let Some(ref expected_hash) = sha256 {
+        let expected_hex = NixHash::from_str(expected_hash, Some(HashAlgo::Sha256))
+            .map_err(|err| err.to_string())?;
+        if computed_hash != expected_hex.digest_as_bytes() {
             return Err(NixRuntimeError::from(format!(
                 "hash mismatch for path '{}': expected {}, got {}",
-                path, expected_hex, computed_hash
+                path,
+                hex::encode(expected_hex.digest_as_bytes()),
+                hex::encode(computed_hash)
             )));
         }
     }
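The check now compares digest bytes rather than hex strings. A minimal sketch of the comparison in isolation (expected_sri and contents are hypothetical):

    // digest_as_bytes() returns &[u8]; core provides PartialEq between
    // [u8; 32] and &[u8], so no hex round-trip is needed.
    let computed: [u8; 32] = Sha256::digest(&contents).into();
    let expected = NixHash::from_str(&expected_sri, Some(HashAlgo::Sha256))?;
    if computed != expected.digest_as_bytes() {
        // report both sides hex-encoded, as the op does
    }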
@@ -492,6 +512,269 @@ pub(super) fn op_to_file<Ctx: RuntimeContext>(
     Ok(store_path)
 }

+#[deno_core::op2]
+#[string]
+pub(super) fn op_write_derivation<Ctx: RuntimeContext>(
+    state: &mut OpState,
+    #[string] drv_name: String,
+    #[string] aterm: String,
+    #[serde] references: Vec<String>,
+) -> std::result::Result<String, NixRuntimeError> {
+    tracing::debug!(
+        "op_write_derivation: name={}.drv, references={:?}",
+        drv_name,
+        references
+    );
+    let ctx: &Ctx = state.get_ctx();
+    let store = ctx.get_store();
+    let store_path = store
+        .add_text_to_store(&format!("{}.drv", drv_name), &aterm, references)
+        .map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
+
+    Ok(store_path)
+}
+
+#[deno_core::op2]
+#[serde]
+pub(super) fn op_read_derivation_outputs(
+    #[string] drv_path: String,
+) -> std::result::Result<Vec<String>, NixRuntimeError> {
+    let content = std::fs::read_to_string(&drv_path).map_err(|e| {
+        NixRuntimeError::from(format!("failed to read derivation {}: {}", drv_path, e))
+    })?;
+
+    let outputs = parse_derivation_outputs(&content)
+        .ok_or_else(|| NixRuntimeError::from(format!("failed to parse derivation {}", drv_path)))?;
+
+    Ok(outputs)
+}
+
+fn parse_derivation_outputs(aterm: &str) -> Option<Vec<String>> {
+    let aterm = aterm.strip_prefix("Derive([")?;
+    let outputs_end = aterm.find("],[")?;
+    let outputs_section = &aterm[..outputs_end];
+
+    let mut outputs = Vec::new();
+    let mut pos = 0;
+    let bytes = outputs_section.as_bytes();
+
+    while pos < bytes.len() {
+        while pos < bytes.len() && bytes[pos] != b'(' {
+            pos += 1;
+        }
+        if pos >= bytes.len() {
+            break;
+        }
+        pos += 1;
+
+        if pos >= bytes.len() || bytes[pos] != b'"' {
+            break;
+        }
+        pos += 1;
+
+        let name_start = pos;
+        while pos < bytes.len() && bytes[pos] != b'"' {
+            pos += 1;
+        }
+        let name = std::str::from_utf8(&bytes[name_start..pos]).ok()?;
+        outputs.push(name.to_string());
+
+        while pos < bytes.len() && bytes[pos] != b')' {
+            pos += 1;
+        }
+        pos += 1;
+    }
+
+    Some(outputs)
+}
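A hand-written ATerm fragment of the shape parse_derivation_outputs walks (real .drv files are much longer):

    // Output entries sit in the first bracketed list of the Derive(...) term.
    let aterm = r#"Derive([("dev","/nix/store/aaa-hello-dev","",""),("out","/nix/store/bbb-hello","","")],[],[],"x86_64-linux","/bin/sh",[],[])"#;
    assert_eq!(
        parse_derivation_outputs(aterm),
        Some(vec!["dev".to_string(), "out".to_string()])
    );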
+#[derive(serde::Serialize)]
+pub(super) struct DerivationInputs {
+    input_drvs: Vec<(String, Vec<String>)>,
+    input_srcs: Vec<String>,
+}
+
+fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
+    let aterm = aterm.strip_prefix("Derive([")?;
+
+    let mut bracket_count = 1;
+    let mut pos = 0;
+    let bytes = aterm.as_bytes();
+    while pos < bytes.len() && bracket_count > 0 {
+        match bytes[pos] {
+            b'[' => bracket_count += 1,
+            b']' => bracket_count -= 1,
+            _ => {}
+        }
+        pos += 1;
+    }
+    if bracket_count != 0 {
+        return None;
+    }
+
+    let rest = &aterm[pos..];
+    let rest = rest.strip_prefix(",[")?;
+
+    let mut input_drvs = Vec::new();
+    let mut bracket_count = 1;
+    let mut start = 0;
+    pos = 0;
+    let bytes = rest.as_bytes();
+
+    while pos < bytes.len() && bracket_count > 0 {
+        match bytes[pos] {
+            b'[' => bracket_count += 1,
+            b']' => bracket_count -= 1,
+            b'(' if bracket_count == 1 => {
+                start = pos;
+            }
+            b')' if bracket_count == 1 => {
+                let entry = &rest[start + 1..pos];
+                if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
+                    input_drvs.push((drv_path, outputs));
+                }
+            }
+            _ => {}
+        }
+        pos += 1;
+    }
+
+    let rest = &rest[pos..];
+    let rest = rest.strip_prefix(",[")?;
+
+    let mut input_srcs = Vec::new();
+    bracket_count = 1;
+    pos = 0;
+    let bytes = rest.as_bytes();
+
+    while pos < bytes.len() && bracket_count > 0 {
+        match bytes[pos] {
+            b'[' => bracket_count += 1,
+            b']' => bracket_count -= 1,
+            b'"' if bracket_count == 1 => {
+                pos += 1;
+                let src_start = pos;
+                while pos < bytes.len() && bytes[pos] != b'"' {
+                    if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
+                        pos += 2;
+                    } else {
+                        pos += 1;
+                    }
+                }
+                let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
+                input_srcs.push(src.to_string());
+            }
+            _ => {}
+        }
+        pos += 1;
+    }
+
+    Some(DerivationInputs {
+        input_drvs,
+        input_srcs,
+    })
+}
+
+fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
+    let entry = entry.strip_prefix('"')?;
+    let quote_end = entry.find('"')?;
+    let drv_path = entry[..quote_end].to_string();
+
+    let rest = &entry[quote_end + 1..];
+    let rest = rest.strip_prefix(",[")?;
+    let rest = rest.strip_suffix(']')?;
+
+    let mut outputs = Vec::new();
+    for part in rest.split(',') {
+        let part = part.trim();
+        if let Some(name) = part.strip_prefix('"').and_then(|s| s.strip_suffix('"')) {
+            outputs.push(name.to_string());
+        }
+    }
+
+    Some((drv_path, outputs))
+}
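One input-derivation entry as it is sliced out between the parentheses above (hand-written example):

    let entry = r#""/nix/store/ccc-dep.drv",["dev","out"]"#;
    assert_eq!(
        parse_input_drv_entry(entry),
        Some((
            "/nix/store/ccc-dep.drv".to_string(),
            vec!["dev".to_string(), "out".to_string()]
        ))
    );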
+#[derive(serde::Serialize)]
+pub(super) struct FsClosureResult {
+    input_drvs: Vec<(String, Vec<String>)>,
+    input_srcs: Vec<String>,
+}
+
+#[deno_core::op2]
+#[serde]
+pub(super) fn op_compute_fs_closure(
+    #[string] drv_path: String,
+) -> std::result::Result<FsClosureResult, NixRuntimeError> {
+    use std::collections::{BTreeMap, BTreeSet, VecDeque};
+
+    let mut all_input_srcs: BTreeSet<String> = BTreeSet::new();
+    let mut all_input_drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
+
+    let mut queue: VecDeque<String> = VecDeque::new();
+    let mut visited: BTreeSet<String> = BTreeSet::new();
+
+    queue.push_back(drv_path);
+
+    while let Some(current_path) = queue.pop_front() {
+        if visited.contains(&current_path) {
+            continue;
+        }
+        visited.insert(current_path.clone());
+
+        all_input_srcs.insert(current_path.clone());
+
+        if !current_path.ends_with(".drv") {
+            continue;
+        }
+
+        let content = match std::fs::read_to_string(&current_path) {
+            Ok(c) => c,
+            Err(e) => {
+                return Err(NixRuntimeError::from(format!(
+                    "failed to read derivation {}: {}",
+                    current_path, e
+                )));
+            }
+        };
+
+        let inputs = parse_derivation_inputs(&content).ok_or_else(|| {
+            NixRuntimeError::from(format!("failed to parse derivation {}", current_path))
+        })?;
+
+        for src in inputs.input_srcs {
+            all_input_srcs.insert(src.clone());
+            if !visited.contains(&src) {
+                queue.push_back(src);
+            }
+        }
+
+        for (dep_drv, outputs) in inputs.input_drvs {
+            all_input_srcs.insert(dep_drv.clone());
+
+            let entry = all_input_drvs.entry(dep_drv.clone()).or_default();
+            for output in outputs {
+                entry.insert(output);
+            }
+
+            if !visited.contains(&dep_drv) {
+                queue.push_back(dep_drv);
+            }
+        }
+    }
+
+    let input_drvs: Vec<(String, Vec<String>)> = all_input_drvs
+        .into_iter()
+        .map(|(k, v)| (k, v.into_iter().collect()))
+        .collect();
+    let input_srcs: Vec<String> = all_input_srcs.into_iter().collect();
+
+    Ok(FsClosureResult {
+        input_drvs,
+        input_srcs,
+    })
+}
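For intuition, the shape of the result for a small chain (illustrative paths, not lines from the commit): if top.drv lists dep.drv's "out" among its input derivations, the breadth-first walk above yields roughly

    // input_drvs: [("/nix/store/aaa-dep.drv", ["out"])]
    // input_srcs: ["/nix/store/aaa-dep.drv", "/nix/store/bbb-top.drv",
    //              plus every plain source path reachable via inputSrcs]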

 #[deno_core::op2]
 #[string]
 pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(

@@ -601,6 +884,7 @@ pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
     #[string] sha256: Option<String>,
     #[serde] include_paths: Vec<String>,
 ) -> std::result::Result<String, NixRuntimeError> {
+    use nix_compat::nixhash::{HashAlgo, NixHash};
     use sha2::{Digest, Sha256};
     use std::fs;

@@ -670,18 +954,18 @@ pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
             .map_err(|e| NixRuntimeError::from(format!("failed to read file: {}", e)))?;
         let mut hasher = Sha256::new();
         hasher.update(&contents);
-        hex::encode(hasher.finalize())
+        hasher.finalize().into()
     };

-    if let Some(expected_hash) = sha256 {
-        let expected_hex =
-            crate::nix_hash::decode_hash_to_hex(&expected_hash).ok_or_else(|| {
-                NixRuntimeError::from(format!("invalid hash format: {}", expected_hash))
-            })?;
-        if computed_hash != expected_hex {
+    if let Some(ref expected_hash) = sha256 {
+        let expected_hex = NixHash::from_str(expected_hash, Some(HashAlgo::Sha256))
+            .map_err(|err| err.to_string())?;
+        if computed_hash != expected_hex.digest_as_bytes() {
             return Err(NixRuntimeError::from(format!(
                 "hash mismatch for path '{}': expected {}, got {}",
-                src_path, expected_hex, computed_hash
+                src_path,
+                hex::encode(expected_hex.digest_as_bytes()),
+                hex::encode(computed_hash)
             )));
         }
     }
@@ -1,14 +1,16 @@
 #![allow(dead_code)]

-use crate::error::Result;
-
 mod config;
+mod daemon;
 mod error;
 mod validation;

-pub use config::{StoreConfig, StoreMode};
+pub use config::StoreConfig;
+pub use daemon::DaemonStore;
 pub use validation::validate_store_path;

+use crate::error::Result;
+
 pub trait Store: Send + Sync {
     fn get_store_dir(&self) -> &str;

@@ -46,66 +48,3 @@ pub trait Store: Send + Sync {
         name: &str,
     ) -> Result<String>;
 }
-
-pub enum StoreBackend {
-    Simulated(SimulatedStore),
-    #[cfg(feature = "daemon")]
-    Daemon(Box<DaemonStore>),
-}
-
-impl StoreBackend {
-    pub fn new(config: StoreConfig) -> Result<Self> {
-        match config.mode {
-            #[cfg(feature = "daemon")]
-            StoreMode::Daemon => {
-                let daemon = Box::new(DaemonStore::connect(&config.daemon_socket)?);
-                Ok(StoreBackend::Daemon(daemon))
-            }
-            #[cfg(not(feature = "daemon"))]
-            StoreMode::Daemon => {
-                tracing::warn!(
-                    "Daemon mode not available (nix-js not compiled with 'daemon' feature), falling back to simulated store"
-                );
-                let simulated = SimulatedStore::new()?;
-                Ok(StoreBackend::Simulated(simulated))
-            }
-            StoreMode::Simulated => {
-                let simulated = SimulatedStore::new()?;
-                Ok(StoreBackend::Simulated(simulated))
-            }
-            #[cfg(feature = "daemon")]
-            StoreMode::Auto => match DaemonStore::connect(&config.daemon_socket) {
-                Ok(daemon) => {
-                    tracing::debug!("Using nix-daemon at {}", config.daemon_socket.display());
-                    Ok(StoreBackend::Daemon(Box::new(daemon)))
-                }
-                Err(e) => {
-                    tracing::warn!("Daemon unavailable ({}), using simulated store", e);
-                    let simulated = SimulatedStore::new()?;
-                    Ok(StoreBackend::Simulated(simulated))
-                }
-            },
-            #[cfg(not(feature = "daemon"))]
-            StoreMode::Auto => {
-                let simulated = SimulatedStore::new()?;
-                Ok(StoreBackend::Simulated(simulated))
-            }
-        }
-    }
-
-    pub fn as_store(&self) -> &dyn Store {
-        match self {
-            StoreBackend::Simulated(s) => s,
-            #[cfg(feature = "daemon")]
-            StoreBackend::Daemon(d) => d.as_ref(),
-        }
-    }
-}
-
-mod simulated;
-pub use simulated::SimulatedStore;
-
-#[cfg(feature = "daemon")]
-mod daemon;
-#[cfg(feature = "daemon")]
-pub use daemon::DaemonStore;
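With StoreBackend, StoreMode, and the simulated store gone, callers presumably construct the daemon store directly. A sketch using the connect signature the removed dispatch relied on (assumed wiring, not a line from the commit):

    let config = StoreConfig::from_env();
    let store = DaemonStore::connect(&config.daemon_socket)?;
    let store_dir = store.get_store_dir();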
@@ -1,42 +1,17 @@
 use std::path::PathBuf;

-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum StoreMode {
-    Daemon,
-    Simulated,
-    Auto,
-}
-
 #[derive(Debug, Clone)]
 pub struct StoreConfig {
-    pub mode: StoreMode,
     pub daemon_socket: PathBuf,
 }

 impl StoreConfig {
     pub fn from_env() -> Self {
-        let mode = match std::env::var("NIX_JS_STORE_MODE")
-            .as_deref()
-            .map(|s| s.to_lowercase())
-            .as_deref()
-        {
-            Ok("daemon") => StoreMode::Daemon,
-            Ok("simulated") => StoreMode::Simulated,
-            Ok("auto") | Err(_) => StoreMode::Auto,
-            Ok(other) => {
-                tracing::warn!("Invalid NIX_JS_STORE_MODE '{}', using 'auto'", other);
-                StoreMode::Auto
-            }
-        };
-
         let daemon_socket = std::env::var("NIX_DAEMON_SOCKET")
             .map(PathBuf::from)
             .unwrap_or_else(|_| PathBuf::from("/nix/var/nix/daemon-socket/socket"));

-        Self {
-            mode,
-            daemon_socket,
-        }
+        Self { daemon_socket }
     }
 }
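Only one knob remains. A sketch of the resulting behavior (hypothetical value; set_var is unsafe under edition 2024, as in the removed init() helpers further down):

    unsafe { std::env::set_var("NIX_DAEMON_SOCKET", "/run/nix/socket") };
    let config = StoreConfig::from_env();
    assert_eq!(config.daemon_socket, PathBuf::from("/run/nix/socket"));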
@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
 use std::io::{Error as IoError, ErrorKind as IoErrorKind, Result as IoResult};
 use std::path::Path;

@@ -9,13 +7,14 @@ use nix_compat::store_path::StorePath;
 use nix_compat::wire::ProtocolVersion;
 use nix_compat::wire::de::{NixRead, NixReader};
 use nix_compat::wire::ser::{NixSerialize, NixWrite, NixWriter, NixWriterBuilder};
+use num_enum::{IntoPrimitive, TryFromPrimitive};
+use thiserror::Error;
 use tokio::io::{AsyncReadExt, AsyncWriteExt, ReadHalf, WriteHalf, split};
 use tokio::net::UnixStream;
 use tokio::sync::Mutex;

-use crate::error::{Error, Result};
-
 use super::Store;
+use crate::error::{Error, Result};

 pub struct DaemonStore {
     runtime: tokio::runtime::Runtime,

@@ -472,6 +471,7 @@ pub struct NixDaemonClient {
     protocol_version: ProtocolVersion,
     reader: NixReader<ReadHalf<UnixStream>>,
     writer: NixWriter<WriteHalf<UnixStream>>,
+    _marker: std::marker::PhantomData<std::cell::Cell<()>>,
 }

 impl NixDaemonClient {

@@ -503,26 +503,10 @@ impl NixDaemonClient {
             protocol_version,
             reader,
             writer,
+            _marker: Default::default(),
         })
     }

-    /// Execute an operation that returns a typed result
-    ///
-    /// This is the main method for implementing protocol operations:
-    /// 1. Send operation code
-    /// 2. Send operation parameters
-    /// 3. Receive response or error
-    async fn execute<T>(&mut self, operation: Operation) -> IoResult<T>
-    where
-        T: nix_compat::wire::de::NixDeserialize,
-    {
-        // Send operation
-        self.writer.write_value(&operation).await?;
-        self.writer.flush().await?;
-
-        self.read_response().await
-    }
-
     /// Execute an operation with a single parameter
     async fn execute_with<P, T>(&mut self, operation: Operation, param: &P) -> IoResult<T>
     where

@@ -542,7 +526,7 @@ impl NixDaemonClient {
     ///
     /// The daemon sends either:
     /// - STDERR_LAST followed by the result
-    /// - STDERR_ERROR followed by an error message
+    /// - STDERR_ERROR followed by a structured error
     async fn read_response<T>(&mut self) -> IoResult<T>
     where
         T: nix_compat::wire::de::NixDeserialize,

@@ -551,23 +535,43 @@ impl NixDaemonClient {
             let msg = self.reader.read_number().await?;

             if msg == STDERR_LAST {
                 // Success, read the actual response
                 let result: T = self.reader.read_value().await?;
                 return Ok(result);
             } else if msg == STDERR_ERROR {
-                // IoError, read error message
-                // The error is sent as a NixIoError struct, but we just read the message
-                let error_msg: String = self.reader.read_value().await?;
+                let error_msg = self.read_daemon_error().await?;
                 return Err(IoError::other(error_msg));
             } else {
                 // Other STDERR_* codes (logging, etc.) - for now, we ignore them
                 // Read and discard the associated data
                 let _data: String = self.reader.read_value().await?;
                 continue;
             }
         }
     }

+    async fn read_daemon_error(&mut self) -> IoResult<NixDaemonError> {
+        let type_marker: String = self.reader.read_value().await?;
+        assert_eq!(type_marker, "Error");
+
+        let level = NixDaemonErrorLevel::try_from_primitive(
+            self.reader.read_number().await?.try_into().unwrap(),
+        )
+        .unwrap();
+        // deprecated "name" field: read and discard
+        let _name: String = self.reader.read_value().await?;
+        let msg: String = self.reader.read_value().await?;
+        let have_pos: u64 = self.reader.read_number().await?;
+        assert_eq!(have_pos, 0);
+
+        let nr_traces: u64 = self.reader.read_number().await?;
+        let mut traces = Vec::new();
+        for _ in 0..nr_traces {
+            let _trace_pos: u64 = self.reader.read_number().await?;
+            let trace_hint: String = self.reader.read_value().await?;
+            traces.push(trace_hint);
+        }
+
+        Ok(NixDaemonError { level, msg, traces })
+    }
+
     /// Check if a path is valid in the store
     pub async fn is_valid_path(&mut self, path: &str) -> IoResult<bool> {
         let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
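The structured error frame consumed by read_daemon_error, reconstructed from the reads above (field order only; framing as in nix-compat's wire format):

    // "Error"      string type marker
    // level        u64, mapped to NixDaemonErrorLevel
    // name         string, deprecated and discarded
    // msg          string
    // have_pos     u64, asserted to be 0 here
    // nr_traces    u64, then per trace: position u64 + hint string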
@@ -581,19 +585,15 @@ impl NixDaemonClient {
         let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
             .map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;

-        // QueryPathInfo returns Option<UnkeyedValidPathInfo> which is serialized
-        // as a bool followed by the value if true
         self.writer.write_value(&Operation::QueryPathInfo).await?;
         self.writer.write_value(&store_path).await?;
         self.writer.flush().await?;
-
-        // Read response - it's serialized as bool + optional value
         loop {
             let msg = self.reader.read_number().await?;
             if msg == STDERR_LAST {
                 let has_value: bool = self.reader.read_value().await?;
                 if has_value {
                     // Manually deserialize UnkeyedValidPathInfo
                     use nix_compat::narinfo::Signature;
                     use nix_compat::nixhash::CAHash;

@@ -621,7 +621,7 @@ impl NixDaemonClient {
                     return Ok(None);
                 }
             } else if msg == STDERR_ERROR {
-                let error_msg: String = self.reader.read_value().await?;
+                let error_msg = self.read_daemon_error().await?;
                 return Err(IoError::other(error_msg));
             } else {
                 let _data: String = self.reader.read_value().await?;

@@ -635,18 +635,16 @@ impl NixDaemonClient {
         let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
             .map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;

-        // EnsurePath returns void (no value)
         self.writer.write_value(&Operation::EnsurePath).await?;
         self.writer.write_value(&store_path).await?;
         self.writer.flush().await?;

-        // Read response - expect STDERR_LAST with no value
         loop {
             let msg = self.reader.read_number().await?;
             if msg == STDERR_LAST {
                 return Ok(());
             } else if msg == STDERR_ERROR {
-                let error_msg: String = self.reader.read_value().await?;
+                let error_msg = self.read_daemon_error().await?;
                 return Err(IoError::other(error_msg));
             } else {
                 let _data: String = self.reader.read_value().await?;

@@ -729,7 +727,7 @@ impl NixDaemonClient {
             if msg == STDERR_LAST {
                 return Ok(());
             } else if msg == STDERR_ERROR {
-                let error_msg: String = self.reader.read_value().await?;
+                let error_msg = self.read_daemon_error().await?;
                 return Err(IoError::other(error_msg));
             } else {
                 let _data: String = self.reader.read_value().await?;

@@ -787,3 +785,24 @@ impl NixDaemonConnection {
         client.add_to_store_nar(request, nar_data).await
     }
 }
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, IntoPrimitive, TryFromPrimitive)]
+#[repr(u8)]
+pub enum NixDaemonErrorLevel {
+    Error = 0,
+    Warn,
+    Notice,
+    Info,
+    Talkative,
+    Chatty,
+    Debug,
+    Vomit,
+}
+
+#[derive(Debug, Error)]
+#[error("{msg}")]
+pub struct NixDaemonError {
+    level: NixDaemonErrorLevel,
+    msg: String,
+    traces: Vec<String>,
+}
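The num_enum derives give the error level its wire conversions; a sketch:

    // IntoPrimitive: enum -> u8; TryFromPrimitive: u8 -> enum, rejecting
    // out-of-range values instead of panicking.
    let level = NixDaemonErrorLevel::try_from_primitive(1).unwrap();
    assert_eq!(level, NixDaemonErrorLevel::Warn);
    let raw: u8 = NixDaemonErrorLevel::Debug.into();
    assert_eq!(raw, 6);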
@@ -1,121 +0,0 @@
-use super::Store;
-use crate::error::{Error, Result};
-use crate::fetcher::cache::FetcherCache;
-use std::fs;
-use std::path::Path;
-
-pub struct SimulatedStore {
-    cache: FetcherCache,
-    store_dir: String,
-}
-
-impl SimulatedStore {
-    pub fn new() -> Result<Self> {
-        let cache = FetcherCache::new()
-            .map_err(|e| Error::internal(format!("Failed to create simulated store: {}", e)))?;
-
-        let store_dir = dirs::cache_dir()
-            .unwrap_or_else(|| std::path::PathBuf::from("/tmp"))
-            .join("nix-js")
-            .join("fetchers")
-            .join("store")
-            .to_string_lossy()
-            .to_string();
-
-        Ok(Self { cache, store_dir })
-    }
-}
-
-impl Store for SimulatedStore {
-    fn get_store_dir(&self) -> &str {
-        &self.store_dir
-    }
-
-    fn is_valid_path(&self, path: &str) -> Result<bool> {
-        Ok(Path::new(path).exists())
-    }
-
-    fn ensure_path(&self, path: &str) -> Result<()> {
-        if !Path::new(path).exists() {
-            return Err(Error::eval_error(
-                format!(
-                    "builtins.storePath: path '{}' does not exist in the simulated store",
-                    path
-                ),
-                None,
-            ));
-        }
-        Ok(())
-    }
-
-    fn add_to_store(
-        &self,
-        name: &str,
-        content: &[u8],
-        _recursive: bool,
-        _references: Vec<String>,
-    ) -> Result<String> {
-        let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(content));
-
-        let store_path = self.cache.make_store_path(&hash, name);
-
-        if !store_path.exists() {
-            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))
-                .map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
-
-            fs::write(&store_path, content)
-                .map_err(|e| Error::internal(format!("Failed to write to store: {}", e)))?;
-        }
-
-        Ok(store_path.to_string_lossy().to_string())
-    }
-
-    fn add_to_store_from_path(
-        &self,
-        name: &str,
-        source_path: &Path,
-        _references: Vec<String>,
-    ) -> Result<String> {
-        use crate::fetcher::cache::copy_dir_recursive;
-
-        let nar_hash = crate::nar::compute_nar_hash(source_path)
-            .map_err(|e| Error::internal(format!("Failed to compute NAR hash: {}", e)))?;
-
-        let store_path = self.cache.make_store_path(&nar_hash, name);
-
-        if !store_path.exists() {
-            fs::create_dir_all(&store_path)
-                .map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
-
-            if source_path.is_dir() {
-                copy_dir_recursive(source_path, &store_path)
-                    .map_err(|e| Error::internal(format!("Failed to copy to store: {}", e)))?;
-            } else {
-                fs::copy(source_path, &store_path)
-                    .map_err(|e| Error::internal(format!("Failed to copy to store: {}", e)))?;
-            }
-        }
-
-        Ok(store_path.to_string_lossy().to_string())
-    }
-
-    fn add_text_to_store(
-        &self,
-        name: &str,
-        content: &str,
-        references: Vec<String>,
-    ) -> Result<String> {
-        self.add_to_store(name, content.as_bytes(), false, references)
-    }
-
-    fn make_fixed_output_path(
-        &self,
-        _hash_algo: &str,
-        hash: &str,
-        _hash_mode: &str,
-        name: &str,
-    ) -> Result<String> {
-        let store_path = self.cache.make_store_path(hash, name);
-        Ok(store_path.to_string_lossy().to_string())
-    }
-}
@@ -1,28 +1,10 @@
 mod utils;

-use std::sync::Once;
-
 use nix_js::value::Value;
 use utils::eval_result;

-fn init() {
-    static INIT: Once = Once::new();
-    INIT.call_once(|| {
-        #[cfg(not(feature = "daemon"))]
-        unsafe {
-            std::env::set_var("NIX_JS_STORE_MODE", "simulated")
-        };
-        #[cfg(feature = "daemon")]
-        unsafe {
-            std::env::set_var("NIX_JS_STORE_MODE", "daemon")
-        };
-    });
-}
-
 #[test]
 fn to_file_simple() {
-    init();
-
     let result =
         eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");

@@ -40,8 +22,6 @@ fn to_file_simple() {

 #[test]
 fn to_file_with_references() {
-    init();
-
     let result = eval_result(
         r#"
         let

@@ -65,8 +45,6 @@ fn to_file_with_references() {

 #[test]
 fn to_file_invalid_name_with_slash() {
-    init();
-
     let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);

     assert!(result.is_err());

@@ -80,8 +58,6 @@ fn to_file_invalid_name_with_slash() {

 #[test]
 fn to_file_invalid_name_dot() {
-    init();
-
     let result = eval_result(r#"builtins.toFile "." "content""#);

     assert!(result.is_err());

@@ -90,8 +66,6 @@ fn to_file_invalid_name_dot() {

 #[test]
 fn to_file_invalid_name_dotdot() {
-    init();
-
     let result = eval_result(r#"builtins.toFile ".." "content""#);

     assert!(result.is_err());

@@ -100,8 +74,6 @@ fn to_file_invalid_name_dotdot() {

 #[test]
 fn store_path_validation_not_in_store() {
-    init();
-
     let result = eval_result(r#"builtins.storePath "/tmp/foo""#);

     assert!(result.is_err());

@@ -115,8 +87,6 @@ fn store_path_validation_not_in_store() {

 #[test]
 fn store_path_validation_malformed_hash() {
-    init();
-
     let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
         .expect("Failed to create dummy file");

@@ -145,8 +115,6 @@ fn store_path_validation_malformed_hash() {

 #[test]
 fn store_path_validation_missing_name() {
-    init();
-
     let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
         .expect("Failed to create dummy file");

@@ -175,8 +143,6 @@ fn store_path_validation_missing_name() {

 #[test]
 fn to_file_curried_application() {
-    init();
-
     let result = eval_result(
         r#"
         let

@@ -199,8 +165,6 @@ fn to_file_curried_application() {

 #[test]
 fn to_file_number_conversion() {
-    init();
-
     let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
         .expect("Failed to evaluate");

@@ -215,8 +179,6 @@ fn to_file_number_conversion() {

 #[test]
 fn to_file_list_conversion() {
-    init();
-
     let result = eval_result(
         r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,
     )
@@ -1,5 +1,3 @@
-#![cfg(feature = "daemon")]
-
 mod utils;

 use nix_js::value::Value;

@@ -368,26 +366,18 @@ fn fixed_output_sha256_flat() {
 }

 #[test]
-fn fixed_output_default_algo() {
-    let result = eval_deep(
-        r#"derivation {
-            name = "default";
-            builder = "/bin/sh";
-            system = "x86_64-linux";
-            outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
-        }"#,
-    );
-
-    match result {
-        Value::AttrSet(attrs) => {
-            assert!(attrs.contains_key("outPath"));
-            // Verify it defaults to sha256 (same as explicitly specifying it)
-            if let Some(Value::String(out_path)) = attrs.get("outPath") {
-                assert!(out_path.contains("/nix/store/"));
-            }
-        }
-        _ => panic!("Expected AttrSet"),
-    }
+fn fixed_output_missing_hashalgo() {
+    assert!(
+        eval_deep_result(
+            r#"derivation {
+                name = "default";
+                builder = "/bin/sh";
+                system = "x86_64-linux";
+                outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
+            }"#,
+        )
+        .is_err()
+    );
 }

 #[test]

@@ -428,6 +418,7 @@ fn fixed_output_rejects_multi_output() {
         builder = "/bin/sh";
         system = "x86_64-linux";
         outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
+        outputHashAlgo = "sha256";
         outputs = ["out" "dev"];
     }"#,
     );

@@ -643,7 +634,8 @@ fn fixed_output_with_structured_attrs() {
         name = "fixstruct";
        builder = "/bin/sh";
         system = "x86_64-linux";
-        outputHash = "abc123";
+        outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
+        outputHashAlgo = "sha256";
         __structuredAttrs = true;
         data = { key = "value"; };
     }"#,