fix: drvDeep

This commit is contained in:
2026-02-08 15:35:37 +08:00
parent bd9eb638af
commit a79e20c417
21 changed files with 573 additions and 108 deletions

1
Cargo.lock generated
View File

@@ -1956,6 +1956,7 @@ dependencies = [
"nix-compat",
"nix-js-macros",
"nix-nar",
"num_enum",
"petgraph",
"regex",
"reqwest",

View File

@@ -32,6 +32,7 @@
hyperfine
just
samply
jq
nodejs
nodePackages.npm

View File

@@ -6,13 +6,13 @@ build = "build.rs"
[features]
default = ["daemon"]
daemon = ["dep:tokio", "dep:nix-compat"]
daemon = ["dep:tokio"]
[dependencies]
mimalloc = "0.1"
tokio = { version = "1.41", features = ["rt-multi-thread", "sync", "net", "io-util"], optional = true }
nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", features = ["wire", "async"], optional = true }
nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", features = ["wire", "async"] }
# REPL
anyhow = "1.0"
@@ -63,6 +63,7 @@ rowan = "0.15"
nix-js-macros = { path = "../nix-js-macros" }
ere = "0.2.4"
num_enum = "0.7.5"
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }

View File

@@ -167,11 +167,16 @@ export const coerceToString = (
if ("outPath" in v) {
// Recursively coerce the outPath value
const outPath = coerceToString(v.outPath, mode, copyToStore, outContext);
if ("type" in v && v.type === "derivation" && "drvPath" in v) {
const drvPath = force(v.drvPath);
if (typeof drvPath === "string" && outContext) {
if ("type" in v && v.type === "derivation" && "drvPath" in v && outContext) {
const drvPathValue = force(v.drvPath);
const drvPathStr = isStringWithContext(drvPathValue)
? drvPathValue.value
: typeof drvPathValue === "string"
? drvPathValue
: null;
if (drvPathStr) {
const outputName = "outputName" in v ? String(force(v.outputName)) : "out";
addBuiltContext(outContext, drvPath, outputName);
addBuiltContext(outContext, drvPathStr, outputName);
}
}
return outPath;

View File

@@ -1,7 +1,12 @@
import type { NixValue, NixAttrs } from "../types";
import { forceStringValue, forceList } from "../type-assert";
import { force, createThunk } from "../thunk";
import { type DerivationData, type OutputInfo, generateAterm } from "../derivation-helpers";
import {
type DerivationData,
type OutputInfo,
generateAterm,
generateAtermModulo,
} from "../derivation-helpers";
import { coerceToString, StringCoercionMode } from "./conversion";
import {
type NixStringContext,
@@ -14,6 +19,8 @@ import {
import { nixValueToJson } from "../conversion";
import { isNixPath } from "../types";
// Cache of drvPath -> "hash modulo fixed-output derivations" (hex sha256).
// Populated when a derivation is written, and consulted later when that
// derivation shows up as an input of another one.
const drvHashCache = new Map<string, string>();
const forceAttrs = (value: NixValue): NixAttrs => {
const forced = force(value);
if (
@@ -56,13 +63,7 @@ const validateSystem = (attrs: NixAttrs): string => {
return forceStringValue(attrs.system);
};
const extractOutputs = (attrs: NixAttrs): string[] => {
if (!("outputs" in attrs)) {
return ["out"];
}
const outputsList = forceList(attrs.outputs);
const outputs = outputsList.map((o) => forceStringValue(o));
const validateOutputs = (outputs: string[]): void => {
if (outputs.length === 0) {
throw new Error("derivation: outputs list cannot be empty");
}
@@ -78,7 +79,34 @@ const extractOutputs = (attrs: NixAttrs): string[] => {
}
seen.add(output);
}
};
/**
 * Determine a derivation's output names from its `outputs` attribute.
 *
 * With `__structuredAttrs`, `outputs` must already be a list of strings;
 * otherwise it is coerced to a single string and split on whitespace.
 * Missing attribute defaults to the single output "out".
 * Throws (via validateOutputs) on an empty or invalid output list.
 */
const extractOutputs = (attrs: NixAttrs, structuredAttrs: boolean): string[] => {
  if (!("outputs" in attrs)) {
    return ["out"];
  }
  const names = structuredAttrs
    ? forceList(attrs.outputs).map((item) => forceStringValue(item))
    : coerceToString(attrs.outputs, StringCoercionMode.ToString, false, new Set())
        .split(/\s+/)
        .filter((name) => name.length > 0);
  validateOutputs(names);
  return names;
};
/**
 * Output names as seen by the lazy `derivation` wrapper: always forced as a
 * list of strings, independent of `__structuredAttrs`. Defaults to ["out"]
 * when the attribute is absent; validated the same way as extractOutputs.
 */
const extractOutputsForWrapper = (attrs: NixAttrs): string[] => {
  if (!("outputs" in attrs)) {
    return ["out"];
  }
  // FIXME: trace context?
  const names: string[] = [];
  for (const item of forceList(attrs.outputs)) {
    names.push(forceStringValue(item));
  }
  validateOutputs(names);
  return names;
};
@@ -94,7 +122,7 @@ const structuredAttrsExcludedKeys = new Set([
"__structuredAttrs",
"__ignoreNulls",
"__contentAddressed",
"impure",
"__impure",
"args",
]);
@@ -107,7 +135,7 @@ const specialAttrs = new Set([
"__structuredAttrs",
"__ignoreNulls",
"__contentAddressed",
"impure",
"__impure",
]);
const sortedJsonStringify = (obj: Record<string, any>): string => {
@@ -142,43 +170,43 @@ const extractEnv = (
if (key === "allowedReferences") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'allowedReferences'; use ` +
`'outputChecks.<output>.allowedReferences' instead`
`the derivation attribute 'allowedReferences'; use ` +
`'outputChecks.<output>.allowedReferences' instead`,
);
}
if (key === "allowedRequisites") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'allowedRequisites'; use ` +
`'outputChecks.<output>.allowedRequisites' instead`
`the derivation attribute 'allowedRequisites'; use ` +
`'outputChecks.<output>.allowedRequisites' instead`,
);
}
if (key === "disallowedReferences") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'disallowedReferences'; use ` +
`'outputChecks.<output>.disallowedReferences' instead`
`the derivation attribute 'disallowedReferences'; use ` +
`'outputChecks.<output>.disallowedReferences' instead`,
);
}
if (key === "disallowedRequisites") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'disallowedRequisites'; use ` +
`'outputChecks.<output>.disallowedRequisites' instead`
`the derivation attribute 'disallowedRequisites'; use ` +
`'outputChecks.<output>.disallowedRequisites' instead`,
);
}
if (key === "maxSize") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'maxSize'; use ` +
`'outputChecks.<output>.maxSize' instead`
`the derivation attribute 'maxSize'; use ` +
`'outputChecks.<output>.maxSize' instead`,
);
}
if (key === "maxClosureSize") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'maxClosureSize'; use ` +
`'outputChecks.<output>.maxClosureSize' instead`
`the derivation attribute 'maxClosureSize'; use ` +
`'outputChecks.<output>.maxClosureSize' instead`,
);
}
}
@@ -213,8 +241,9 @@ const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOut
if (ignoreNulls && hashValue === null) {
return null;
}
const hash = forceStringValue(attrs.outputHash);
const hashRaw = forceStringValue(attrs.outputHash);
// FIXME: default value?
let hashAlgo = "sha256";
if ("outputHashAlgo" in attrs) {
const algoValue = force(attrs.outputHashAlgo);
@@ -235,7 +264,9 @@ const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOut
throw new Error(`derivation: invalid outputHashMode '${hashMode}' (must be 'flat' or 'recursive')`);
}
return { hash, hashAlgo, hashMode };
const parsed = Deno.core.ops.op_parse_hash(hashRaw, hashAlgo);
return { hash: parsed.hex, hashAlgo: parsed.algo, hashMode };
};
const validateFixedOutputConstraints = (fixedOutput: FixedOutputInfo | null, outputs: string[]) => {
@@ -255,7 +286,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false;
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;
const outputs = extractOutputs(attrs);
const outputs = extractOutputs(attrs, structuredAttrs);
const fixedOutputInfo = extractFixedOutputInfo(attrs, ignoreNulls);
validateFixedOutputConstraints(fixedOutputInfo, outputs);
@@ -263,7 +294,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
throw new Error("ca derivations are not supported");
}
if ("impure" in attrs && force(attrs.impure) === true) {
if ("__impure" in attrs && force(attrs.__impure) === true) {
throw new Error("impure derivations are not supported");
}
@@ -328,8 +359,11 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
env,
};
const finalAterm = generateAterm(finalDrv);
const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const fixedHashFingerprint = `fixed:out:${hashAlgoPrefix}${fixedOutputInfo.hashAlgo}:${fixedOutputInfo.hash}:${outPath}`;
const fixedModuloHash = Deno.core.ops.op_sha256_hex(fixedHashFingerprint);
drvHashCache.set(drvPath, fixedModuloHash);
} else {
const maskedOutputs = new Map<string, OutputInfo>(
outputs.map((o) => [
@@ -357,7 +391,16 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
env: maskedEnv,
};
const maskedAterm = generateAterm(maskedDrv);
const inputDrvHashes = new Map<string, string>();
for (const [drvPath, outputNames] of inputDrvs) {
const cachedHash = drvHashCache.get(drvPath);
if (!cachedHash) {
throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
}
inputDrvHashes.set(cachedHash, Array.from(outputNames).join(","));
}
const maskedAterm = generateAtermModulo(maskedDrv, inputDrvHashes);
const drvModuloHash = Deno.core.ops.op_sha256_hex(maskedAterm);
outputInfos = new Map<string, OutputInfo>();
@@ -378,9 +421,11 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
env,
};
const finalAterm = generateAterm(finalDrv);
const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
const finalAtermModulo = generateAtermModulo(finalDrv, inputDrvHashes);
const cachedModuloHash = Deno.core.ops.op_sha256_hex(finalAtermModulo);
drvHashCache.set(drvPath, cachedModuloHash);
}
const result: NixAttrs = {};
@@ -401,7 +446,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
export const derivation = (args: NixValue): NixAttrs => {
const attrs = forceAttrs(args);
const outputs: string[] = extractOutputs(attrs);
const outputs: string[] = extractOutputsForWrapper(attrs);
const strictThunk = createThunk(() => derivationStrict(args), "derivationStrict");
@@ -426,10 +471,7 @@ export const derivation = (args: NixValue): NixAttrs => {
`output_${outputName}`,
);
}
commonAttrs.all = createThunk(
() => outputsList.map((o) => o.value),
"all_outputs",
);
commonAttrs.all = createThunk(() => outputsList.map((o) => o.value), "all_outputs");
commonAttrs.drvAttrs = attrs;
for (const { value: outputObj } of outputsList) {
@@ -439,13 +481,9 @@ export const derivation = (args: NixValue): NixAttrs => {
`output_${outputName}`,
);
}
outputObj.all = createThunk(
() => outputsList.map((o) => o.value),
"all_outputs",
);
outputObj.all = createThunk(() => outputsList.map((o) => o.value), "all_outputs");
outputObj.drvAttrs = attrs;
}
return outputsList[0].value;
};

View File

@@ -18,12 +18,12 @@ export const seq =
export const deepSeq =
(e1: NixValue) =>
(e2: NixValue): NixValue => {
const seen: Set<NixValue> = new Set;
const seen: Set<NixValue> = new Set();
const recurse = (e: NixValue) => {
if (!seen.has(e)) {
seen.add(e);
} else {
return
return;
}
const forced = force(e);
if (Array.isArray(forced)) {
@@ -35,7 +35,7 @@ export const deepSeq =
recurse(val);
}
}
}
};
recurse(e1);
return e2;
};

View File

@@ -266,7 +266,7 @@ export const builtins: any = {
storeDir: "INVALID_PATH",
__traceCaller: (e: NixValue) => {
console.log(`traceCaller: ${getTos()}`)
return e
console.log(`traceCaller: ${getTos()}`);
return e;
},
};

View File

@@ -3,7 +3,14 @@
* Implemented via Rust ops exposed through deno_core
*/
import { forceAttrs, forceBool, forceFunction, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
import {
forceAttrs,
forceBool,
forceFunction,
forceList,
forceStringNoCtx,
forceStringValue,
} from "../type-assert";
import type { NixValue, NixAttrs, NixPath } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types";
import { force } from "../thunk";
@@ -488,9 +495,8 @@ export const findFile =
}
const resolvedPath = Deno.core.ops.op_resolve_path(pathVal, "");
const candidatePath = suffix.length > 0
? Deno.core.ops.op_resolve_path(suffix, resolvedPath)
: resolvedPath;
const candidatePath =
suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath;
if (Deno.core.ops.op_path_exists(candidatePath)) {
return { [IS_PATH]: true, value: candidatePath };

View File

@@ -5,7 +5,14 @@
import { force } from "../thunk";
import { CatchableError, ATTR_POSITIONS } from "../types";
import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types";
import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert";
import {
forceList,
forceAttrs,
forceFunction,
forceStringValue,
forceString,
forceStringNoCtx,
} from "../type-assert";
import * as context from "./context";
import { compareValues } from "../operators";
import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check";
@@ -242,7 +249,7 @@ export const parseDrvName = (s: NixValue): NixAttrs => {
let name = fullName;
let version = "";
for (let i = 0; i < fullName.length; ++i) {
if (fullName[i] === '-' && i + 1 < fullName.length && !/[a-zA-Z]/.test(fullName[i + 1])) {
if (fullName[i] === "-" && i + 1 < fullName.length && !/[a-zA-Z]/.test(fullName[i + 1])) {
name = fullName.substring(0, i);
version = fullName.substring(i + 1);
break;
@@ -250,8 +257,8 @@ export const parseDrvName = (s: NixValue): NixAttrs => {
}
return {
name,
version
}
version,
};
};
export const parseFlakeName = (s: NixValue): never => {

View File

@@ -43,9 +43,12 @@ export const escapeString = (s: string): string => {
const quoteString = (s: string): string => `"${s}"`;
const cmpByKey = <T>(a: [string, T], b: [string, T]): number =>
a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
export const generateAterm = (drv: DerivationData): string => {
const outputEntries: string[] = [];
const sortedOutputs = Array.from(drv.outputs.entries()).sort();
const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
for (const [name, info] of sortedOutputs) {
outputEntries.push(
`(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
@@ -54,18 +57,51 @@ export const generateAterm = (drv: DerivationData): string => {
const outputs = outputEntries.join(",");
const inputDrvEntries: string[] = [];
for (const [drvPath, outputs] of drv.inputDrvs) {
const outList = `[${Array.from(outputs).map(quoteString).join(",")}]`;
const sortedInputDrvs = Array.from(drv.inputDrvs.entries()).sort(cmpByKey);
for (const [drvPath, outputs] of sortedInputDrvs) {
const sortedOuts = Array.from(outputs).sort();
const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
inputDrvEntries.push(`(${quoteString(drvPath)},${outList})`);
}
const inputDrvs = inputDrvEntries.join(",");
const inputSrcs = Array.from(drv.inputSrcs).map(quoteString).join(",");
const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
const inputSrcs = sortedInputSrcs.map(quoteString).join(",");
const args = drv.args.map(escapeString).join(",");
const envs = Array.from(drv.env.entries())
.sort()
.sort(cmpByKey)
.map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);
return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${quoteString(drv.builder)},[${args}],[${envs}])`;
return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};
/**
 * Serialize a derivation to ATerm form for hashing "modulo" fixed-output
 * derivations: identical layout to generateAterm, except that the input-drv
 * section lists each input's precomputed modulo hash (the keys of
 * `inputDrvHashes`) in place of its .drv store path.
 *
 * @param drv derivation fields (outputs, srcs, platform, builder, args, env)
 * @param inputDrvHashes map of modulo-hash -> comma-joined output names
 *   (callers encode the output set as a ","-joined string; it is split and
 *   sorted here).
 * @returns the `Derive(...)` ATerm string; all sections are sorted so the
 *   result is deterministic regardless of insertion order.
 */
export const generateAtermModulo = (drv: DerivationData, inputDrvHashes: Map<string, string>): string => {
  const outputEntries: string[] = [];
  const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
  for (const [name, info] of sortedOutputs) {
    outputEntries.push(
      `(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
    );
  }
  const outputs = outputEntries.join(",");

  const inputDrvEntries: string[] = [];
  const sortedInputDrvHashes = Array.from(inputDrvHashes.entries()).sort(cmpByKey);
  for (const [drvHash, outputs] of sortedInputDrvHashes) {
    // Output names arrive as a ","-joined string; sort for determinism.
    const sortedOuts = outputs.split(",").sort();
    const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
    inputDrvEntries.push(`(${quoteString(drvHash)},${outList})`);
  }
  const inputDrvs = inputDrvEntries.join(",");

  const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
  const inputSrcs = sortedInputSrcs.map(quoteString).join(",");
  const args = drv.args.map(escapeString).join(",");
  const envs = Array.from(drv.env.entries())
    .sort(cmpByKey)
    .map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);
  // NOTE(review): platform is quoted but builder is escaped, matching
  // generateAterm above — confirm this asymmetry is intended.
  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};

View File

@@ -40,7 +40,7 @@ export const getTos = (): string => {
const tos = callStack[callStack.length - 2];
const { file, line, column } = Deno.core.ops.op_decode_span(tos.span);
return `${tos.message} at ${file}:${line}:${column}`;
}
};
/**
* Push an error context onto the stack

View File

@@ -4,7 +4,16 @@
* All functionality is exported via the global `Nix` object
*/
import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS, forceDeep, IS_CYCLE, forceShallow } from "./thunk";
import {
createThunk,
force,
isThunk,
IS_THUNK,
DEBUG_THUNKS,
forceDeep,
IS_CYCLE,
forceShallow,
} from "./thunk";
import {
select,
selectWithDefault,

View File

@@ -183,7 +183,9 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
*
* Context type handling:
* - Opaque: Added to inputSrcs
* - DrvDeep: Added to inputSrcs (entire derivation + all outputs)
* - DrvDeep: Computes FS closure (like Nix's computeFSClosure) - adds all paths
* in the dependency graph to inputSrcs, and all derivations with their
* outputs to inputDrvs
* - Built: Added to inputDrvs with specific output name
*/
export const extractInputDrvsAndSrcs = (
@@ -198,9 +200,28 @@ export const extractInputDrvsAndSrcs = (
case "opaque":
inputSrcs.add(elem.path);
break;
case "drvDeep":
inputSrcs.add(elem.drvPath);
case "drvDeep": {
const closure: {
input_drvs: [string, string[]][];
input_srcs: string[];
} = Deno.core.ops.op_compute_fs_closure(elem.drvPath);
for (const src of closure.input_srcs) {
inputSrcs.add(src);
}
for (const [drvPath, outputs] of closure.input_drvs) {
let existingOutputs = inputDrvs.get(drvPath);
if (!existingOutputs) {
existingOutputs = new Set<string>();
inputDrvs.set(drvPath, existingOutputs);
}
for (const output of outputs) {
existingOutputs.add(output);
}
}
break;
}
case "built": {
let outputs = inputDrvs.get(elem.drvPath);
if (!outputs) {

View File

@@ -197,9 +197,9 @@ export const forceShallow = (value: NixValue): NixStrictValue => {
return forced.map((item) => {
const forcedItem = force(item);
if (typeof forcedItem === "object" && forcedItem === forced) {
return CYCLE_MARKER
return CYCLE_MARKER;
} else {
return forcedItem
return forcedItem;
}
});
}

View File

@@ -39,12 +39,7 @@ export const forceFunction = (value: NixValue): NixFunction => {
if (isFunction(forced)) {
return forced;
}
if (
typeof forced === "object" &&
!Array.isArray(forced) &&
forced !== null &&
"__functor" in forced
) {
if (typeof forced === "object" && !Array.isArray(forced) && forced !== null && "__functor" in forced) {
const functorSet = forced as NixAttrs;
const functor = forceFunction(functorSet.__functor);
return (arg: NixValue) => forceFunction(functor(functorSet))(arg);
@@ -100,10 +95,10 @@ export const forceStringNoCtx = (value: NixValue): string => {
return forced;
}
if (isStringWithContext(forced)) {
throw new TypeError(`the string '${forced.value}' is not allowed to refer to a store path`)
throw new TypeError(`the string '${forced.value}' is not allowed to refer to a store path`);
}
throw new TypeError(`Expected string, got ${typeOf(forced)}`);
}
};
/**
* Force a value and assert it's a boolean

View File

@@ -95,7 +95,7 @@ const ATTR_POSITIONS = Symbol("attrPositions");
export const mkAttrsWithPos = (
attrs: NixAttrs,
positions: Record<string, string>,
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] }
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] },
): NixAttrs => {
if (dyns) {
const len = dyns.dynKeys.length;

View File

@@ -44,10 +44,15 @@ declare global {
function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string;
function op_make_placeholder(output: string): string;
function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null };
function op_decode_span(span: string): {
file: string | null;
line: number | null;
column: number | null;
};
function op_make_store_path(ty: string, hash_hex: string, name: string): string;
function op_make_text_store_path(hash_hex: string, name: string, references: string[]): string;
function op_output_path_name(drv_name: string, output_name: string): string;
function op_parse_hash(hash_str: string, algo: string): { hex: string; algo: string };
function op_make_fixed_output_path(
hash_algo: string,
hash: string,
@@ -84,6 +89,12 @@ declare global {
): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_write_derivation(drv_name: string, aterm: string, references: string[]): string;
function op_read_derivation_outputs(drv_path: string): string[];
function op_compute_fs_closure(drv_path: string): {
input_drvs: [string, string[]][];
input_srcs: string[];
};
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];

View File

@@ -57,10 +57,14 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_make_store_path::<Ctx>(),
op_make_text_store_path::<Ctx>(),
op_output_path_name(),
op_parse_hash(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
op_store_path::<Ctx>(),
op_to_file::<Ctx>(),
op_write_derivation::<Ctx>(),
op_read_derivation_outputs(),
op_compute_fs_closure(),
op_copy_path_to_store::<Ctx>(),
op_get_env(),
op_walk_dir(),

View File

@@ -1,4 +1,5 @@
use std::path::{Component, Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use hashbrown::hash_map::{Entry, HashMap};
@@ -257,7 +258,7 @@ pub(super) fn op_make_placeholder(#[string] output: String) -> String {
let mut hasher = Sha256::new();
hasher.update(input.as_bytes());
let hash: [u8; 32] = hasher.finalize().into();
let encoded = crate::nix_hash::nix_base32_encode(&hash);
let encoded = nix_compat::nixbase32::encode(&hash);
format!("/{}", encoded)
}
@@ -348,6 +349,34 @@ pub(super) fn op_output_path_name(
crate::nix_hash::output_path_name(&drv_name, &output_name)
}
/// Result of op_parse_hash: the digest as lowercase hex plus the algorithm
/// name (echoed back from the caller).
#[derive(serde::Serialize)]
pub(super) struct ParsedHash {
    hex: String,
    algo: String,
}

/// Parse a hash string (any encoding nix_compat accepts, e.g. hex, nixbase32,
/// SRI) for the given algorithm, returning its digest as hex.
///
/// `HashAlgo::from_str(&algo).ok()` deliberately maps an unrecognized algo
/// string to `None` — presumably so self-describing (SRI-style) hashes still
/// parse; TODO confirm against nix_compat's NixHash::from_str contract.
#[deno_core::op2]
#[serde]
pub(super) fn op_parse_hash(
    #[string] hash_str: String,
    #[string] algo: String,
) -> std::result::Result<ParsedHash, NixRuntimeError> {
    use nix_compat::nixhash::{HashAlgo, NixHash};

    let hash_algo = HashAlgo::from_str(&algo).ok();
    let nix_hash = NixHash::from_str(&hash_str, hash_algo).map_err(|e| {
        NixRuntimeError::from(format!(
            "invalid hash '{}' for algorithm '{}': {}",
            hash_str, algo, e
        ))
    })?;

    // NOTE(review): returns the caller-supplied `algo`, not the algorithm
    // detected from the parsed hash — verify callers never pass an empty or
    // mismatched algo together with an SRI hash.
    Ok(ParsedHash {
        hex: hex::encode(nix_hash.digest_as_bytes()),
        algo,
    })
}
#[deno_core::op2]
#[string]
pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
@@ -492,6 +521,268 @@ pub(super) fn op_to_file<Ctx: RuntimeContext>(
Ok(store_path)
}
/// Write a derivation's ATerm serialization into the store as
/// `<drv_name>.drv`, registering `references` on the resulting text path.
///
/// Returns the absolute store path of the written .drv file, or a
/// NixRuntimeError if the store write fails.
#[deno_core::op2]
#[string]
pub(super) fn op_write_derivation<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] drv_name: String,
    #[string] aterm: String,
    #[serde] references: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
    tracing::debug!(
        "op_write_derivation: name={}.drv, references={:?}",
        drv_name,
        references
    );
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_path = store
        .add_text_to_store(&format!("{}.drv", drv_name), &aterm, references)
        .map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
    Ok(store_path)
}
/// Read a .drv file from disk at `drv_path` and return its output names,
/// parsed out of the ATerm text by parse_derivation_outputs.
///
/// Fails with a NixRuntimeError if the file cannot be read or its outputs
/// section cannot be parsed.
#[deno_core::op2]
#[serde]
pub(super) fn op_read_derivation_outputs(
    #[string] drv_path: String,
) -> std::result::Result<Vec<String>, NixRuntimeError> {
    let content = std::fs::read_to_string(&drv_path)
        .map_err(|e| NixRuntimeError::from(format!("failed to read derivation {}: {}", drv_path, e)))?;
    let outputs = parse_derivation_outputs(&content)
        .ok_or_else(|| NixRuntimeError::from(format!("failed to parse derivation {}", drv_path)))?;
    Ok(outputs)
}
/// Extract the output names from a derivation ATerm.
///
/// The text must start with `Derive([`; the outputs section runs up to the
/// first `],[`, and each entry looks like `("name","path","algo","hash")`.
/// Only the first quoted field of each entry (the name) is collected.
/// Returns None if the `Derive([` prefix or the section terminator is
/// missing; malformed trailing entries are silently ignored.
fn parse_derivation_outputs(aterm: &str) -> Option<Vec<String>> {
    let body = aterm.strip_prefix("Derive([")?;
    let section_end = body.find("],[")?;
    let mut names = Vec::new();
    let mut rest = &body[..section_end];
    loop {
        // Find the next entry's opening paren; stop when none remain.
        let open = match rest.find('(') {
            Some(i) => i,
            None => break,
        };
        let after_open = &rest[open + 1..];
        // The first field must start immediately with a double quote.
        if !after_open.starts_with('"') {
            break;
        }
        let name_body = &after_open[1..];
        // Unterminated quotes take the remainder of the section as the name,
        // matching the byte-scanner behavior of the original parser.
        let name_end = name_body.find('"').unwrap_or(name_body.len());
        names.push(name_body[..name_end].to_string());
        // Skip the rest of this entry up to (and past) its closing paren.
        let tail = &name_body[name_end..];
        rest = match tail.find(')') {
            Some(i) => &tail[i + 1..],
            None => "",
        };
    }
    Some(names)
}
/// Input sections of a parsed derivation ATerm: input derivations with their
/// requested output names, and plain input source paths.
#[derive(serde::Serialize)]
pub(super) struct DerivationInputs {
    input_drvs: Vec<(String, Vec<String>)>,
    input_srcs: Vec<String>,
}

/// Parse the inputDrvs and inputSrcs sections of a derivation ATerm:
/// `Derive([outputs],[inputDrvs],[inputSrcs],...)`.
///
/// Works by bracket counting on raw bytes: first the outputs section is
/// skipped, then `("path",[outs])` entries are collected from inputDrvs, and
/// finally quoted strings from inputSrcs. Returns None on a missing prefix or
/// unbalanced/missing section delimiters.
///
/// NOTE(review): the bracket counters do not treat brackets *inside* quoted
/// strings specially — fine for store paths (which cannot contain them), but
/// worth confirming no other field can.
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
    let aterm = aterm.strip_prefix("Derive([")?;
    // Skip past the outputs section: consume until its closing ']' balances
    // the '[' we just stripped.
    let mut bracket_count = 1;
    let mut pos = 0;
    let bytes = aterm.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            _ => {}
        }
        pos += 1;
    }
    if bracket_count != 0 {
        return None;
    }
    let rest = &aterm[pos..];
    let rest = rest.strip_prefix(",[")?;
    // inputDrvs section: entries are "(...)" tuples at nesting depth 1; the
    // inner "[outs]" list bumps the depth so its parens/brackets are ignored.
    let mut input_drvs = Vec::new();
    let mut bracket_count = 1;
    let mut start = 0;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'(' if bracket_count == 1 => {
                start = pos;
            }
            b')' if bracket_count == 1 => {
                let entry = &rest[start + 1..pos];
                // Malformed entries are skipped rather than failing the parse.
                if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
                    input_drvs.push((drv_path, outputs));
                }
            }
            _ => {}
        }
        pos += 1;
    }
    let rest = &rest[pos..];
    let rest = rest.strip_prefix(",[")?;
    // inputSrcs section: a flat list of quoted strings; backslash escapes are
    // skipped over (not decoded) so an escaped quote does not end the string.
    let mut input_srcs = Vec::new();
    bracket_count = 1;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'"' if bracket_count == 1 => {
                pos += 1;
                let src_start = pos;
                while pos < bytes.len() && bytes[pos] != b'"' {
                    if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
                        pos += 2;
                    } else {
                        pos += 1;
                    }
                }
                let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
                input_srcs.push(src.to_string());
            }
            _ => {}
        }
        pos += 1;
    }
    Some(DerivationInputs {
        input_drvs,
        input_srcs,
    })
}
/// Parse a single inputDrvs tuple body of the form
/// `"drvpath",["out1","out2"]` (outer parens already removed).
///
/// Returns the drv path and its output names, or None when the leading
/// quoted path or the `,[...]` list wrapper is malformed. List items that
/// are not fully quoted are skipped rather than failing the whole entry.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    let after_quote = entry.strip_prefix('"')?;
    let (drv_path, tail) = after_quote.split_once('"')?;
    let list_body = tail.strip_prefix(",[")?.strip_suffix(']')?;
    let outputs: Vec<String> = list_body
        .split(',')
        .filter_map(|item| item.trim().strip_prefix('"')?.strip_suffix('"'))
        .map(str::to_string)
        .collect();
    Some((drv_path.to_string(), outputs))
}
/// Closure of a derivation on disk: all reachable derivations (with the
/// output names requested of them) plus all plain source paths.
#[derive(serde::Serialize)]
pub(super) struct FsClosureResult {
    input_drvs: Vec<(String, Vec<String>)>,
    input_srcs: Vec<String>,
}

/// Breadth-first walk of the on-disk dependency graph rooted at `drv_path`,
/// reading and parsing each reachable `.drv` file.
///
/// Every visited path — including the root and each dependency .drv itself —
/// is added to `input_srcs`; every dependency derivation is also recorded in
/// `input_drvs` with the union of its requested outputs. Non-`.drv` paths
/// are recorded but not expanded. BTree collections keep the output
/// deterministic. Presumably this mirrors Nix's computeFSClosure for the
/// drvDeep context case — TODO confirm against Nix semantics.
///
/// Errors if any reachable .drv file is unreadable or unparsable.
#[deno_core::op2]
#[serde]
pub(super) fn op_compute_fs_closure(
    #[string] drv_path: String,
) -> std::result::Result<FsClosureResult, NixRuntimeError> {
    use std::collections::{BTreeMap, BTreeSet, VecDeque};

    let mut all_input_srcs: BTreeSet<String> = BTreeSet::new();
    let mut all_input_drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    let mut queue: VecDeque<String> = VecDeque::new();
    let mut visited: BTreeSet<String> = BTreeSet::new();
    queue.push_back(drv_path);
    while let Some(current_path) = queue.pop_front() {
        if visited.contains(&current_path) {
            continue;
        }
        visited.insert(current_path.clone());
        // Every visited node — drv or not — becomes part of the src closure.
        all_input_srcs.insert(current_path.clone());
        if !current_path.ends_with(".drv") {
            // Plain store paths have no inputs of their own to expand.
            continue;
        }
        let content = match std::fs::read_to_string(&current_path) {
            Ok(c) => c,
            Err(e) => {
                return Err(NixRuntimeError::from(format!(
                    "failed to read derivation {}: {}",
                    current_path, e
                )));
            }
        };
        let inputs = parse_derivation_inputs(&content).ok_or_else(|| {
            NixRuntimeError::from(format!("failed to parse derivation {}", current_path))
        })?;
        for src in inputs.input_srcs {
            all_input_srcs.insert(src.clone());
            if !visited.contains(&src) {
                queue.push_back(src);
            }
        }
        for (dep_drv, outputs) in inputs.input_drvs {
            // Dependency drvs appear in both sections; their output sets are
            // unioned across all referrers.
            all_input_srcs.insert(dep_drv.clone());
            let entry = all_input_drvs.entry(dep_drv.clone()).or_default();
            for output in outputs {
                entry.insert(output);
            }
            if !visited.contains(&dep_drv) {
                queue.push_back(dep_drv);
            }
        }
    }
    let input_drvs: Vec<(String, Vec<String>)> = all_input_drvs
        .into_iter()
        .map(|(k, v)| (k, v.into_iter().collect()))
        .collect();
    let input_srcs: Vec<String> = all_input_srcs.into_iter().collect();
    Ok(FsClosureResult {
        input_drvs,
        input_srcs,
    })
}
#[deno_core::op2]
#[string]
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(

View File

@@ -9,9 +9,11 @@ use nix_compat::store_path::StorePath;
use nix_compat::wire::ProtocolVersion;
use nix_compat::wire::de::{NixRead, NixReader};
use nix_compat::wire::ser::{NixSerialize, NixWrite, NixWriter, NixWriterBuilder};
use num_enum::{IntoPrimitive, TryFromPrimitive};
use tokio::io::{AsyncReadExt, AsyncWriteExt, ReadHalf, WriteHalf, split};
use tokio::net::UnixStream;
use tokio::sync::Mutex;
use thiserror::Error;
use crate::error::{Error, Result};
@@ -472,6 +474,7 @@ pub struct NixDaemonClient {
protocol_version: ProtocolVersion,
reader: NixReader<ReadHalf<UnixStream>>,
writer: NixWriter<WriteHalf<UnixStream>>,
_marker: std::marker::PhantomData<std::cell::Cell<()>>,
}
impl NixDaemonClient {
@@ -503,18 +506,15 @@ impl NixDaemonClient {
protocol_version,
reader,
writer,
_marker: Default::default(),
})
}
/// Execute an operation that returns a typed result
///
/// This is the main method for implementing protocol operations:
/// 1. Send operation code
/// 2. Send operation parameters
/// 3. Receive response or error
async fn execute<T>(&mut self, operation: Operation) -> IoResult<T>
async fn execute<T, F1, F2>(&mut self, operation: Operation, write: F1, read: F2) -> IoResult<T>
where
T: nix_compat::wire::de::NixDeserialize,
F1: FnOnce() -> IoResult<()>,
F2: FnOnce() -> IoResult<T>
{
// Send operation
self.writer.write_value(&operation).await?;
@@ -542,7 +542,7 @@ impl NixDaemonClient {
///
/// The daemon sends either:
/// - STDERR_LAST followed by the result
/// - STDERR_ERROR followed by an error message
/// - STDERR_ERROR followed by a structured error
async fn read_response<T>(&mut self) -> IoResult<T>
where
T: nix_compat::wire::de::NixDeserialize,
@@ -551,23 +551,47 @@ impl NixDaemonClient {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
// Success, read the actual response
let result: T = self.reader.read_value().await?;
return Ok(result);
} else if msg == STDERR_ERROR {
// IoError, read error message
// The error is sent as a NixIoError struct, but we just read the message
let error_msg: String = self.reader.read_value().await?;
let error_msg = self.read_daemon_error().await?;
return Err(IoError::other(error_msg));
} else {
// Other STDERR_* codes (logging, etc.) - for now, we ignore them
// Read and discard the associated data
let _data: String = self.reader.read_value().await?;
continue;
}
}
}
async fn read_daemon_error(&mut self) -> IoResult<NixDaemonError> {
let type_marker: String = self.reader.read_value().await?;
assert_eq!(type_marker, "Error");
let level = NixDaemonErrorLevel::try_from_primitive(
self.reader.read_number().await?.try_into().unwrap(),
)
.unwrap();
// removed
let _name: String = self.reader.read_value().await?;
let msg: String = self.reader.read_value().await?;
let have_pos: u64 = self.reader.read_number().await?;
assert_eq!(have_pos, 0);
let nr_traces: u64 = self.reader.read_number().await?;
let mut traces = Vec::new();
for _ in 0..nr_traces {
let _trace_pos: u64 = self.reader.read_number().await?;
let trace_hint: String = self.reader.read_value().await?;
traces.push(trace_hint);
}
Ok(NixDaemonError {
level,
msg,
traces,
})
}
/// Check if a path is valid in the store
pub async fn is_valid_path(&mut self, path: &str) -> IoResult<bool> {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
@@ -581,19 +605,15 @@ impl NixDaemonClient {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
// QueryPathInfo returns Option<UnkeyedValidPathInfo> which is serialized
// as a bool followed by the value if true
self.writer.write_value(&Operation::QueryPathInfo).await?;
self.writer.write_value(&store_path).await?;
self.writer.flush().await?;
// Read response - it's serialized as bool + optional value
loop {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
let has_value: bool = self.reader.read_value().await?;
if has_value {
// Manually deserialize UnkeyedValidPathInfo
use nix_compat::narinfo::Signature;
use nix_compat::nixhash::CAHash;
@@ -621,7 +641,7 @@ impl NixDaemonClient {
return Ok(None);
}
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
let error_msg = self.read_daemon_error().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
@@ -635,18 +655,16 @@ impl NixDaemonClient {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
// EnsurePath returns void (no value)
self.writer.write_value(&Operation::EnsurePath).await?;
self.writer.write_value(&store_path).await?;
self.writer.flush().await?;
// Read response - expect STDERR_LAST with no value
loop {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
return Ok(());
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
let error_msg = self.read_daemon_error().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
@@ -729,7 +747,7 @@ impl NixDaemonClient {
if msg == STDERR_LAST {
return Ok(());
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
let error_msg = self.read_daemon_error().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
@@ -787,3 +805,24 @@ impl NixDaemonConnection {
client.add_to_store_nar(request, nar_data).await
}
}
/// Verbosity level attached to a structured daemon error.
/// Discriminants 0..=7 presumably mirror Nix's `Verbosity` enum
/// (lvlError..lvlVomit) — confirm against the Nix source.
#[derive(Debug, Clone, Copy, PartialEq, Eq, IntoPrimitive, TryFromPrimitive)]
#[repr(u8)]
pub enum NixDaemonErrorLevel {
    Error = 0,
    Warn,
    Notice,
    Info,
    Talkative,
    Chatty,
    Debug,
    Vomit,
}

/// Structured error decoded from the daemon's STDERR_ERROR payload.
/// `Display` (via thiserror) shows only `msg`; `level` and `traces` are
/// carried along for callers that want richer reporting.
#[derive(Debug, Error)]
#[error("{msg}")]
pub struct NixDaemonError {
    level: NixDaemonErrorLevel,
    msg: String,
    traces: Vec<String>,
}

View File

@@ -643,7 +643,7 @@ fn fixed_output_with_structured_attrs() {
name = "fixstruct";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "abc123";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
__structuredAttrs = true;
data = { key = "value"; };
}"#,