Compare commits

...

26 Commits

Author SHA1 Message Date
a68681b4f5 fix: derivation references 2026-01-31 20:19:31 +08:00
ba3e2ae3de feat: set v8 stack size to 8 MiB 2026-01-31 19:29:09 +08:00
c5aee21514 fix: remove incorrect dynamic attr check 2026-01-31 18:59:11 +08:00
8f01ce2eb4 fix: handle __functor in forceFunction 2026-01-31 18:58:44 +08:00
a08f0e78a3 feat: builtins.placeholder 2026-01-31 18:54:13 +08:00
547f8f3828 fix: disable TCO test 2026-01-31 17:51:01 +08:00
aa368cb12e fix: toJSON test 2026-01-31 17:29:57 +08:00
0360bbe4aa fix: canonicalize paths 2026-01-31 17:23:28 +08:00
db64763d77 fix: escape "${" 2026-01-31 17:23:07 +08:00
1aba28d97b fix: symlink resolution 2026-01-31 16:55:44 +08:00
6838f9a0cf fix: unsafeGetAttrPos call on functionArgs 2026-01-31 16:46:16 +08:00
cb539c52c3 chore: fmt 2026-01-31 16:23:23 +08:00
b8f8b5764d fix: print test 2026-01-31 15:36:03 +08:00
1cfa8223c6 fix: path related tests 2026-01-31 15:03:30 +08:00
13874ca6ca fix: structuredAttrs 2026-01-31 13:53:02 +08:00
5703329850 fix: copy path to store in concatStringsWithContext 2026-01-31 12:47:32 +08:00
f0812c9063 refactor: recursive attrset; fix attrset merging 2026-01-31 12:07:13 +08:00
97854afafa fix: getenv test 2026-01-30 22:52:03 +08:00
9545b0fcae fix: recursive attrs 2026-01-30 22:51:42 +08:00
aee46b0b49 feat: use CppNix's test suite 2026-01-30 22:47:47 +08:00
1f835e7b06 fix: derivation semantic 2026-01-30 22:45:37 +08:00
9ee2dd5c08 fix: inherit in recursive attribute sets 2026-01-29 18:40:13 +08:00
084968c08a fix: implication operator (->) 2026-01-29 17:35:19 +08:00
058ef44259 refactor(codegen): less allocation 2026-01-29 11:42:40 +08:00
86953dd9d3 refactor: thunk scope 2026-01-27 10:45:40 +08:00
d1f87260a6 fix: infinite recursion on perl (WIP) 2026-01-27 10:45:40 +08:00
454 changed files with 7410 additions and 1452 deletions

View File

@@ -4,3 +4,7 @@ members = [
"nix-js", "nix-js",
"nix-js-macros" "nix-js-macros"
] ]
[profile.profiling]
inherits = "release"
debug = true

View File

@@ -12,4 +12,4 @@
[no-exit-message] [no-exit-message]
@evalr expr: @evalr expr:
RUST_LOG=info cargo run --bin eval --release -- '{{expr}}' RUST_LOG=silent cargo run --bin eval --release -- '{{expr}}'

View File

@@ -31,6 +31,7 @@
valgrind valgrind
hyperfine hyperfine
just just
samply
nodejs nodejs
nodePackages.npm nodePackages.npm

View File

@@ -92,6 +92,7 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
let mut mut_variants = Vec::new(); let mut mut_variants = Vec::new();
let mut as_ref_arms = Vec::new(); let mut as_ref_arms = Vec::new();
let mut as_mut_arms = Vec::new(); let mut as_mut_arms = Vec::new();
let mut span_arms = Vec::new();
let mut from_impls = Vec::new(); let mut from_impls = Vec::new();
let mut to_trait_impls = Vec::new(); let mut to_trait_impls = Vec::new();
@@ -112,6 +113,7 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
mut_variants.push(quote! { #name(&'a mut #inner_type) }); mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) }); as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) }); as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! { from_impls.push(quote! {
impl From<#inner_type> for #base_name { impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) } fn from(val: #inner_type) -> Self { #base_name::#name(val) }
@@ -140,6 +142,7 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
mut_variants.push(quote! { #name(&'a mut #inner_type) }); mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) }); as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) }); as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! { from_impls.push(quote! {
impl From<#inner_type> for #base_name { impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) } fn from(val: #inner_type) -> Self { #base_name::#name(val) }
@@ -172,6 +175,7 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
mut_variants.push(quote! { #name(&'a mut #inner_type) }); mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) }); as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) }); as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! { from_impls.push(quote! {
impl From<#inner_type> for #base_name { impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) } fn from(val: #inner_type) -> Self { #base_name::#name(val) }
@@ -223,6 +227,12 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
#( #as_mut_arms ),* #( #as_mut_arms ),*
} }
} }
pub fn span(&self) -> rnix::TextRange {
match self {
#( #span_arms ),*
}
}
} }
// `From` implementations for converting variant structs into the main enum. // `From` implementations for converting variant structs into the main enum.

View File

@@ -75,7 +75,7 @@ name = "builtins"
harness = false harness = false
[[bench]] [[bench]]
name = "scc_optimization" name = "thunk_scope"
harness = false harness = false
[[bench]] [[bench]]

View File

@@ -31,7 +31,6 @@ export const hasAttr =
(set: NixValue): boolean => (set: NixValue): boolean =>
Object.hasOwn(forceAttrs(set), forceStringValue(s)); Object.hasOwn(forceAttrs(set), forceStringValue(s));
let counter = 0;
export const mapAttrs = export const mapAttrs =
(f: NixValue) => (f: NixValue) =>
(attrs: NixValue): NixAttrs => { (attrs: NixValue): NixAttrs => {
@@ -39,8 +38,7 @@ export const mapAttrs =
const forcedF = forceFunction(f); const forcedF = forceFunction(f);
const newAttrs: NixAttrs = {}; const newAttrs: NixAttrs = {};
for (const key in forcedAttrs) { for (const key in forcedAttrs) {
newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]), `created by mapAttrs (${counter})`); newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]), "created by mapAttrs");
counter += 1;
} }
return newAttrs; return newAttrs;
}; };

View File

@@ -46,7 +46,7 @@ const validateBuilder = (attrs: NixAttrs, outContext: NixStringContext): string
if (!("builder" in attrs)) { if (!("builder" in attrs)) {
throw new Error("derivation: missing required attribute 'builder'"); throw new Error("derivation: missing required attribute 'builder'");
} }
return coerceToString(attrs.builder, StringCoercionMode.ToString, false, outContext); return coerceToString(attrs.builder, StringCoercionMode.ToString, true, outContext);
}; };
const validateSystem = (attrs: NixAttrs): string => { const validateSystem = (attrs: NixAttrs): string => {
@@ -87,17 +87,18 @@ const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] =>
return []; return [];
} }
const argsList = forceList(attrs.args); const argsList = forceList(attrs.args);
return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, false, outContext)); return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext));
}; };
const extractEnv = ( const structuredAttrsExcludedKeys = new Set([
attrs: NixAttrs, "__structuredAttrs",
structuredAttrs: boolean, "__ignoreNulls",
ignoreNulls: boolean, "__contentAddressed",
outContext: NixStringContext, "impure",
drvName: string, "args",
): Map<string, string> => { ]);
const specialAttrs = new Set([
const specialAttrs = new Set([
"name", "name",
"builder", "builder",
"system", "system",
@@ -107,14 +108,30 @@ const extractEnv = (
"__ignoreNulls", "__ignoreNulls",
"__contentAddressed", "__contentAddressed",
"impure", "impure",
]); ]);
const sortedJsonStringify = (obj: Record<string, any>): string => {
const sortedKeys = Object.keys(obj).sort();
const sortedObj: Record<string, any> = {};
for (const key of sortedKeys) {
sortedObj[key] = obj[key];
}
return JSON.stringify(sortedObj);
};
const extractEnv = (
attrs: NixAttrs,
structuredAttrs: boolean,
ignoreNulls: boolean,
outContext: NixStringContext,
drvName: string,
): Map<string, string> => {
const env = new Map<string, string>(); const env = new Map<string, string>();
if (structuredAttrs) { if (structuredAttrs) {
const jsonAttrs: Record<string, any> = {}; const jsonAttrs: Record<string, any> = {};
for (const [key, value] of Object.entries(attrs)) { for (const [key, value] of Object.entries(attrs)) {
if (!specialAttrs.has(key)) { if (!structuredAttrsExcludedKeys.has(key)) {
const forcedValue = force(value); const forcedValue = force(value);
if (ignoreNulls && forcedValue === null) { if (ignoreNulls && forcedValue === null) {
continue; continue;
@@ -165,7 +182,7 @@ const extractEnv = (
); );
} }
} }
env.set("__json", JSON.stringify(jsonAttrs)); env.set("__json", sortedJsonStringify(jsonAttrs));
} else { } else {
for (const [key, value] of Object.entries(attrs)) { for (const [key, value] of Object.entries(attrs)) {
if (!specialAttrs.has(key)) { if (!specialAttrs.has(key)) {
@@ -173,7 +190,7 @@ const extractEnv = (
if (ignoreNulls && forcedValue === null) { if (ignoreNulls && forcedValue === null) {
continue; continue;
} }
env.set(key, coerceToString(value, StringCoercionMode.ToString, false, outContext)); env.set(key, coerceToString(value, StringCoercionMode.ToString, true, outContext));
} }
} }
} }
@@ -187,14 +204,32 @@ interface FixedOutputInfo {
hashMode: string; hashMode: string;
} }
const extractFixedOutputInfo = (attrs: NixAttrs): FixedOutputInfo | null => { const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOutputInfo | null => {
if (!("outputHash" in attrs)) { if (!("outputHash" in attrs)) {
return null; return null;
} }
const hashValue = force(attrs.outputHash);
if (ignoreNulls && hashValue === null) {
return null;
}
const hash = forceStringValue(attrs.outputHash); const hash = forceStringValue(attrs.outputHash);
const hashAlgo = "outputHashAlgo" in attrs ? forceStringValue(attrs.outputHashAlgo) : "sha256";
const hashMode = "outputHashMode" in attrs ? forceStringValue(attrs.outputHashMode) : "flat"; let hashAlgo = "sha256";
if ("outputHashAlgo" in attrs) {
const algoValue = force(attrs.outputHashAlgo);
if (!(ignoreNulls && algoValue === null)) {
hashAlgo = forceStringValue(attrs.outputHashAlgo);
}
}
let hashMode = "flat";
if ("outputHashMode" in attrs) {
const modeValue = force(attrs.outputHashMode);
if (!(ignoreNulls && modeValue === null)) {
hashMode = forceStringValue(attrs.outputHashMode);
}
}
if (hashMode !== "flat" && hashMode !== "recursive") { if (hashMode !== "flat" && hashMode !== "recursive") {
throw new Error(`derivation: invalid outputHashMode '${hashMode}' (must be 'flat' or 'recursive')`); throw new Error(`derivation: invalid outputHashMode '${hashMode}' (must be 'flat' or 'recursive')`);
@@ -217,14 +252,13 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const builder = validateBuilder(attrs, collectedContext); const builder = validateBuilder(attrs, collectedContext);
const platform = validateSystem(attrs); const platform = validateSystem(attrs);
const outputs = extractOutputs(attrs);
const fixedOutputInfo = extractFixedOutputInfo(attrs);
validateFixedOutputConstraints(fixedOutputInfo, outputs);
const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false; const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false;
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false; const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;
const outputs = extractOutputs(attrs);
const fixedOutputInfo = extractFixedOutputInfo(attrs, ignoreNulls);
validateFixedOutputConstraints(fixedOutputInfo, outputs);
if ("__contentAddressed" in attrs && force(attrs.__contentAddressed) === true) { if ("__contentAddressed" in attrs && force(attrs.__contentAddressed) === true) {
throw new Error("ca derivations are not supported"); throw new Error("ca derivations are not supported");
} }
@@ -236,15 +270,28 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const drvArgs = extractArgs(attrs, collectedContext); const drvArgs = extractArgs(attrs, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName); const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);
if (!structuredAttrs) {
env.set("name", drvName); env.set("name", drvName);
env.set("builder", builder); env.set("builder", builder);
env.set("system", platform); env.set("system", platform);
if (outputs.length > 1 || outputs[0] !== "out") { if (outputs.length > 1 || outputs[0] !== "out") {
env.set("outputs", outputs.join(" ")); env.set("outputs", outputs.join(" "));
} }
}
const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext); const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext);
const collectDrvReferences = (): string[] => {
const refs = new Set<string>();
for (const src of inputSrcs) {
refs.add(src);
}
for (const drvPath of inputDrvs.keys()) {
refs.add(drvPath);
}
return Array.from(refs).sort();
};
let outputInfos: Map<string, OutputInfo>; let outputInfos: Map<string, OutputInfo>;
let drvPath: string; let drvPath: string;
@@ -282,7 +329,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
}; };
const finalAterm = generateAterm(finalDrv); const finalAterm = generateAterm(finalDrv);
const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm); const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
drvPath = Deno.core.ops.op_make_store_path("text", finalDrvHash, `${drvName}.drv`); drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
} else { } else {
const maskedOutputs = new Map<string, OutputInfo>( const maskedOutputs = new Map<string, OutputInfo>(
outputs.map((o) => [ outputs.map((o) => [
@@ -333,7 +380,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const finalAterm = generateAterm(finalDrv); const finalAterm = generateAterm(finalDrv);
const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm); const finalDrvHash = Deno.core.ops.op_sha256_hex(finalAterm);
drvPath = Deno.core.ops.op_make_store_path("text", finalDrvHash, `${drvName}.drv`); drvPath = Deno.core.ops.op_make_text_store_path(finalDrvHash, `${drvName}.drv`, collectDrvReferences());
} }
const result: NixAttrs = {}; const result: NixAttrs = {};
@@ -353,77 +400,52 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
export const derivation = (args: NixValue): NixAttrs => { export const derivation = (args: NixValue): NixAttrs => {
const attrs = forceAttrs(args); const attrs = forceAttrs(args);
const strict = derivationStrict(args);
const outputs: string[] = extractOutputs(attrs); const outputs: string[] = extractOutputs(attrs);
const drvName = validateName(attrs);
const collectedContext: NixStringContext = new Set();
const builder = validateBuilder(attrs, collectedContext);
const platform = validateSystem(attrs);
const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false;
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;
const drvArgs = extractArgs(attrs, collectedContext);
const specialAttrs = new Set([ const strictThunk = createThunk(() => derivationStrict(args), "derivationStrict");
"name",
"builder",
"system",
"args",
"outputs",
"__structuredAttrs",
"__ignoreNulls",
"__contentAddressed",
"impure",
]);
const baseAttrs: NixAttrs = { const commonAttrs: NixAttrs = { ...attrs };
const outputToAttrListElement = (outputName: string): { name: string; value: NixAttrs } => {
const value: NixAttrs = {
...commonAttrs,
outPath: createThunk(() => (force(strictThunk) as NixAttrs)[outputName], `outPath_${outputName}`),
drvPath: createThunk(() => (force(strictThunk) as NixAttrs).drvPath, "drvPath"),
type: "derivation", type: "derivation",
drvPath: strict.drvPath,
name: drvName,
builder,
system: platform,
};
if (drvArgs.length > 0) {
baseAttrs.args = drvArgs;
}
if (!structuredAttrs) {
for (const [key, value] of Object.entries(attrs)) {
if (!specialAttrs.has(key) && !outputs.includes(key)) {
const forcedValue = force(value);
if (!(ignoreNulls && forcedValue === null)) {
baseAttrs[key] = value;
}
}
}
}
const outputsList: NixAttrs[] = [];
for (const outputName of outputs) {
const outputObj: NixAttrs = {
...baseAttrs,
outPath: strict[outputName],
outputName, outputName,
}; };
outputsList.push(outputObj); return { name: outputName, value };
} };
baseAttrs.drvAttrs = attrs; const outputsList = outputs.map(outputToAttrListElement);
for (const [i, outputName] of outputs.entries()) {
baseAttrs[outputName] = createThunk(() => outputsList[i], `output_${outputName}`);
}
baseAttrs.all = createThunk(() => outputsList, "all_outputs");
for (const outputObj of outputsList) { for (const { name: outputName } of outputsList) {
commonAttrs[outputName] = createThunk(
() => outputsList.find((o) => o.name === outputName)!.value,
`output_${outputName}`,
);
}
commonAttrs.all = createThunk(
() => outputsList.map((o) => o.value),
"all_outputs",
);
commonAttrs.drvAttrs = attrs;
for (const { value: outputObj } of outputsList) {
for (const { name: outputName } of outputsList) {
outputObj[outputName] = createThunk(
() => outputsList.find((o) => o.name === outputName)!.value,
`output_${outputName}`,
);
}
outputObj.all = createThunk(
() => outputsList.map((o) => o.value),
"all_outputs",
);
outputObj.drvAttrs = attrs; outputObj.drvAttrs = attrs;
for (const [i, outputName] of outputs.entries()) {
outputObj[outputName] = createThunk(() => outputsList[i], `output_${outputName}`);
}
outputObj.all = createThunk(() => outputsList, "all_outputs");
} }
return outputsList[0]; return outputsList[0].value;
}; };

View File

@@ -5,6 +5,7 @@
import { CatchableError, HAS_CONTEXT, type NixValue } from "../types"; import { CatchableError, HAS_CONTEXT, type NixValue } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
import { coerceToString, StringCoercionMode } from "./conversion"; import { coerceToString, StringCoercionMode } from "./conversion";
import { printValue } from "../print";
export const seq = export const seq =
(e1: NixValue) => (e1: NixValue) =>
@@ -40,7 +41,7 @@ export const throwFunc = (s: NixValue): never => {
export const trace = export const trace =
(e1: NixValue) => (e1: NixValue) =>
(e2: NixValue): NixValue => { (e2: NixValue): NixValue => {
console.log(`trace: ${coerceToString(e1, StringCoercionMode.Base)}`); console.error(`trace: ${printValue(e1)}`);
return e2; return e2;
}; };

View File

@@ -18,7 +18,8 @@ import * as misc from "./misc";
import * as derivation from "./derivation"; import * as derivation from "./derivation";
import type { NixValue } from "../types"; import type { NixValue } from "../types";
import { createThunk, force } from "../thunk"; import { createThunk, force, isThunk } from "../thunk";
import { getTos } from "../helpers";
/** /**
* Symbol used to mark functions as primops (primitive operations) * Symbol used to mark functions as primops (primitive operations)
@@ -263,4 +264,9 @@ export const builtins: any = {
nixPath: [], nixPath: [],
nixVersion: "2.31.2", nixVersion: "2.31.2",
storeDir: "INVALID_PATH", storeDir: "INVALID_PATH",
__traceCaller: (e: NixValue) => {
console.log(`traceCaller: ${getTos()}`)
return e
},
}; };

View File

@@ -3,7 +3,7 @@
* Implemented via Rust ops exposed through deno_core * Implemented via Rust ops exposed through deno_core
*/ */
import { forceAttrs, forceBool, forceList, forceStringNoCtx, forceStringValue } from "../type-assert"; import { forceAttrs, forceBool, forceFunction, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixValue, NixAttrs, NixPath } from "../types"; import type { NixValue, NixAttrs, NixPath } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types"; import { isNixPath, IS_PATH, CatchableError } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
@@ -12,7 +12,8 @@ import { getPathValue } from "../path";
import type { NixStringContext, StringWithContext } from "../string-context"; import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext } from "../string-context"; import { mkStringWithContext } from "../string-context";
import { isPath } from "./type-check"; import { isPath } from "./type-check";
import { getCorepkg } from "../corepkgs";
const importCache = new Map<string, NixValue>();
export const importFunc = (path: NixValue): NixValue => { export const importFunc = (path: NixValue): NixValue => {
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
@@ -30,9 +31,17 @@ Dependency tracking for imported derivations may be incomplete.`,
); );
} }
const cached = importCache.get(pathStr);
if (cached !== undefined) {
return cached;
}
// Call Rust op - returns JS code string // Call Rust op - returns JS code string
const code = Deno.core.ops.op_import(pathStr); const code = Deno.core.ops.op_import(pathStr);
return Function(`return (${code})`)(); const result = Function(`return (${code})`)();
importCache.set(pathStr, result);
return result;
}; };
export const scopedImport = export const scopedImport =
@@ -324,7 +333,7 @@ export const pathExists = (path: NixValue): boolean => {
* Parameters (attribute set): * Parameters (attribute set):
* - path (required): Path to add to the store * - path (required): Path to add to the store
* - name (optional): Name to use in store path (defaults to basename) * - name (optional): Name to use in store path (defaults to basename)
* - filter (optional): Function (path, type) -> bool (NOT IMPLEMENTED YET) * - filter (optional): Function (path, type) -> bool
* - recursive (optional): Boolean, default true (NAR vs flat hashing) * - recursive (optional): Boolean, default true (NAR vs flat hashing)
* - sha256 (optional): Expected SHA-256 hash (hex-encoded) * - sha256 (optional): Expected SHA-256 hash (hex-encoded)
* *
@@ -357,9 +366,30 @@ export const path = (args: NixValue): string => {
// Optional: sha256 parameter // Optional: sha256 parameter
const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null; const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null;
// TODO: Handle filter parameter // Handle filter parameter
if ("filter" in attrs) { if ("filter" in attrs) {
throw new Error("builtins.path: 'filter' parameter is not yet implemented"); const filterFn = forceFunction(attrs.filter);
const entries: [string, string][] = Deno.core.ops.op_walk_dir(pathStr);
const includePaths: string[] = [];
for (const [relPath, fileType] of entries) {
const fullPath = pathStr + "/" + relPath;
const innerFn = forceFunction(filterFn(fullPath));
const shouldInclude = force(innerFn(fileType));
if (shouldInclude === true) {
includePaths.push(relPath);
}
}
const storePath: string = Deno.core.ops.op_add_filtered_path(
pathStr,
name,
recursive,
sha256,
includePaths,
);
return storePath;
} }
// Call Rust op to add path to store // Call Rust op to add path to store
@@ -452,13 +482,8 @@ export const findFile =
} }
if (lookupPathStr.startsWith("nix/")) { if (lookupPathStr.startsWith("nix/")) {
const corepkgName = lookupPathStr.substring(4);
const corepkgContent = getCorepkg(corepkgName);
if (corepkgContent !== undefined) {
// FIXME: special path type // FIXME: special path type
return { [IS_PATH]: true, value: `<nix/${corepkgName}>` }; return { [IS_PATH]: true, value: `<${lookupPathStr}>` };
}
} }
throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`); throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`);

View File

@@ -141,10 +141,9 @@ export const all =
export const any = export const any =
(pred: NixValue) => (pred: NixValue) =>
(list: NixValue): boolean => { (list: NixValue): boolean => {
const forcedList = forceList(list); // CppNix forces `pred` eagerly
if (forcedList.length) {
const f = forceFunction(pred); const f = forceFunction(pred);
const forcedList = forceList(list);
// `false` when no element
return forcedList.some((e) => forceBool(f(e))); return forcedList.some((e) => forceBool(f(e)));
}
return true;
}; };

View File

@@ -2,8 +2,8 @@
* Miscellaneous builtin functions * Miscellaneous builtin functions
*/ */
import { createThunk, force } from "../thunk"; import { force } from "../thunk";
import { CatchableError } from "../types"; import { CatchableError, ATTR_POSITIONS } from "../types";
import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types"; import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types";
import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert"; import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert";
import * as context from "./context"; import * as context from "./context";
@@ -20,7 +20,8 @@ import {
export const addErrorContext = export const addErrorContext =
(e1: NixValue) => (e1: NixValue) =>
(e2: NixValue): NixValue => { (e2: NixValue): NixValue => {
console.log("[WARNING]: addErrorContext not implemented"); // FIXME:
// console.log("[WARNING]: addErrorContext not implemented");
return e2; return e2;
}; };
@@ -164,6 +165,14 @@ export const functionArgs = (f: NixValue): NixAttrs => {
for (const key of func.args!.optional) { for (const key of func.args!.optional) {
ret[key] = true; ret[key] = true;
} }
const positions = func.args!.positions;
if (positions && Object.keys(positions).length > 0) {
Object.defineProperty(ret, ATTR_POSITIONS, {
value: positions,
enumerable: false,
writable: false,
});
}
return ret; return ret;
} }
return {}; return {};
@@ -253,8 +262,9 @@ export const parseFlakeRef = (s: NixValue): never => {
throw new Error("Not implemented: parseFlakeRef"); throw new Error("Not implemented: parseFlakeRef");
}; };
export const placeholder = (output: NixValue): never => { export const placeholder = (output: NixValue): NixValue => {
throw new Error("Not implemented: placeholder"); const outputStr = forceStringNoCtx(output);
return Deno.core.ops.op_make_placeholder(outputStr);
}; };
export const replaceStrings = export const replaceStrings =

View File

@@ -1,6 +1,7 @@
import { HAS_CONTEXT, NixStringContext } from "./string-context"; import { HAS_CONTEXT, NixStringContext } from "./string-context";
import { force } from "./thunk"; import { force } from "./thunk";
import type { NixValue } from "./types"; import type { NixValue } from "./types";
import { isStringWithContext } from "./types";
export const nixValueToJson = ( export const nixValueToJson = (
value: NixValue, value: NixValue,
@@ -35,7 +36,6 @@ export const nixValueToJson = (
if (seen.has(v)) { if (seen.has(v)) {
throw new Error("derivation: circular reference detected in __structuredAttrs"); throw new Error("derivation: circular reference detected in __structuredAttrs");
} }
// FIXME: tryAttrsToString
seen.add(v); seen.add(v);
} }
@@ -44,9 +44,31 @@ export const nixValueToJson = (
} }
if (typeof v === "object") { if (typeof v === "object") {
if ("__toString" in v && typeof force(v.__toString) === "function") {
const toStringMethod = force(v.__toString) as (self: typeof v) => NixValue;
const result = force(toStringMethod(v));
if (typeof result === "string") {
return result;
}
if (isStringWithContext(result)) {
if (outContext) {
for (const elem of result.context) {
outContext.add(elem);
}
}
return result.value;
}
return nixValueToJson(result, seen, outContext);
}
if ("outPath" in v) {
return nixValueToJson(v.outPath, seen, outContext);
}
const result: Record<string, any> = {}; const result: Record<string, any> = {};
for (const [key, val] of Object.entries(v)) { const keys = Object.keys(v).sort();
result[key] = nixValueToJson(val, seen, outContext); for (const key of keys) {
result[key] = nixValueToJson((v as Record<string, NixValue>)[key], seen, outContext);
} }
return result; return result;
} }

View File

@@ -1,73 +0,0 @@
export const FETCHURL_NIX = `{
system ? "", # obsolete
url,
hash ? "", # an SRI hash
# Legacy hash specification
md5 ? "",
sha1 ? "",
sha256 ? "",
sha512 ? "",
outputHash ?
if hash != "" then
hash
else if sha512 != "" then
sha512
else if sha1 != "" then
sha1
else if md5 != "" then
md5
else
sha256,
outputHashAlgo ?
if hash != "" then
""
else if sha512 != "" then
"sha512"
else if sha1 != "" then
"sha1"
else if md5 != "" then
"md5"
else
"sha256",
executable ? false,
unpack ? false,
name ? baseNameOf (toString url),
impure ? false,
}:
derivation (
{
builder = "builtin:fetchurl";
# New-style output content requirements.
outputHashMode = if unpack || executable then "recursive" else "flat";
inherit
name
url
executable
unpack
;
system = "builtin";
# No need to double the amount of network traffic
preferLocalBuild = true;
# This attribute does nothing; it's here to avoid changing evaluation results.
impureEnvVars = [
"http_proxy"
"https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
];
# To make "nix-prefetch-url" work.
urls = [ url ];
}
// (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
)
`;

View File

@@ -1,9 +0,0 @@
import { FETCHURL_NIX } from "./fetchurl.nix";
export const COREPKGS: Record<string, string> = {
"fetchurl.nix": FETCHURL_NIX,
};
export const getCorepkg = (name: string): string | undefined => {
return COREPKGS[name];
};

View File

@@ -19,12 +19,10 @@ interface StackFrame {
const callStack: StackFrame[] = []; const callStack: StackFrame[] = [];
const MAX_STACK_DEPTH = 1000; const MAX_STACK_DEPTH = 1000;
export const STACK_TRACE = { enabled: false };
function enrichError(error: unknown): Error { function enrichError(error: unknown): Error {
const err = error instanceof Error ? error : new Error(String(error)); const err = error instanceof Error ? error : new Error(String(error));
if (!STACK_TRACE.enabled || callStack.length === 0) { if (callStack.length === 0) {
return err; return err;
} }
@@ -38,13 +36,17 @@ function enrichError(error: unknown): Error {
return err; return err;
} }
export const getTos = (): string => {
const tos = callStack[callStack.length - 2];
const { file, line, column } = Deno.core.ops.op_decode_span(tos.span);
return `${tos.message} at ${file}:${line}:${column}`;
}
/** /**
* Push an error context onto the stack * Push an error context onto the stack
* Used for tracking evaluation context (e.g., "while evaluating the condition") * Used for tracking evaluation context (e.g., "while evaluating the condition")
*/ */
export const pushContext = (message: string, span: string): void => { export const pushContext = (message: string, span: string): void => {
if (!STACK_TRACE.enabled) return;
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }
@@ -55,7 +57,6 @@ export const pushContext = (message: string, span: string): void => {
* Pop an error context from the stack * Pop an error context from the stack
*/ */
export const popContext = (): void => { export const popContext = (): void => {
if (!STACK_TRACE.enabled) return;
callStack.pop(); callStack.pop();
}; };
@@ -64,10 +65,6 @@ export const popContext = (): void => {
* Automatically pushes context before execution and pops after * Automatically pushes context before execution and pops after
*/ */
export const withContext = <T>(message: string, span: string, fn: () => T): T => { export const withContext = <T>(message: string, span: string, fn: () => T): T => {
if (!STACK_TRACE.enabled) {
return fn();
}
pushContext(message, span); pushContext(message, span);
try { try {
return fn(); return fn();
@@ -87,23 +84,20 @@ export const withContext = <T>(message: string, span: string, fn: () => T): T =>
* - Path mode: Store contexts are forbidden (will throw error) * - Path mode: Store contexts are forbidden (will throw error)
* - String mode: All contexts are preserved and merged * - String mode: All contexts are preserved and merged
* *
* If first element is a path, result is a path (with constraint: no store context allowed)
*
* @param parts - Array of values to concatenate * @param parts - Array of values to concatenate
* @param forceString - If true, result is always a string (paths are copied to store)
* @returns String or Path with merged contexts from all parts * @returns String or Path with merged contexts from all parts
*/ */
export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath => { export const concatStringsWithContext = (parts: NixValue[], forceString: boolean): NixString | NixPath => {
if (parts.length === 0) { if (parts.length === 0) {
return ""; return "";
} }
const forced = parts.map(force); const forced = parts.map(force);
// Check if first element is a path const firstIsPath = !forceString && isNixPath(forced[0]);
const firstIsPath = isNixPath(forced[0]);
if (firstIsPath) { if (firstIsPath) {
// Path concatenation mode: result will be a path
let result = (forced[0] as NixPath).value; let result = (forced[0] as NixPath).value;
for (let i = 1; i < forced.length; i++) { for (let i = 1; i < forced.length; i++) {
@@ -114,13 +108,11 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
} else if (typeof part === "string") { } else if (typeof part === "string") {
result += part; result += part;
} else if (isStringWithContext(part)) { } else if (isStringWithContext(part)) {
// Lix constraint: cannot mix store context with paths
if (part.context.size > 0) { if (part.context.size > 0) {
throw new TypeError("a string that refers to a store path cannot be appended to a path"); throw new TypeError("a string that refers to a store path cannot be appended to a path");
} }
result += part.value; result += part.value;
} else { } else {
// Coerce to string
const tempContext: NixStringContext = new Set(); const tempContext: NixStringContext = new Set();
const coerced = coerceToString(part, StringCoercionMode.Interpolation, false, tempContext); const coerced = coerceToString(part, StringCoercionMode.Interpolation, false, tempContext);
@@ -135,19 +127,15 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
return mkPath(result); return mkPath(result);
} }
// String concatenation mode
// Note: firstIsPath is already false at this point, otherwise we would have
// returned in the path concatenation branch above
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
const strParts: string[] = []; const strParts: string[] = [];
for (const part of parts) { for (const part of forced) {
const forced = force(part); if (isNixPath(part)) {
if (isNixPath(forced)) { const str = coerceToString(part, StringCoercionMode.Interpolation, true, context);
const str = coerceToString(forced, StringCoercionMode.Interpolation, true, context);
strParts.push(str); strParts.push(str);
} else { } else {
const str = coerceToString(forced, StringCoercionMode.Interpolation, false, context); const str = coerceToString(part, StringCoercionMode.Interpolation, false, context);
strParts.push(str); strParts.push(str);
} }
} }
@@ -183,7 +171,7 @@ export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
}; };
export const select = (obj: NixValue, attrpath: NixValue[], span?: string): NixValue => { export const select = (obj: NixValue, attrpath: NixValue[], span?: string): NixValue => {
if (STACK_TRACE.enabled && span) { if (span) {
const pathStrings = attrpath.map((a) => forceStringValue(a)); const pathStrings = attrpath.map((a) => forceStringValue(a));
const path = pathStrings.join("."); const path = pathStrings.join(".");
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute"; const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
@@ -229,7 +217,7 @@ export const selectWithDefault = (
default_val: NixValue, default_val: NixValue,
span?: string, span?: string,
): NixValue => { ): NixValue => {
if (STACK_TRACE.enabled && span) { if (span) {
const pathStrings = attrpath.map((a) => forceStringValue(a)); const pathStrings = attrpath.map((a) => forceStringValue(a));
const path = pathStrings.join("."); const path = pathStrings.join(".");
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute"; const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
@@ -337,7 +325,7 @@ export const validateParams = (
}; };
export const call = (func: NixValue, arg: NixValue, span?: string): NixValue => { export const call = (func: NixValue, arg: NixValue, span?: string): NixValue => {
if (STACK_TRACE.enabled && span) { if (span) {
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }

View File

@@ -4,7 +4,7 @@
* All functionality is exported via the global `Nix` object * All functionality is exported via the global `Nix` object
*/ */
import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS } from "./thunk"; import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS, forceDeepSafe, IS_CYCLE } from "./thunk";
import { import {
select, select,
selectWithDefault, selectWithDefault,
@@ -14,7 +14,6 @@ import {
concatStringsWithContext, concatStringsWithContext,
call, call,
assert, assert,
STACK_TRACE,
pushContext, pushContext,
popContext, popContext,
withContext, withContext,
@@ -35,13 +34,14 @@ export type NixRuntime = typeof Nix;
export const Nix = { export const Nix = {
createThunk, createThunk,
force, force,
forceDeepSafe,
forceBool, forceBool,
isThunk, isThunk,
IS_THUNK, IS_THUNK,
IS_CYCLE,
HAS_CONTEXT, HAS_CONTEXT,
IS_PATH, IS_PATH,
DEBUG_THUNKS, DEBUG_THUNKS,
STACK_TRACE,
assert, assert,
call, call,

View File

@@ -224,15 +224,17 @@ export const op = {
const attrsA = av as NixAttrs; const attrsA = av as NixAttrs;
const attrsB = bv as NixAttrs; const attrsB = bv as NixAttrs;
// If both denote a derivation (type = "derivation"), compare their outPaths // Derivation comparison: compare outPaths only
const isDerivationA = "type" in attrsA && force(attrsA.type) === "derivation"; // Safe to force 'type' because it's always a string literal, never a computed value
const isDerivationB = "type" in attrsB && force(attrsB.type) === "derivation"; if ("type" in attrsA && "type" in attrsB) {
const typeValA = force(attrsA.type);
if (isDerivationA && isDerivationB) { const typeValB = force(attrsB.type);
if (typeValA === "derivation" && typeValB === "derivation") {
if ("outPath" in attrsA && "outPath" in attrsB) { if ("outPath" in attrsA && "outPath" in attrsB) {
return op.eq(attrsA.outPath, attrsB.outPath); return op.eq(attrsA.outPath, attrsB.outPath);
} }
} }
}
// Otherwise, compare attributes one by one // Otherwise, compare attributes one by one
const keysA = Object.keys(attrsA).sort(); const keysA = Object.keys(attrsA).sort();

View File

@@ -1,7 +1,38 @@
import { IS_PATH, type NixPath } from "./types"; import { IS_PATH, type NixPath } from "./types";
/**
 * Canonicalize a path string: collapse repeated slashes, drop "." segments,
 * resolve ".." against the accumulated prefix (a ".." at the root is simply
 * dropped), and always produce an absolute path — "/" when nothing remains.
 *
 * @param path - raw path text (absolute or relative)
 * @returns the canonical absolute form
 */
const canonicalizePath = (path: string): string => {
  const resolved: string[] = [];
  // split("/") yields "" for leading/trailing/duplicate slashes; skip those
  // along with "." segments.
  for (const segment of path.split("/")) {
    if (segment === "" || segment === ".") {
      continue;
    }
    if (segment === "..") {
      // pop() on an empty array is a no-op, so ".." above the root is ignored.
      resolved.pop();
    } else {
      resolved.push(segment);
    }
  }
  return resolved.length === 0 ? "/" : "/" + resolved.join("/");
};
export const mkPath = (value: string): NixPath => { export const mkPath = (value: string): NixPath => {
return { [IS_PATH]: true, value }; return { [IS_PATH]: true, value: canonicalizePath(value) };
}; };
export const getPathValue = (p: NixPath): string => { export const getPathValue = (p: NixPath): string => {

View File

@@ -0,0 +1,107 @@
import { isThunk, IS_CYCLE } from "./thunk";
import { isStringWithContext } from "./string-context";
import { isNixPath, type NixValue } from "./types";
import { is_primop, get_primop_metadata } from "./builtins/index";
/**
 * Render a Nix value as human-readable text, in Nix REPL-style notation.
 *
 * Dispatch order matters here: thunks are detected before the generic object
 * branch, and cycle/repeat detection runs before the path/string-with-context
 * checks, so do not reorder the chain casually.
 *
 * @param value - value to render; thunks are NOT forced
 * @param seen  - objects already visited on this traversal; repeats print
 *               as «repeated» instead of recursing
 * @returns the formatted representation
 */
export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet()): string => {
  // Unevaluated thunks print as an opaque marker without being forced.
  if (isThunk(value)) {
    return "«thunk»";
  }
  if (value === null) {
    return "null";
  }
  if (typeof value === "boolean") {
    return value ? "true" : "false";
  }
  // Nix integers are bigint; floats are number — both print via toString.
  if (typeof value === "bigint") {
    return value.toString();
  }
  if (typeof value === "number") {
    return value.toString();
  }
  if (typeof value === "string") {
    return printString(value);
  }
  if (typeof value === "function") {
    // Builtins (primops) are distinguished from user lambdas; a primop that
    // already received some arguments prints as a partial application.
    if (is_primop(value)) {
      const meta = get_primop_metadata(value);
      if (meta && meta.applied > 0) {
        return "<PRIMOP-APP>";
      }
      return "<PRIMOP>";
    }
    return "<LAMBDA>";
  }
  if (typeof value === "object") {
    // Explicit cycle markers (tagged with IS_CYCLE) and objects already seen
    // during this traversal both print as «repeated».
    if (IS_CYCLE in value && (value as any)[IS_CYCLE] === true) {
      return "«repeated»";
    }
    if (seen.has(value)) {
      return "«repeated»";
    }
    // Mark visited BEFORE recursing into children so self-references are caught.
    seen.add(value);
    // Paths print bare (no quotes); context-carrying strings print like
    // ordinary strings, dropping their store context.
    if (isNixPath(value)) {
      return value.value;
    }
    if (isStringWithContext(value)) {
      return printString(value.value);
    }
    // Lists: space-separated elements inside brackets.
    if (Array.isArray(value)) {
      const items = value.map((v) => printValue(v, seen)).join(" ");
      return `[ ${items} ]`;
    }
    // Attribute sets: "key = value;" pairs; "{ }" when empty.
    const entries = Object.entries(value)
      .map(([k, v]) => `${printSymbol(k)} = ${printValue(v, seen)};`)
      .join(" ");
    return `{${entries ? ` ${entries} ` : " "}}`;
  }
  return "<unknown>";
};
/**
 * Quote and escape a string for Nix-style display: wraps the text in double
 * quotes and escapes backslash, double quote, newline, carriage return, and
 * tab; every other character passes through unchanged.
 *
 * @param s - raw string value
 * @returns the quoted, escaped representation
 */
const printString = (s: string): string => {
  // Single-character escape table; anything not listed is emitted verbatim.
  const escapes: Record<string, string> = {
    "\\": "\\\\",
    '"': '\\"',
    "\n": "\\n",
    "\r": "\\r",
    "\t": "\\t",
  };
  let out = "";
  for (const ch of s) {
    out += escapes[ch] ?? ch;
  }
  return '"' + out + '"';
};
// Attribute names matching this pattern are valid bare Nix identifiers.
const SYMBOL_REGEX = /^[a-zA-Z_][a-zA-Z0-9_'-]*$/;

/**
 * Print an attribute name: bare when it is a valid Nix identifier, otherwise
 * as a quoted/escaped string.
 *
 * @param s - attribute name
 * @returns the displayable form
 */
const printSymbol = (s: string): string => (SYMBOL_REGEX.test(s) ? s : printString(s));

View File

@@ -165,6 +165,10 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
} }
} }
for (const info of result.values()) {
info.outputs.sort();
}
return result; return result;
}; };

View File

@@ -4,6 +4,8 @@
*/ */
import type { NixValue, NixThunkInterface, NixStrictValue } from "./types"; import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
import { HAS_CONTEXT } from "./string-context";
import { IS_PATH } from "./types";
/** /**
* Symbol used to mark objects as thunks * Symbol used to mark objects as thunks
@@ -12,8 +14,9 @@ import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
export const IS_THUNK = Symbol("is_thunk"); export const IS_THUNK = Symbol("is_thunk");
const forceStack: NixThunk[] = []; const forceStack: NixThunk[] = [];
const MAX_FORCE_DEPTH = 1000;
export const DEBUG_THUNKS = { enabled: false }; export const DEBUG_THUNKS = { enabled: true };
/** /**
* NixThunk class - represents a lazy, unevaluated expression * NixThunk class - represents a lazy, unevaluated expression
@@ -97,14 +100,29 @@ export const force = (value: NixValue): NixStrictValue => {
if (DEBUG_THUNKS.enabled) { if (DEBUG_THUNKS.enabled) {
forceStack.push(thunk); forceStack.push(thunk);
if (forceStack.length > MAX_FORCE_DEPTH) {
let msg = `force depth exceeded ${MAX_FORCE_DEPTH} at ${thunk}\n`;
msg += "Force chain (most recent first):\n";
for (let i = forceStack.length - 1; i >= Math.max(0, forceStack.length - 20); i--) {
const t = forceStack[i];
msg += ` ${i + 1}. ${t}`;
msg += "\n";
}
throw new Error(msg);
}
} }
try { try {
const result = force(func()); const result = force(func());
thunk.result = result; thunk.result = result;
return result; return result;
} catch (e) {
thunk.func = func;
throw e;
} finally { } finally {
if (DEBUG_THUNKS.enabled) {
forceStack.pop(); forceStack.pop();
} }
}
}; };
/** /**
@@ -116,3 +134,53 @@ export const force = (value: NixValue): NixStrictValue => {
export const createThunk = (func: () => NixValue, label?: string): NixThunkInterface => { export const createThunk = (func: () => NixValue, label?: string): NixThunkInterface => {
return new NixThunk(func, label); return new NixThunk(func, label);
}; };
/**
* Symbol to mark cyclic references detected during deep forcing
*/
export const IS_CYCLE = Symbol("is_cycle");
/**
* Marker object for cyclic references
*/
export const CYCLE_MARKER = { [IS_CYCLE]: true };
/**
* Deeply force a value, handling cycles by returning a special marker.
* Uses WeakSet to track seen objects and avoid infinite recursion.
* Returns a fully forced value where thunks are replaced with their results.
* Cyclic references are replaced with CYCLE_MARKER, preserving the container type.
*/
export const forceDeepSafe = (value: NixValue, seen: WeakSet<object> = new WeakSet()): NixStrictValue => {
const forced = force(value);
if (forced === null || typeof forced !== "object") {
return forced;
}
if (seen.has(forced)) {
if (Array.isArray(forced)) {
return [CYCLE_MARKER];
}
return CYCLE_MARKER;
}
seen.add(forced);
if (HAS_CONTEXT in forced || IS_PATH in forced) {
return forced;
}
if (Array.isArray(forced)) {
return forced.map((item) => forceDeepSafe(item, seen));
}
if (typeof forced === "object") {
const result: Record<string, NixValue> = {};
for (const [key, val] of Object.entries(forced)) {
result[key] = forceDeepSafe(val, seen);
}
return result;
}
return forced;
};

View File

@@ -16,7 +16,6 @@ import type {
} from "./types"; } from "./types";
import { isStringWithContext, isNixPath } from "./types"; import { isStringWithContext, isNixPath } from "./types";
import { force } from "./thunk"; import { force } from "./thunk";
import { getStringValue } from "./string-context";
import { isAttrs, isFunction, typeOf } from "./builtins/type-check"; import { isAttrs, isFunction, typeOf } from "./builtins/type-check";
/** /**
@@ -32,15 +31,25 @@ export const forceList = (value: NixValue): NixList => {
}; };
/** /**
* Force a value and assert it's a function * Force a value and assert it's a function or functor
* @throws TypeError if value is not a function after forcing * @throws TypeError if value is not a function or functor after forcing
*/ */
export const forceFunction = (value: NixValue): NixFunction => { export const forceFunction = (value: NixValue): NixFunction => {
const forced = force(value); const forced = force(value);
if (!isFunction(forced)) { if (isFunction(forced)) {
throw new TypeError(`Expected function, got ${typeOf(forced)}`);
}
return forced; return forced;
}
if (
typeof forced === "object" &&
!Array.isArray(forced) &&
forced !== null &&
"__functor" in forced
) {
const functorSet = forced as NixAttrs;
const functor = forceFunction(functorSet.__functor);
return (arg: NixValue) => forceFunction(functor(functorSet))(arg);
}
throw new TypeError(`Expected function, got ${typeOf(forced)}`);
}; };
/** /**

View File

@@ -39,9 +39,11 @@ export class NixArgs {
optional: string[]; optional: string[];
allowed: Set<string>; allowed: Set<string>;
ellipsis: boolean; ellipsis: boolean;
constructor(required: string[], optional: string[], ellipsis: boolean) { positions: Record<string, string>;
constructor(required: string[], optional: string[], positions: Record<string, string>, ellipsis: boolean) {
this.required = required; this.required = required;
this.optional = optional; this.optional = optional;
this.positions = positions;
this.ellipsis = ellipsis; this.ellipsis = ellipsis;
this.allowed = new Set(required.concat(optional)); this.allowed = new Set(required.concat(optional));
} }
@@ -67,10 +69,11 @@ export const mkFunction = (
f: (arg: NixValue) => NixValue, f: (arg: NixValue) => NixValue,
required: string[], required: string[],
optional: string[], optional: string[],
positions: Record<string, string>,
ellipsis: boolean, ellipsis: boolean,
): NixFunction => { ): NixFunction => {
const func = f as NixFunction; const func = f as NixFunction;
func.args = new NixArgs(required, optional, ellipsis); func.args = new NixArgs(required, optional, positions, ellipsis);
return func; return func;
}; };

View File

@@ -42,8 +42,10 @@ declare global {
function op_read_dir(path: string): Record<string, string>; function op_read_dir(path: string): Record<string, string>;
function op_path_exists(path: string): boolean; function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string; function op_sha256_hex(data: string): string;
function op_make_placeholder(output: string): string;
function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null }; function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null };
function op_make_store_path(ty: string, hash_hex: string, name: string): string; function op_make_store_path(ty: string, hash_hex: string, name: string): string;
function op_make_text_store_path(hash_hex: string, name: string, references: string[]): string;
function op_output_path_name(drv_name: string, output_name: string): string; function op_output_path_name(drv_name: string, output_name: string): string;
function op_make_fixed_output_path( function op_make_fixed_output_path(
hash_algo: string, hash_algo: string,
@@ -83,6 +85,14 @@ declare global {
function op_to_file(name: string, contents: string, references: string[]): string; function op_to_file(name: string, contents: string, references: string[]): string;
function op_copy_path_to_store(path: string): string; function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string; function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
include_paths: string[],
): string;
} }
} }
} }

File diff suppressed because it is too large Load Diff

View File

@@ -1,28 +1,21 @@
use std::path::Path; use std::path::Path;
use std::ptr::NonNull; use std::ptr::NonNull;
use hashbrown::{HashMap, HashSet}; use hashbrown::HashMap;
use itertools::Itertools as _; use itertools::Itertools as _;
use petgraph::graphmap::DiGraphMap;
use rnix::TextRange; use rnix::TextRange;
use string_interner::DefaultStringInterner; use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, compile}; use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result, Source}; use crate::error::{Error, Result, Source};
use crate::ir::{ use crate::ir::{
Arg, ArgId, Bool, Builtin, Downgrade as _, DowngradeContext, ExprId, ExprRef, Ir, Null, SymId, Arg, ArgId, Bool, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, Null, SymId, Thunk,
Thunk, ToIr as _, synthetic_span, ToIr as _, synthetic_span,
}; };
use crate::runtime::{Runtime, RuntimeContext}; use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{Store, StoreBackend, StoreConfig}; use crate::store::{Store, StoreBackend, StoreConfig};
use crate::value::Value; use crate::value::Value;
#[derive(Debug)]
pub(crate) struct SccInfo {
/// list of SCCs (exprs, recursive)
pub(crate) sccs: Vec<(Vec<ExprId>, bool)>,
}
pub struct Context { pub struct Context {
ctx: Ctx, ctx: Ctx,
runtime: Runtime<Ctx>, runtime: Runtime<Ctx>,
@@ -44,7 +37,7 @@ impl Context {
tracing::debug!("Executing JavaScript"); tracing::debug!("Executing JavaScript");
self.runtime self.runtime
.eval(format!("Nix.force({code})"), &mut self.ctx) .eval(format!("Nix.forceDeepSafe({code})"), &mut self.ctx)
} }
pub fn compile_code(&mut self, source: Source) -> Result<String> { pub fn compile_code(&mut self, source: Source) -> Result<String> {
@@ -256,14 +249,6 @@ impl RuntimeContext for Ctx {
} }
} }
struct DependencyTracker {
graph: DiGraphMap<ExprId, ()>,
current_binding: Option<ExprId>,
let_scope_exprs: HashSet<ExprId>,
// The outer binding that owns this tracker (for nested let scopes in function params)
owner_binding: Option<ExprId>,
}
enum Scope<'ctx> { enum Scope<'ctx> {
Global(&'ctx HashMap<SymId, ExprId>), Global(&'ctx HashMap<SymId, ExprId>),
Let(HashMap<SymId, ExprId>), Let(HashMap<SymId, ExprId>),
@@ -289,10 +274,10 @@ impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
pub struct DowngradeCtx<'ctx> { pub struct DowngradeCtx<'ctx> {
ctx: &'ctx mut Ctx, ctx: &'ctx mut Ctx,
irs: Vec<Option<Ir>>, irs: Vec<Ir>,
scopes: Vec<Scope<'ctx>>, scopes: Vec<Scope<'ctx>>,
arg_id: usize, arg_id: usize,
dep_tracker_stack: Vec<DependencyTracker>, thunk_scopes: Vec<Vec<(ExprId, ExprId)>>,
} }
impl<'ctx> DowngradeCtx<'ctx> { impl<'ctx> DowngradeCtx<'ctx> {
@@ -301,7 +286,7 @@ impl<'ctx> DowngradeCtx<'ctx> {
scopes: vec![Scope::Global(global)], scopes: vec![Scope::Global(global)],
irs: vec![], irs: vec![],
arg_id: 0, arg_id: 0,
dep_tracker_stack: Vec::new(), thunk_scopes: vec![Vec::new()],
ctx, ctx,
} }
} }
@@ -309,18 +294,18 @@ impl<'ctx> DowngradeCtx<'ctx> {
impl DowngradeContext for DowngradeCtx<'_> { impl DowngradeContext for DowngradeCtx<'_> {
fn new_expr(&mut self, expr: Ir) -> ExprId { fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(Some(expr)); self.irs.push(expr);
ExprId(self.ctx.irs.len() + self.irs.len() - 1) ExprId(self.ctx.irs.len() + self.irs.len() - 1)
} }
fn new_arg(&mut self, span: TextRange) -> ExprId { fn new_arg(&mut self, span: TextRange) -> ExprId {
self.irs.push(Some( self.irs.push(
Arg { Arg {
inner: ArgId(self.arg_id), inner: ArgId(self.arg_id),
span, span,
} }
.to_ir(), .to_ir(),
)); );
self.arg_id += 1; self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1) ExprId(self.ctx.irs.len() + self.irs.len() - 1)
} }
@@ -332,8 +317,6 @@ impl DowngradeContext for DowngradeCtx<'_> {
self.irs self.irs
.get(id.0 - self.ctx.irs.len()) .get(id.0 - self.ctx.irs.len())
.expect("ExprId out of bounds") .expect("ExprId out of bounds")
.as_ref()
.expect("maybe_thunk called on an extracted expr")
} }
} }
@@ -346,14 +329,15 @@ impl DowngradeContext for DowngradeCtx<'_> {
| Ir::Float(_) | Ir::Float(_)
| Ir::Bool(_) | Ir::Bool(_)
| Ir::Null(_) | Ir::Null(_)
| Ir::Str(_) => id, | Ir::Str(_)
_ => self.new_expr( | Ir::Thunk(_) => id,
Thunk { _ => {
inner: id, let span = ir.span();
span: ir.span(), let slot = self.reserve_slots(1).next().expect("reserve_slots failed");
self.replace_ir(slot, Thunk { inner: slot, span }.to_ir());
self.register_thunk(slot, id);
slot
} }
.to_ir(),
),
} }
} }
@@ -375,45 +359,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
} }
Scope::Let(let_scope) => { Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) { if let Some(&expr) = let_scope.get(&sym) {
// Find which tracker contains this expression return Ok(self.new_expr(Thunk { inner: expr, span }.to_ir()));
let expr_tracker_idx = self
.dep_tracker_stack
.iter()
.position(|t| t.let_scope_exprs.contains(&expr));
// Find the innermost tracker with a current_binding
let current_tracker_idx = self
.dep_tracker_stack
.iter()
.rposition(|t| t.current_binding.is_some());
// Record dependency if both exist
if let (Some(expr_idx), Some(curr_idx)) =
(expr_tracker_idx, current_tracker_idx)
{
let current_binding = self.dep_tracker_stack[curr_idx]
.current_binding
.expect("current_binding not set");
let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
// If referencing from inner scope to outer scope
if curr_idx >= expr_idx {
let tracker = &mut self.dep_tracker_stack[expr_idx];
let from_node = current_binding;
let to_node = expr;
if curr_idx > expr_idx {
// Cross-scope reference: use owner_binding if available
if let Some(owner) = owner_binding {
tracker.graph.add_edge(owner, expr, ());
}
} else {
// Same-level reference: record directly
tracker.graph.add_edge(from_node, to_node, ());
}
}
}
return Ok(self.new_expr(ExprRef { inner: expr, span }.to_ir()));
} }
} }
&Scope::Param(param_sym, expr) => { &Scope::Param(param_sym, expr) => {
@@ -456,22 +402,9 @@ impl DowngradeContext for DowngradeCtx<'_> {
}) })
} }
fn extract_ir(&mut self, id: ExprId) -> Ir {
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.take()
.expect("extract_expr called on an already extracted expr")
}
fn replace_ir(&mut self, id: ExprId, expr: Ir) { fn replace_ir(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len(); let local_id = id.0 - self.ctx.irs.len();
let _ = self *self.irs.get_mut(local_id).expect("ExprId out of bounds") = expr;
.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.insert(expr);
} }
fn get_current_source(&self) -> Source { fn get_current_source(&self) -> Source {
@@ -481,16 +414,25 @@ impl DowngradeContext for DowngradeCtx<'_> {
#[allow(refining_impl_trait)] #[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> { fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len(); let start = self.ctx.irs.len() + self.irs.len();
self.irs.extend(std::iter::repeat_with(|| None).take(slots)); let range = (start..start + slots).map(ExprId);
(start..start + slots).map(ExprId) let span = synthetic_span();
// Fill reserved slots with placeholder value
self.irs.extend(
range
.clone()
.map(|slot| Thunk { inner: slot, span }.to_ir()),
);
range
} }
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> { fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
let root = root.downgrade(&mut self)?; use crate::ir::TopLevel;
self.ctx let body = root.downgrade(&mut self)?;
.irs let thunks = self.pop_thunk_scope();
.extend(self.irs.into_iter().map(Option::unwrap)); let span = self.get_ir(body).span();
Ok(root) let top_level = self.new_expr(TopLevel { body, thunks, span }.to_ir());
self.ctx.irs.extend(self.irs);
Ok(top_level)
} }
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
@@ -515,83 +457,26 @@ impl DowngradeContext for DowngradeCtx<'_> {
where where
F: FnOnce(&mut Self) -> R, F: FnOnce(&mut Self) -> R,
{ {
let namespace = self.maybe_thunk(namespace);
self.scopes.push(Scope::With(namespace)); self.scopes.push(Scope::With(namespace));
let mut guard = ScopeGuard { ctx: self }; let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx()) f(guard.as_ctx())
} }
fn push_dep_tracker(&mut self, slots: &[ExprId]) { fn push_thunk_scope(&mut self) {
let mut graph = DiGraphMap::new(); self.thunk_scopes.push(Vec::new());
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
} }
self.dep_tracker_stack.push(DependencyTracker { fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)> {
graph, self.thunk_scopes
current_binding: None,
let_scope_exprs,
owner_binding: None,
});
}
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
let mut graph = DiGraphMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
graph,
current_binding: None,
let_scope_exprs,
owner_binding: Some(owner),
});
}
fn get_current_binding(&self) -> Option<ExprId> {
self.dep_tracker_stack
.last()
.and_then(|t| t.current_binding)
}
fn set_current_binding(&mut self, expr: Option<ExprId>) {
if let Some(tracker) = self.dep_tracker_stack.last_mut() {
tracker.current_binding = expr;
}
}
fn pop_dep_tracker(&mut self) -> Result<SccInfo> {
let tracker = self
.dep_tracker_stack
.pop() .pop()
.expect("pop_dep_tracker without active tracker"); .expect("pop_thunk_scope without active scope")
use petgraph::algo::kosaraju_scc;
let sccs = kosaraju_scc(&tracker.graph);
let mut sccs_topo = Vec::new();
for scc_nodes in sccs.iter() {
let mut scc_exprs = Vec::new();
let mut is_recursive = scc_nodes.len() > 1;
for &expr in scc_nodes {
scc_exprs.push(expr);
if !is_recursive && tracker.graph.contains_edge(expr, expr) {
is_recursive = true;
}
} }
sccs_topo.push((scc_exprs, is_recursive)); fn register_thunk(&mut self, slot: ExprId, inner: ExprId) {
} self.thunk_scopes
.last_mut()
Ok(SccInfo { sccs: sccs_topo }) .expect("register_thunk without active scope")
.push((slot, inner));
} }
} }

View File

@@ -44,6 +44,13 @@ impl From<Source> for NamedSource<Arc<str>> {
} }
impl Source { impl Source {
pub fn new_file(path: PathBuf) -> std::io::Result<Self> {
Ok(Source {
src: std::fs::read_to_string(&path)?.into(),
ty: crate::error::SourceType::File(Arc::new(path)),
})
}
pub fn new_eval(src: String) -> Result<Self> { pub fn new_eval(src: String) -> Result<Self> {
Ok(Self { Ok(Self {
ty: std::env::current_dir() ty: std::env::current_dir()
@@ -214,8 +221,8 @@ pub struct StackFrame {
pub src: NamedSource<Arc<str>>, pub src: NamedSource<Arc<str>>,
} }
const MAX_STACK_FRAMES: usize = 10; const MAX_STACK_FRAMES: usize = 20;
const FRAMES_AT_START: usize = 5; const FRAMES_AT_START: usize = 15;
const FRAMES_AT_END: usize = 5; const FRAMES_AT_END: usize = 5;
pub(crate) fn parse_js_error(error: Box<JsError>, ctx: &impl RuntimeContext) -> Error { pub(crate) fn parse_js_error(error: Box<JsError>, ctx: &impl RuntimeContext) -> Error {
@@ -234,7 +241,11 @@ pub(crate) fn parse_js_error(error: Box<JsError>, ctx: &impl RuntimeContext) ->
} else { } else {
(None, None, Vec::new()) (None, None, Vec::new())
}; };
let stack_trace = truncate_stack_trace(frames); let stack_trace = if std::env::var("NIX_JS_STACK_TRACE").is_ok() {
truncate_stack_trace(frames)
} else {
Vec::new()
};
let message = error.get_message().to_string(); let message = error.get_message().to_string();
let js_backtrace = error.stack.map(|stack| { let js_backtrace = error.stack.map(|stack| {
stack stack
@@ -272,7 +283,7 @@ fn parse_frames(stack: &str, ctx: &impl RuntimeContext) -> Vec<NixStackFrame> {
let mut frames = Vec::new(); let mut frames = Vec::new();
for line in stack.lines() { for line in stack.lines() {
// Format: NIX_STACK_FRAME:start:end[:extra_data] // Format: NIX_STACK_FRAME:source_id:start:end[:extra_data]
let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else { let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else {
continue; continue;
}; };

View File

@@ -3,9 +3,7 @@ use hashbrown::HashMap;
use rnix::{TextRange, ast}; use rnix::{TextRange, ast};
use string_interner::symbol::SymbolU32; use string_interner::symbol::SymbolU32;
use crate::context::SccInfo; use crate::error::{Result, Source};
use crate::error::{Error, Result, Source};
use crate::value::format_symbol;
use nix_js_macros::ir; use nix_js_macros::ir;
mod downgrade; mod downgrade;
@@ -29,7 +27,6 @@ pub trait DowngradeContext {
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>; fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>;
fn get_ir(&self, id: ExprId) -> &Ir; fn get_ir(&self, id: ExprId) -> &Ir;
fn extract_ir(&mut self, id: ExprId) -> Ir;
fn replace_ir(&mut self, id: ExprId, expr: Ir); fn replace_ir(&mut self, id: ExprId, expr: Ir);
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>; fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
fn get_current_source(&self) -> Source; fn get_current_source(&self) -> Source;
@@ -44,11 +41,9 @@ pub trait DowngradeContext {
where where
F: FnOnce(&mut Self) -> R; F: FnOnce(&mut Self) -> R;
fn push_dep_tracker(&mut self, slots: &[ExprId]); fn push_thunk_scope(&mut self);
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId); fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)>;
fn get_current_binding(&self) -> Option<ExprId>; fn register_thunk(&mut self, slot: ExprId, inner: ExprId);
fn set_current_binding(&mut self, expr: Option<ExprId>);
fn pop_dep_tracker(&mut self) -> Result<SccInfo>;
} }
ir! { ir! {
@@ -69,147 +64,17 @@ ir! {
If { pub cond: ExprId, pub consq: ExprId, pub alter: ExprId }, If { pub cond: ExprId, pub consq: ExprId, pub alter: ExprId },
Call { pub func: ExprId, pub arg: ExprId }, Call { pub func: ExprId, pub arg: ExprId },
Assert { pub assertion: ExprId, pub expr: ExprId, pub assertion_raw: String }, Assert { pub assertion: ExprId, pub expr: ExprId, pub assertion_raw: String },
ConcatStrings { pub parts: Vec<ExprId> }, ConcatStrings { pub parts: Vec<ExprId>, pub force_string: bool },
Path { pub expr: ExprId }, Path { pub expr: ExprId },
Func { pub body: ExprId, pub param: Option<Param>, pub arg: ExprId }, Func { pub body: ExprId, pub param: Option<Param>, pub arg: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
Let { pub binding_sccs: SccInfo, pub body: ExprId }, TopLevel { pub body: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
Arg(ArgId), Arg(ArgId),
ExprRef(ExprId),
Thunk(ExprId), Thunk(ExprId),
Builtins, Builtins,
Builtin(SymId), Builtin(SymId),
CurPos, CurPos,
} }
impl Ir {
pub fn span(&self) -> TextRange {
match self {
Ir::Int(i) => i.span,
Ir::Float(f) => f.span,
Ir::Bool(b) => b.span,
Ir::Null(n) => n.span,
Ir::Str(s) => s.span,
Ir::AttrSet(a) => a.span,
Ir::List(l) => l.span,
Ir::HasAttr(h) => h.span,
Ir::BinOp(b) => b.span,
Ir::UnOp(u) => u.span,
Ir::Select(s) => s.span,
Ir::If(i) => i.span,
Ir::Call(c) => c.span,
Ir::Assert(a) => a.span,
Ir::ConcatStrings(c) => c.span,
Ir::Path(p) => p.span,
Ir::Func(f) => f.span,
Ir::Let(l) => l.span,
Ir::Arg(a) => a.span,
Ir::ExprRef(e) => e.span,
Ir::Thunk(t) => t.span,
Ir::Builtins(b) => b.span,
Ir::Builtin(b) => b.span,
Ir::CurPos(c) => c.span,
}
}
}
impl AttrSet {
fn _insert(
&mut self,
mut path: impl Iterator<Item = Attr>,
name: Attr,
value: ExprId,
ctx: &mut impl DowngradeContext,
) -> Result<()> {
if let Some(attr) = path.next() {
// If the path is not yet exhausted, we need to recurse deeper.
match attr {
Attr::Str(ident, span) => {
// If the next attribute is a static string.
if let Some(&(id, _)) = self.stcs.get(&ident) {
// If a sub-attrset already exists, recurse into it.
let mut ir = ctx.extract_ir(id);
let result = ir
.as_mut()
.try_unwrap_attr_set()
.map_err(|_| {
// This path segment exists but is not an attrset, which is an error.
Error::downgrade_error(format!(
"attribute '{}' already defined but is not an attribute set",
format_symbol(ctx.get_sym(ident)),
),
ctx.get_current_source(),
span
)
})
.and_then(|attrs| attrs._insert(path, name, value, ctx));
ctx.replace_ir(id, ir);
result?;
} else {
// Create a new sub-attrset because this path doesn't exist yet.
// FIXME: span
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span,
};
attrs._insert(path, name, value, ctx)?;
let attrs_expr = ctx.new_expr(attrs.to_ir());
self.stcs.insert(ident, (attrs_expr, span));
}
Ok(())
}
Attr::Dynamic(dynamic, span) => {
// If the next attribute is a dynamic expression, we must create a new sub-attrset.
// We cannot merge with existing dynamic attributes at this stage.
// FIXME: span
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span,
};
attrs._insert(path, name, value, ctx)?;
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir()), span));
Ok(())
}
}
} else {
// This is the final attribute in the path, so insert the value here.
match name {
Attr::Str(ident, span) => {
if self.stcs.insert(ident, (value, span)).is_some() {
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(ident)),
),
ctx.get_current_source(),
span,
));
}
}
Attr::Dynamic(dynamic, span) => {
self.dyns.push((dynamic, value, span));
}
}
Ok(())
}
}
fn insert(
&mut self,
path: Vec<Attr>,
value: ExprId,
ctx: &mut impl DowngradeContext,
) -> Result<()> {
let mut path = path.into_iter();
// The last part of the path is the name of the attribute to be inserted.
let name = path
.next_back()
.expect("empty attrpath passed. this is a bug");
self._insert(path, name, value, ctx)
}
}
#[repr(transparent)] #[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ExprId(pub usize); pub struct ExprId(pub usize);
@@ -221,6 +86,7 @@ pub type SymId = SymbolU32;
pub struct ArgId(pub usize); pub struct ArgId(pub usize);
/// Represents a key in an attribute path. /// Represents a key in an attribute path.
#[allow(unused)]
#[derive(Debug, TryUnwrap)] #[derive(Debug, TryUnwrap)]
pub enum Attr { pub enum Attr {
/// A dynamic attribute key, which is an expression that must evaluate to a string. /// A dynamic attribute key, which is an expression that must evaluate to a string.
@@ -307,7 +173,7 @@ impl From<ast::UnaryOpKind> for UnOpKind {
/// Describes the parameters of a function. /// Describes the parameters of a function.
#[derive(Debug)] #[derive(Debug)]
pub struct Param { pub struct Param {
pub required: Vec<SymId>, pub required: Vec<(SymId, TextRange)>,
pub optional: Vec<SymId>, pub optional: Vec<(SymId, TextRange)>,
pub ellipsis: bool, pub ellipsis: bool,
} }

View File

@@ -139,7 +139,14 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
part part
} }
} else { } else {
ctx.new_expr(ConcatStrings { parts, span }.to_ir()) ctx.new_expr(
ConcatStrings {
parts,
span,
force_string: false,
}
.to_ir(),
)
}; };
Ok(ctx.new_expr(Path { expr, span }.to_ir())) Ok(ctx.new_expr(Path { expr, span }.to_ir()))
} }
@@ -158,7 +165,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())), ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
ast::InterpolPart::Interpolation(interpol) => { ast::InterpolPart::Interpolation(interpol) => {
let inner = interpol.expr().unwrap().downgrade(ctx)?; let inner = interpol.expr().unwrap().downgrade(ctx)?;
Ok(ctx.new_expr(Thunk { inner, span }.to_ir())) Ok(ctx.maybe_thunk(inner))
} }
}) })
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
@@ -166,7 +173,14 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
Ok(if is_single_literal { Ok(if is_single_literal {
parts.into_iter().next().unwrap() parts.into_iter().next().unwrap()
} else { } else {
ctx.new_expr(ConcatStrings { parts, span }.to_ir()) ctx.new_expr(
ConcatStrings {
parts,
span,
force_string: true,
}
.to_ir(),
)
}) })
} }
} }
@@ -220,31 +234,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
// rec { a = 1; b = a; } => let a = 1; b = a; in { inherit a b; } // rec { a = 1; b = a; } => let a = 1; b = a; in { inherit a b; }
let entries: Vec<_> = self.entries().collect(); let entries: Vec<_> = self.entries().collect();
let (binding_sccs, body) = downgrade_let_bindings(entries, ctx, |ctx, binding_keys| { downgrade_rec_bindings(entries, ctx, span)
// Create plain attrset as body with inherit
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span,
};
for sym in binding_keys {
// FIXME: span
let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, (expr, synthetic_span()));
}
Ok(ctx.new_expr(attrs.to_ir()))
})?;
Ok(ctx.new_expr(
Let {
body,
binding_sccs,
span,
}
.to_ir(),
))
} }
} }
@@ -308,17 +298,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
let expr = self.expr().unwrap().downgrade(ctx)?; let expr = self.expr().unwrap().downgrade(ctx)?;
let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?; let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let default = if let Some(default) = self.default_expr() { let default = if let Some(default) = self.default_expr() {
let span = default.syntax().text_range();
let default_expr = default.downgrade(ctx)?; let default_expr = default.downgrade(ctx)?;
Some( Some(ctx.maybe_thunk(default_expr))
ctx.new_expr(
Thunk {
inner: default_expr,
span,
}
.to_ir(),
),
)
} else { } else {
None None
}; };
@@ -340,10 +321,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet { impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> { fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range(); let span = self.syntax().text_range();
let bindings = downgrade_static_attrs(self, ctx)?; let entries: Vec<_> = self.entries().collect();
let binding_keys: Vec<_> = bindings.keys().copied().collect(); let attrset_expr = downgrade_let_bindings(entries, ctx, span, |ctx, binding_keys| {
// Create plain attrset as body with inherit
let attrset_expr = ctx.with_let_scope(bindings, |ctx| {
let mut attrs = AttrSet { let mut attrs = AttrSet {
stcs: HashMap::new(), stcs: HashMap::new(),
dyns: Vec::new(), dyns: Vec::new(),
@@ -351,12 +331,11 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
}; };
for sym in binding_keys { for sym in binding_keys {
// FIXME: span let expr = ctx.lookup(*sym, synthetic_span())?;
let expr = ctx.lookup(sym, synthetic_span())?; attrs.stcs.insert(*sym, (expr, synthetic_span()));
attrs.stcs.insert(sym, (expr, synthetic_span()));
} }
Result::Ok(ctx.new_expr(attrs.to_ir())) Ok(ctx.new_expr(attrs.to_ir()))
})?; })?;
let body_sym = ctx.new_sym("body".to_string()); let body_sym = ctx.new_sym("body".to_string());
@@ -378,17 +357,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
let body_expr = self.body().unwrap(); let body_expr = self.body().unwrap();
let span = self.syntax().text_range(); let span = self.syntax().text_range();
let (binding_sccs, body) = downgrade_let_bindings(entries, ctx, span, |ctx, _binding_keys| {
downgrade_let_bindings(entries, ctx, |ctx, _binding_keys| body_expr.downgrade(ctx))?; body_expr.downgrade(ctx)
})
Ok(ctx.new_expr(
Let {
body,
binding_sccs,
span,
}
.to_ir(),
))
} }
} }
@@ -412,9 +383,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
let raw_param = self.param().unwrap(); let raw_param = self.param().unwrap();
let arg = ctx.new_arg(raw_param.syntax().text_range()); let arg = ctx.new_arg(raw_param.syntax().text_range());
ctx.push_thunk_scope();
let param; let param;
let body; let body;
let span = self.body().unwrap().syntax().text_range();
match raw_param { match raw_param {
ast::Param::IdentParam(id) => { ast::Param::IdentParam(id) => {
@@ -436,7 +408,6 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
let PatternBindings { let PatternBindings {
body: inner_body, body: inner_body,
scc_info,
required, required,
optional, optional,
} = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| { } = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
@@ -449,24 +420,18 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
ellipsis, ellipsis,
}); });
body = ctx.new_expr( body = inner_body;
Let {
body: inner_body,
binding_sccs: scc_info,
span,
}
.to_ir(),
);
} }
} }
let thunks = ctx.pop_thunk_scope();
let span = self.syntax().text_range(); let span = self.syntax().text_range();
// The function's body and parameters are now stored directly in the `Func` node.
Ok(ctx.new_expr( Ok(ctx.new_expr(
Func { Func {
body, body,
param, param,
arg, arg,
thunks,
span, span,
} }
.to_ir(), .to_ir(),

File diff suppressed because it is too large Load Diff

View File

@@ -41,6 +41,45 @@ pub fn nix_base32_encode(bytes: &[u8]) -> String {
result result
} }
pub fn nix_base32_decode(input: &str) -> Option<Vec<u8>> {
let len = input.len() * 5 / 8;
let mut bytes = vec![0u8; len];
for (n, ch) in input.chars().rev().enumerate() {
let digit = NIX_BASE32_CHARS.iter().position(|&c| c == ch as u8)? as u16;
let b = n * 5;
let i = b / 8;
let j = b % 8;
if i < len {
bytes[i] |= (digit << j) as u8;
}
if j > 3 && i + 1 < len {
bytes[i + 1] |= (digit >> (8 - j)) as u8;
}
}
Some(bytes)
}
pub fn decode_hash_to_hex(hash_str: &str) -> Option<String> {
if let Some(rest) = hash_str.strip_prefix("sha256:") {
return decode_hash_to_hex(rest);
}
if let Some(base64_str) = hash_str.strip_prefix("sha256-") {
use base64::{Engine, engine::general_purpose::STANDARD};
let bytes = STANDARD.decode(base64_str).ok()?;
return Some(hex::encode(bytes));
}
if hash_str.len() == 64 && hash_str.chars().all(|c| c.is_ascii_hexdigit()) {
return Some(hash_str.to_string());
}
if hash_str.len() == 52 {
let bytes = nix_base32_decode(hash_str)?;
return Some(hex::encode(bytes));
}
None
}
pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String { pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name); let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);
@@ -54,6 +93,20 @@ pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) ->
format!("{}/{}-{}", store_dir, encoded, name) format!("{}/{}-{}", store_dir, encoded, name)
} }
pub fn make_text_store_path(
store_dir: &str,
hash_hex: &str,
name: &str,
references: &[String],
) -> String {
let mut ty = String::from("text");
for reference in references {
ty.push(':');
ty.push_str(reference);
}
make_store_path(store_dir, &ty, hash_hex, name)
}
pub fn output_path_name(drv_name: &str, output_name: &str) -> String { pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
if output_name == "out" { if output_name == "out" {
drv_name.to_string() drv_name.to_string()

View File

@@ -49,8 +49,10 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_path_exists(), op_path_exists(),
op_resolve_path(), op_resolve_path(),
op_sha256_hex(), op_sha256_hex(),
op_make_placeholder(),
op_decode_span::<Ctx>(), op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(), op_make_store_path::<Ctx>(),
op_make_text_store_path::<Ctx>(),
op_output_path_name(), op_output_path_name(),
op_make_fixed_output_path::<Ctx>(), op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(), op_add_path::<Ctx>(),
@@ -58,6 +60,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_to_file::<Ctx>(), op_to_file::<Ctx>(),
op_copy_path_to_store::<Ctx>(), op_copy_path_to_store::<Ctx>(),
op_get_env(), op_get_env(),
op_walk_dir(),
op_add_filtered_path::<Ctx>(),
]; ];
ops.extend(crate::fetcher::register_ops::<Ctx>()); ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -130,22 +134,19 @@ fn op_import<Ctx: RuntimeContext>(
} }
let current_dir = ctx.get_current_dir(); let current_dir = ctx.get_current_dir();
let mut absolute_path = current_dir let mut absolute_path = current_dir.join(&path);
.join(&path) // Do NOT resolve symlinks (eval-okay-symlink-resolution)
.canonicalize() // TODO: is this correct?
.map_err(|e| format!("Failed to resolve path {}: {}", path, e))?; // .canonicalize()
// .map_err(|e| format!("Failed to resolve path {}: {}", path, e))?;
if absolute_path.is_dir() { if absolute_path.is_dir() {
absolute_path.push("default.nix") absolute_path.push("default.nix")
} }
tracing::info!("Importing file: {}", absolute_path.display()); tracing::info!("Importing file: {}", absolute_path.display());
let content = std::fs::read_to_string(absolute_path.as_path()) let source = Source::new_file(absolute_path.clone())
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?; .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
let source = Source {
ty: crate::error::SourceType::File(absolute_path.into()),
src: content.into(),
};
tracing::debug!("Compiling file"); tracing::debug!("Compiling file");
ctx.add_source(source.clone()); ctx.add_source(source.clone());
@@ -161,7 +162,17 @@ fn op_read_file(#[string] path: String) -> std::result::Result<String, NixError>
#[deno_core::op2(fast)] #[deno_core::op2(fast)]
fn op_path_exists(#[string] path: String) -> bool { fn op_path_exists(#[string] path: String) -> bool {
std::path::Path::new(&path).exists() let must_be_dir = path.ends_with('/') || path.ends_with("/.");
let p = Path::new(&path);
if must_be_dir {
match std::fs::metadata(p) {
Ok(m) => m.is_dir(),
Err(_) => false,
}
} else {
std::fs::symlink_metadata(p).is_ok()
}
} }
#[deno_core::op2] #[deno_core::op2]
@@ -274,6 +285,18 @@ fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_hash::sha256_hex(&data) crate::nix_hash::sha256_hex(&data)
} }
#[deno_core::op2]
#[string]
fn op_make_placeholder(#[string] output: String) -> String {
use sha2::{Digest, Sha256};
let input = format!("nix-output:{}", output);
let mut hasher = Sha256::new();
hasher.update(input.as_bytes());
let hash: [u8; 32] = hasher.finalize().into();
let encoded = crate::nix_hash::nix_base32_encode(&hash);
format!("/{}", encoded)
}
#[deno_core::op2] #[deno_core::op2]
#[serde] #[serde]
fn op_decode_span<Ctx: RuntimeContext>( fn op_decode_span<Ctx: RuntimeContext>(
@@ -338,6 +361,20 @@ fn op_make_store_path<Ctx: RuntimeContext>(
crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name) crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
} }
#[deno_core::op2]
#[string]
fn op_make_text_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] hash_hex: String,
#[string] name: String,
#[serde] references: Vec<String>,
) -> String {
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
crate::nix_hash::make_text_store_path(store_dir, &hash_hex, &name, &references)
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String) -> String { fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String) -> String {
@@ -400,7 +437,8 @@ fn op_add_path<Ctx: RuntimeContext>(
}); });
let computed_hash = if recursive { let computed_hash = if recursive {
compute_nar_hash(path_obj)? crate::nar::compute_nar_hash(path_obj)
.map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
} else { } else {
if !path_obj.is_file() { if !path_obj.is_file() {
return Err(NixError::from( return Err(NixError::from(
@@ -415,60 +453,35 @@ fn op_add_path<Ctx: RuntimeContext>(
hex::encode(hasher.finalize()) hex::encode(hasher.finalize())
}; };
if let Some(expected_hash) = sha256 if let Some(expected_hash) = sha256 {
&& computed_hash != expected_hash let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
{ .ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
if computed_hash != expected_hex {
return Err(NixError::from(format!( return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}", "hash mismatch for path '{}': expected {}, got {}",
path, expected_hash, computed_hash path, expected_hex, computed_hash
))); )));
} }
}
let ctx: &Ctx = state.get_ctx(); let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store(); let store = ctx.get_store();
let store_path = store let store_path = if recursive {
store
.add_to_store_from_path(&computed_name, path_obj, vec![]) .add_to_store_from_path(&computed_name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?; .map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?
} else {
let contents = fs::read(path_obj)
.map_err(|e| NixError::from(format!("failed to read '{}': {}", path, e)))?;
store
.add_to_store(&computed_name, &contents, false, vec![])
.map_err(|e| NixError::from(format!("failed to add to store: {}", e)))?
};
Ok(store_path) Ok(store_path)
} }
fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixError> {
use sha2::{Digest, Sha256};
use std::fs;
if path.is_file() {
let contents =
fs::read(path).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
Ok(hex::encode(hasher.finalize()))
} else if path.is_dir() {
let mut entries: Vec<_> = fs::read_dir(path)
.map_err(|e| NixError::from(format!("failed to read directory: {}", e)))?
.filter_map(std::result::Result::ok)
.collect();
entries.sort_by_key(|e| e.file_name());
let mut hasher = Sha256::new();
for entry in entries {
let entry_path = entry.path();
let entry_name = entry.file_name();
hasher.update(entry_name.to_string_lossy().as_bytes());
let entry_hash = compute_nar_hash(&entry_path)?;
hasher.update(entry_hash.as_bytes());
}
Ok(hex::encode(hasher.finalize()))
} else {
Ok(String::new())
}
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_store_path<Ctx: RuntimeContext>( fn op_store_path<Ctx: RuntimeContext>(
@@ -539,7 +552,167 @@ fn op_copy_path_to_store<Ctx: RuntimeContext>(
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_get_env(#[string] key: String) -> std::result::Result<String, NixError> { fn op_get_env(#[string] key: String) -> std::result::Result<String, NixError> {
Ok(std::env::var(key).map_err(|err| format!("Failed to read env var: {err}"))?) match std::env::var(key) {
Ok(val) => Ok(val),
Err(std::env::VarError::NotPresent) => Ok("".into()),
Err(err) => Err(format!("Failed to read env var: {err}").into()),
}
}
#[deno_core::op2]
#[serde]
fn op_walk_dir(#[string] path: String) -> std::result::Result<Vec<(String, String)>, NixError> {
fn walk_recursive(
base: &Path,
current: &Path,
results: &mut Vec<(String, String)>,
) -> std::result::Result<(), NixError> {
let entries = std::fs::read_dir(current)
.map_err(|e| NixError::from(format!("failed to read directory: {}", e)))?;
for entry in entries {
let entry =
entry.map_err(|e| NixError::from(format!("failed to read entry: {}", e)))?;
let path = entry.path();
let rel_path = path
.strip_prefix(base)
.map_err(|e| NixError::from(format!("failed to get relative path: {}", e)))?
.to_string_lossy()
.to_string();
let file_type = entry
.file_type()
.map_err(|e| NixError::from(format!("failed to get file type: {}", e)))?;
let type_str = if file_type.is_dir() {
"directory"
} else if file_type.is_symlink() {
"symlink"
} else {
"regular"
};
results.push((rel_path.clone(), type_str.to_string()));
if file_type.is_dir() {
walk_recursive(base, &path, results)?;
}
}
Ok(())
}
let path = Path::new(&path);
if !path.is_dir() {
return Err(NixError::from(format!(
"{} is not a directory",
path.display()
)));
}
let mut results = Vec::new();
walk_recursive(path, path, &mut results)?;
Ok(results)
}
#[deno_core::op2]
#[string]
fn op_add_filtered_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] src_path: String,
#[string] name: Option<String>,
recursive: bool,
#[string] sha256: Option<String>,
#[serde] include_paths: Vec<String>,
) -> std::result::Result<String, NixError> {
use sha2::{Digest, Sha256};
use std::fs;
let src = Path::new(&src_path);
if !src.exists() {
return Err(NixError::from(format!(
"path '{}' does not exist",
src_path
)));
}
let computed_name = name.unwrap_or_else(|| {
src.file_name()
.and_then(|n| n.to_str())
.unwrap_or("source")
.to_string()
});
let temp_dir = tempfile::tempdir()
.map_err(|e| NixError::from(format!("failed to create temp dir: {}", e)))?;
let dest = temp_dir.path().join(&computed_name);
fs::create_dir_all(&dest)
.map_err(|e| NixError::from(format!("failed to create dest dir: {}", e)))?;
for rel_path in &include_paths {
let src_file = src.join(rel_path);
let dest_file = dest.join(rel_path);
if let Some(parent) = dest_file.parent() {
fs::create_dir_all(parent)
.map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
}
let metadata = fs::symlink_metadata(&src_file)
.map_err(|e| NixError::from(format!("failed to read metadata: {}", e)))?;
if metadata.is_symlink() {
let target = fs::read_link(&src_file)
.map_err(|e| NixError::from(format!("failed to read symlink: {}", e)))?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, &dest_file)
.map_err(|e| NixError::from(format!("failed to create symlink: {}", e)))?;
#[cfg(not(unix))]
return Err(NixError::from("symlinks not supported on this platform"));
} else if metadata.is_dir() {
fs::create_dir_all(&dest_file)
.map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
} else {
fs::copy(&src_file, &dest_file)
.map_err(|e| NixError::from(format!("failed to copy file: {}", e)))?;
}
}
let computed_hash = if recursive {
crate::nar::compute_nar_hash(&dest)
.map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
} else {
if !dest.is_file() {
return Err(NixError::from(
"when 'recursive' is false, path must be a regular file",
));
}
let contents =
fs::read(&dest).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
hex::encode(hasher.finalize())
};
if let Some(expected_hash) = sha256 {
let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
.ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
if computed_hash != expected_hex {
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
src_path, expected_hex, computed_hash
)));
}
}
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.add_to_store_from_path(&computed_name, &dest, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?;
Ok(store_path)
} }
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
@@ -548,6 +721,7 @@ pub(crate) struct Runtime<Ctx: RuntimeContext> {
primop_metadata_symbol: v8::Global<v8::Symbol>, primop_metadata_symbol: v8::Global<v8::Symbol>,
has_context_symbol: v8::Global<v8::Symbol>, has_context_symbol: v8::Global<v8::Symbol>,
is_path_symbol: v8::Global<v8::Symbol>, is_path_symbol: v8::Global<v8::Symbol>,
is_cycle_symbol: v8::Global<v8::Symbol>,
_marker: PhantomData<Ctx>, _marker: PhantomData<Ctx>,
} }
@@ -556,6 +730,8 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
// Initialize V8 once // Initialize V8 once
static INIT: Once = Once::new(); static INIT: Once = Once::new();
INIT.call_once(|| { INIT.call_once(|| {
// First flag is always not recognized
assert_eq!(deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]), [""]);
JsRuntime::init_platform( JsRuntime::init_platform(
Some(v8::new_default_platform(0, false).make_shared()), Some(v8::new_default_platform(0, false).make_shared()),
false, false,
@@ -567,7 +743,13 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
..Default::default() ..Default::default()
}); });
let (is_thunk_symbol, primop_metadata_symbol, has_context_symbol, is_path_symbol) = { let (
is_thunk_symbol,
primop_metadata_symbol,
has_context_symbol,
is_path_symbol,
is_cycle_symbol,
) = {
deno_core::scope!(scope, &mut js_runtime); deno_core::scope!(scope, &mut js_runtime);
Self::get_symbols(scope)? Self::get_symbols(scope)?
}; };
@@ -578,6 +760,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
primop_metadata_symbol, primop_metadata_symbol,
has_context_symbol, has_context_symbol,
is_path_symbol, is_path_symbol,
is_cycle_symbol,
_marker: PhantomData, _marker: PhantomData,
}) })
} }
@@ -605,6 +788,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let primop_metadata_symbol = v8::Local::new(scope, &self.primop_metadata_symbol); let primop_metadata_symbol = v8::Local::new(scope, &self.primop_metadata_symbol);
let has_context_symbol = v8::Local::new(scope, &self.has_context_symbol); let has_context_symbol = v8::Local::new(scope, &self.has_context_symbol);
let is_path_symbol = v8::Local::new(scope, &self.is_path_symbol); let is_path_symbol = v8::Local::new(scope, &self.is_path_symbol);
let is_cycle_symbol = v8::Local::new(scope, &self.is_cycle_symbol);
Ok(to_value( Ok(to_value(
local_value, local_value,
@@ -613,10 +797,11 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
primop_metadata_symbol, primop_metadata_symbol,
has_context_symbol, has_context_symbol,
is_path_symbol, is_path_symbol,
is_cycle_symbol,
)) ))
} }
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH) /// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH, IS_CYCLE)
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
fn get_symbols( fn get_symbols(
scope: &ScopeRef, scope: &ScopeRef,
@@ -625,6 +810,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
v8::Global<v8::Symbol>, v8::Global<v8::Symbol>,
v8::Global<v8::Symbol>, v8::Global<v8::Symbol>,
v8::Global<v8::Symbol>, v8::Global<v8::Symbol>,
v8::Global<v8::Symbol>,
)> { )> {
let global = scope.get_current_context().global(scope); let global = scope.get_current_context().global(scope);
let nix_key = v8::String::new(scope, "Nix") let nix_key = v8::String::new(scope, "Nix")
@@ -655,8 +841,9 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let primop_metadata = get_symbol("PRIMOP_METADATA")?; let primop_metadata = get_symbol("PRIMOP_METADATA")?;
let has_context = get_symbol("HAS_CONTEXT")?; let has_context = get_symbol("HAS_CONTEXT")?;
let is_path = get_symbol("IS_PATH")?; let is_path = get_symbol("IS_PATH")?;
let is_cycle = get_symbol("IS_CYCLE")?;
Ok((is_thunk, primop_metadata, has_context, is_path)) Ok((is_thunk, primop_metadata, has_context, is_path, is_cycle))
} }
} }
@@ -667,6 +854,7 @@ fn to_value<'a>(
primop_metadata_symbol: LocalSymbol<'a>, primop_metadata_symbol: LocalSymbol<'a>,
has_context_symbol: LocalSymbol<'a>, has_context_symbol: LocalSymbol<'a>,
is_path_symbol: LocalSymbol<'a>, is_path_symbol: LocalSymbol<'a>,
is_cycle_symbol: LocalSymbol<'a>,
) -> Value { ) -> Value {
match () { match () {
_ if val.is_big_int() => { _ if val.is_big_int() => {
@@ -703,6 +891,7 @@ fn to_value<'a>(
primop_metadata_symbol, primop_metadata_symbol,
has_context_symbol, has_context_symbol,
is_path_symbol, is_path_symbol,
is_cycle_symbol,
) )
}) })
.collect(); .collect();
@@ -720,6 +909,10 @@ fn to_value<'a>(
return Value::Thunk; return Value::Thunk;
} }
if is_cycle(val, scope, is_cycle_symbol) {
return Value::Repeated;
}
if let Some(path_val) = extract_path(val, scope, is_path_symbol) { if let Some(path_val) = extract_path(val, scope, is_path_symbol) {
return Value::Path(path_val); return Value::Path(path_val);
} }
@@ -749,6 +942,7 @@ fn to_value<'a>(
primop_metadata_symbol, primop_metadata_symbol,
has_context_symbol, has_context_symbol,
is_path_symbol, is_path_symbol,
is_cycle_symbol,
), ),
) )
}) })
@@ -768,6 +962,15 @@ fn is_thunk<'a>(val: LocalValue<'a>, scope: &ScopeRef<'a, '_>, symbol: LocalSymb
matches!(obj.get(scope, symbol.into()), Some(v) if v.is_true()) matches!(obj.get(scope, symbol.into()), Some(v) if v.is_true())
} }
fn is_cycle<'a>(val: LocalValue<'a>, scope: &ScopeRef<'a, '_>, symbol: LocalSymbol<'a>) -> bool {
if !val.is_object() {
return false;
}
let obj = val.to_object(scope).expect("infallible conversion");
matches!(obj.get(scope, symbol.into()), Some(v) if v.is_true())
}
fn extract_string_with_context<'a>( fn extract_string_with_context<'a>(
val: LocalValue<'a>, val: LocalValue<'a>,
scope: &ScopeRef<'a, '_>, scope: &ScopeRef<'a, '_>,

View File

@@ -35,7 +35,7 @@ impl Display for Symbol {
if self.normal() { if self.normal() {
write!(f, "{}", self.0) write!(f, "{}", self.0)
} else { } else {
write!(f, r#""{}""#, self.0) write!(f, "{}", escape_quote_string(&self.0))
} }
} }
} }
@@ -117,15 +117,27 @@ impl Debug for AttrSet {
impl Display for AttrSet { impl Display for AttrSet {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
use Value::*;
write!(f, "{{")?; write!(f, "{{")?;
for (k, v) in self.data.iter() { for (k, v) in self.data.iter() {
write!(f, " {k} = ")?; write!(f, " {k} = {v};")?;
match v {
List(_) => write!(f, "[ ... ];")?,
AttrSet(_) => write!(f, "{{ ... }};")?,
v => write!(f, "{v};")?,
} }
write!(f, " }}")
}
}
impl AttrSet {
pub fn display_compat(&self) -> AttrSetCompatDisplay<'_> {
AttrSetCompatDisplay(self)
}
}
pub struct AttrSetCompatDisplay<'a>(&'a AttrSet);
impl Display for AttrSetCompatDisplay<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "{{")?;
for (k, v) in self.0.data.iter() {
write!(f, " {k} = {};", v.display_compat())?;
} }
write!(f, " }}") write!(f, " }}")
} }
@@ -159,6 +171,24 @@ impl Display for List {
} }
} }
impl List {
    /// Returns a wrapper that renders this list in the alternate "compat"
    /// format used when comparing against the language test suite's
    /// expected-output files.
    pub fn display_compat(&self) -> ListCompatDisplay<'_> {
        ListCompatDisplay(self)
    }
}

/// Compat `Display` adapter for [`List`]: renders `[ e1 e2 ... ]` with each
/// element formatted through its own `display_compat`.
pub struct ListCompatDisplay<'a>(&'a List);

impl Display for ListCompatDisplay<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        f.write_str("[ ")?;
        for item in self.0.data.iter() {
            write!(f, "{} ", item.display_compat())?;
        }
        f.write_str("]")
    }
}
/// Represents any possible Nix value that can be returned from an evaluation. /// Represents any possible Nix value that can be returned from an evaluation.
#[derive(IsVariant, Unwrap, Clone, Debug, PartialEq)] #[derive(IsVariant, Unwrap, Clone, Debug, PartialEq)]
pub enum Value { pub enum Value {
@@ -191,6 +221,25 @@ pub enum Value {
Repeated, Repeated,
} }
/// Renders `s` as a double-quoted Nix string literal.
///
/// Escapes backslash, double quote, newline, carriage return, and tab, and
/// escapes a `$` only when it is immediately followed by `{`, so the output
/// never contains an unescaped interpolation opener `${`. A lone `$` is
/// emitted verbatim.
fn escape_quote_string(s: &str) -> String {
    let mut out = String::with_capacity(s.len() + 2);
    out.push('"');
    let mut chars = s.chars().peekable();
    while let Some(c) = chars.next() {
        // Map each character to its escape sequence, if it needs one.
        let escape = match c {
            '\\' => Some("\\\\"),
            '"' => Some("\\\""),
            '\n' => Some("\\n"),
            '\r' => Some("\\r"),
            '\t' => Some("\\t"),
            // One-character lookahead: only `${` needs the dollar escaped.
            '$' if chars.peek() == Some(&'{') => Some("\\$"),
            _ => None,
        };
        match escape {
            Some(e) => out.push_str(e),
            None => out.push(c),
        }
    }
    out.push('"');
    out
}
impl Display for Value { impl Display for Value {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
use Value::*; use Value::*;
@@ -199,7 +248,7 @@ impl Display for Value {
&Float(x) => write!(f, "{x}"), &Float(x) => write!(f, "{x}"),
&Bool(x) => write!(f, "{x}"), &Bool(x) => write!(f, "{x}"),
Null => write!(f, "null"), Null => write!(f, "null"),
String(x) => write!(f, r#""{x}""#), String(x) => write!(f, "{}", escape_quote_string(x)),
Path(x) => write!(f, "{x}"), Path(x) => write!(f, "{x}"),
AttrSet(x) => write!(f, "{x}"), AttrSet(x) => write!(f, "{x}"),
List(x) => write!(f, "{x}"), List(x) => write!(f, "{x}"),
@@ -207,7 +256,36 @@ impl Display for Value {
Func => write!(f, "«lambda»"), Func => write!(f, "«lambda»"),
PrimOp(name) => write!(f, "«primop {name}»"), PrimOp(name) => write!(f, "«primop {name}»"),
PrimOpApp(name) => write!(f, "«partially applied primop {name}»"), PrimOpApp(name) => write!(f, "«partially applied primop {name}»"),
Repeated => write!(f, "<REPEATED>"), Repeated => write!(f, "«repeated»"),
}
}
}
impl Value {
pub fn display_compat(&self) -> ValueCompatDisplay<'_> {
ValueCompatDisplay(self)
}
}
pub struct ValueCompatDisplay<'a>(&'a Value);
impl Display for ValueCompatDisplay<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
use Value::*;
match self.0 {
&Int(x) => write!(f, "{x}"),
&Float(x) => write!(f, "{x}"),
&Bool(x) => write!(f, "{x}"),
Null => write!(f, "null"),
String(x) => write!(f, "{}", escape_quote_string(x)),
Path(x) => write!(f, "{x}"),
AttrSet(x) => write!(f, "{}", x.display_compat()),
List(x) => write!(f, "{}", x.display_compat()),
Thunk => write!(f, "«thunk»"),
Func => write!(f, "<LAMBDA>"),
PrimOp(_) => write!(f, "<PRIMOP>"),
PrimOpApp(_) => write!(f, "<PRIMOP-APP>"),
Repeated => write!(f, "«repeated»"),
} }
} }
} }

View File

@@ -3,6 +3,8 @@ mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::eval; use utils::eval;
use crate::utils::eval_result;
#[test] #[test]
fn arithmetic() { fn arithmetic() {
assert_eq!(eval("1 + 1"), Value::Int(2)); assert_eq!(eval("1 + 1"), Value::Int(2));
@@ -63,3 +65,8 @@ fn nested_let() {
Value::Int(3) Value::Int(3)
); );
} }
/// `inherit x;` inside an attribute set must fail to evaluate when `x` is
/// not bound in the enclosing lexical scope.
// NOTE(review): despite the `rec_` prefix in the test name, the expression
// under test is a plain (non-`rec`) attrset — confirm the name matches the
// intended scenario.
#[test]
fn rec_inherit_fails() {
    assert!(eval_result("{ inherit x; }").is_err());
}

View File

@@ -470,8 +470,8 @@ fn structured_attrs_basic() {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
assert!(attrs.contains_key("drvPath")); assert!(attrs.contains_key("drvPath"));
assert!(attrs.contains_key("outPath")); assert!(attrs.contains_key("outPath"));
assert!(!attrs.contains_key("foo")); assert!(attrs.contains_key("foo"));
assert!(!attrs.contains_key("count")); assert!(attrs.contains_key("count"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }
@@ -492,7 +492,7 @@ fn structured_attrs_nested() {
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
assert!(attrs.contains_key("drvPath")); assert!(attrs.contains_key("drvPath"));
assert!(!attrs.contains_key("data")); assert!(attrs.contains_key("data"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }
@@ -554,7 +554,7 @@ fn ignore_nulls_true() {
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
assert!(attrs.contains_key("foo")); assert!(attrs.contains_key("foo"));
assert!(!attrs.contains_key("nullValue")); assert!(attrs.contains_key("nullValue"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }
@@ -600,8 +600,8 @@ fn ignore_nulls_with_structured_attrs() {
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
assert!(attrs.contains_key("drvPath")); assert!(attrs.contains_key("drvPath"));
assert!(!attrs.contains_key("foo")); assert!(attrs.contains_key("foo"));
assert!(!attrs.contains_key("nullValue")); assert!(attrs.contains_key("nullValue"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }
@@ -627,8 +627,8 @@ fn all_features_combined() {
assert!(attrs.contains_key("out")); assert!(attrs.contains_key("out"));
assert!(attrs.contains_key("dev")); assert!(attrs.contains_key("dev"));
assert!(attrs.contains_key("outPath")); assert!(attrs.contains_key("outPath"));
assert!(!attrs.contains_key("data")); assert!(attrs.contains_key("data"));
assert!(!attrs.contains_key("nullValue")); assert!(attrs.contains_key("nullValue"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }
@@ -651,7 +651,7 @@ fn fixed_output_with_structured_attrs() {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
assert!(attrs.contains_key("outPath")); assert!(attrs.contains_key("outPath"));
assert!(attrs.contains_key("drvPath")); assert!(attrs.contains_key("drvPath"));
assert!(!attrs.contains_key("data")); assert!(attrs.contains_key("data"));
} }
_ => panic!("Expected AttrSet"), _ => panic!("Expected AttrSet"),
} }

348
nix-js/tests/lang.rs Normal file
View File

@@ -0,0 +1,348 @@
#![allow(non_snake_case)]
mod utils;
use std::path::PathBuf;
use nix_js::context::Context;
use nix_js::error::Source;
use nix_js::value::Value;
/// Path of the `tests/lang` fixture directory, resolved from this crate's
/// manifest directory (baked in at compile time via `CARGO_MANIFEST_DIR`).
fn get_lang_dir() -> PathBuf {
    let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    dir.push("tests/lang");
    dir
}
/// Evaluates the fixture `tests/lang/<name>.nix` by wrapping it in an
/// `import "<abs path>"` expression, returning the resulting value together
/// with the `Source` it was evaluated from. Context-creation and evaluation
/// errors are stringified into the `Err` variant.
fn eval_file(name: &str) -> Result<(Value, Source), String> {
    let nix_path = get_lang_dir().join(format!("{name}.nix"));
    // Build the import expression before `nix_path` is moved into `ty`.
    let expr = format!(r#"import "{}""#, nix_path.display());
    let mut ctx = Context::new().map_err(|e| e.to_string())?;
    let source = Source {
        ty: nix_js::error::SourceType::File(nix_path.into()),
        src: expr.into(),
    };
    match ctx.eval_code(source.clone()) {
        Ok(val) => Ok((val, source)),
        Err(e) => Err(e.to_string()),
    }
}
/// Reads `tests/lang/<name>.exp` and returns its contents with trailing
/// whitespace removed (so the comparison is insensitive to the final
/// newline of the expected-output file).
///
/// # Panics
/// Panics with the offending path and the underlying I/O error if the file
/// cannot be read, so a missing or unreadable fixture is immediately
/// attributable instead of the previous generic "expected file should
/// exist" message.
fn read_expected(name: &str) -> String {
    let exp_path = get_lang_dir().join(format!("{name}.exp"));
    let mut contents = std::fs::read_to_string(&exp_path)
        .unwrap_or_else(|e| panic!("failed to read expected file {}: {e}", exp_path.display()));
    // Trim in place instead of allocating a second String.
    contents.truncate(contents.trim_end().len());
    contents
}
/// Renders an evaluation result with the test-suite-compatible formatter,
/// producing the string that is compared against the `.exp` fixture.
fn format_value(value: &Value) -> String {
    format!("{}", value.display_compat())
}
/// Generates a `#[test]` for one `eval-okay-*` fixture of the language
/// test suite.
///
/// Expansion pieces:
/// * `$attr` — optional attributes (e.g. `#[ignore = "…"]`) forwarded onto
///   the generated test function.
/// * `$pre` — optional zero-argument closure run before evaluation (used by
///   fixtures that require environment variables to be set first).
///
/// The fixture name is derived from the Rust test name by prefixing
/// `eval-okay-`, mapping `_` to `-`, and stripping the `r#` raw-identifier
/// prefix (needed for names like `r#if`). On success, the value is rendered
/// with the compat formatter and the fixture directory's parent path is
/// rewritten to `/pwd` before comparison with the `.exp` file.
macro_rules! eval_okay_test {
    ($(#[$attr:meta])* $name:ident$(, $pre:expr)?) => {
        $(#[$attr])*
        #[test]
        fn $name() {
            // Optional per-test setup hook (e.g. env-var setting).
            $(($pre)();)?
            let test_name = concat!("eval-okay-", stringify!($name))
                .replace("_", "-")
                .replace("r#", "");
            let result = eval_file(&test_name);
            match result {
                Ok((value, source)) => {
                    let actual = format_value(&value);
                    // Normalize the absolute fixture location to the stable
                    // `/pwd` prefix used inside the expected-output files.
                    let actual = actual.replace(
                        source
                            .get_dir()
                            .parent()
                            .unwrap()
                            .to_string_lossy()
                            .as_ref(),
                        "/pwd",
                    );
                    let expected = read_expected(&test_name);
                    assert_eq!(actual, expected, "Output mismatch for {}", test_name);
                }
                Err(e) => {
                    panic!("Test {} failed to evaluate: {}", test_name, e);
                }
            }
        }
    };
}
/// Generates a `#[test]` for one `eval-fail-*` fixture: evaluating the
/// fixture must produce an error.
///
/// The Rust test names passed to this macro already carry the `fail_`
/// prefix (e.g. `eval_fail_test!(fail_abort)`), so only `eval-` is
/// prepended here: `fail_abort` maps to the on-disk fixture
/// `eval-fail-abort`. Prepending `eval-fail-` as well would derive
/// nonexistent `eval-fail-fail-*` paths, whose import errors would make
/// every fail-test pass vacuously without exercising the fixture at all.
macro_rules! eval_fail_test {
    ($name:ident) => {
        #[test]
        fn $name() {
            // Undo Rust identifier mangling: `_` → `-`, strip raw-ident `r#`.
            let test_name = concat!("eval-", stringify!($name))
                .replace("_", "-")
                .replace("r#", "");
            let result = eval_file(&test_name);
            assert!(
                result.is_err(),
                "Test {} should have failed but succeeded with: {:?}",
                test_name,
                result
            );
        }
    };
}
eval_okay_test!(any_all);
eval_okay_test!(arithmetic);
eval_okay_test!(attrnames);
eval_okay_test!(attrs);
eval_okay_test!(attrs2);
eval_okay_test!(attrs3);
eval_okay_test!(attrs4);
eval_okay_test!(attrs5);
eval_okay_test!(
#[ignore = "__overrides is not supported"]
attrs6
);
eval_okay_test!(
#[ignore = "requires --arg/--argstr CLI flags"]
autoargs
);
eval_okay_test!(backslash_newline_1);
eval_okay_test!(backslash_newline_2);
eval_okay_test!(baseNameOf);
eval_okay_test!(builtins);
eval_okay_test!(builtins_add);
eval_okay_test!(callable_attrs);
eval_okay_test!(catattrs);
eval_okay_test!(closure);
eval_okay_test!(comments);
eval_okay_test!(concat);
eval_okay_test!(concatmap);
eval_okay_test!(concatstringssep);
eval_okay_test!(context);
eval_okay_test!(context_introspection);
eval_okay_test!(
#[ignore = "not implemented: convertHash"]
convertHash
);
eval_okay_test!(curpos);
eval_okay_test!(deepseq);
eval_okay_test!(delayed_with);
eval_okay_test!(delayed_with_inherit);
eval_okay_test!(deprecate_cursed_or);
eval_okay_test!(derivation_legacy);
eval_okay_test!(dynamic_attrs);
eval_okay_test!(dynamic_attrs_2);
eval_okay_test!(dynamic_attrs_bare);
eval_okay_test!(elem);
eval_okay_test!(empty_args);
eval_okay_test!(eq);
eval_okay_test!(eq_derivations);
eval_okay_test!(filter);
eval_okay_test!(
#[ignore = "not implemented: flakeRefToString"]
flake_ref_to_string
);
eval_okay_test!(flatten);
eval_okay_test!(float);
eval_okay_test!(floor_ceil);
eval_okay_test!(foldlStrict);
eval_okay_test!(foldlStrict_lazy_elements);
eval_okay_test!(foldlStrict_lazy_initial_accumulator);
eval_okay_test!(fromjson);
eval_okay_test!(fromjson_escapes);
eval_okay_test!(
#[ignore = "not implemented: fromTOML"]
fromTOML
);
eval_okay_test!(
#[ignore = "not implemented: fromTOML"]
fromTOML_timestamps
);
eval_okay_test!(functionargs);
eval_okay_test!(
#[ignore = "not implemented: hashFile"]
hashfile
);
eval_okay_test!(
#[ignore = "not implemented: hashString"]
hashstring
);
eval_okay_test!(getattrpos);
eval_okay_test!(getattrpos_functionargs);
eval_okay_test!(getattrpos_undefined);
eval_okay_test!(getenv, || {
unsafe { std::env::set_var("TEST_VAR", "foo") };
});
eval_okay_test!(
#[ignore = "not implemented: hashString"]
groupBy
);
eval_okay_test!(r#if);
eval_okay_test!(ind_string);
eval_okay_test!(
#[ignore = "not implemented: scopedImport"]
import
);
eval_okay_test!(inherit_attr_pos);
eval_okay_test!(
#[ignore = "__overrides is not supported"]
inherit_from
);
eval_okay_test!(intersectAttrs);
eval_okay_test!(r#let);
eval_okay_test!(list);
eval_okay_test!(listtoattrs);
eval_okay_test!(logic);
eval_okay_test!(map);
eval_okay_test!(mapattrs);
eval_okay_test!(merge_dynamic_attrs);
eval_okay_test!(nested_with);
eval_okay_test!(new_let);
eval_okay_test!(null_dynamic_attrs);
eval_okay_test!(
#[ignore = "__overrides is not supported"]
overrides
);
eval_okay_test!(
#[ignore = "not implemented: parseFlakeRef"]
parse_flake_ref
);
eval_okay_test!(partition);
eval_okay_test!(path);
eval_okay_test!(pathexists);
eval_okay_test!(path_string_interpolation, || {
unsafe {
std::env::set_var("HOME", "/fake-home");
}
});
eval_okay_test!(patterns);
eval_okay_test!(print);
eval_okay_test!(readDir);
eval_okay_test!(readfile);
eval_okay_test!(readFileType);
eval_okay_test!(redefine_builtin);
eval_okay_test!(regex_match);
eval_okay_test!(regex_split);
eval_okay_test!(regression_20220122);
eval_okay_test!(regression_20220125);
eval_okay_test!(regrettable_rec_attrset_merge);
eval_okay_test!(remove);
eval_okay_test!(repeated_empty_attrs);
eval_okay_test!(repeated_empty_list);
eval_okay_test!(replacestrings);
eval_okay_test!(
#[ignore = "requires -I CLI flags"]
search_path
);
eval_okay_test!(scope_1);
eval_okay_test!(scope_2);
eval_okay_test!(scope_3);
eval_okay_test!(scope_4);
eval_okay_test!(scope_6);
eval_okay_test!(scope_7);
eval_okay_test!(seq);
eval_okay_test!(sort);
eval_okay_test!(splitversion);
eval_okay_test!(string);
eval_okay_test!(strings_as_attrs_names);
eval_okay_test!(substring);
eval_okay_test!(substring_context);
eval_okay_test!(symlink_resolution);
eval_okay_test!(
#[ignore = "TCO not implemented, also disabled in CppNix"]
tail_call_1
);
eval_okay_test!(tojson);
eval_okay_test!(
#[ignore = "not implemented: toXML"]
toxml
);
eval_okay_test!(
#[ignore = "not implemented: toXML"]
toxml2
);
eval_okay_test!(tryeval);
eval_okay_test!(types);
eval_okay_test!(versions);
eval_okay_test!(with);
eval_okay_test!(
#[ignore = "not implemented: hashString"]
zipAttrsWith
);
eval_fail_test!(fail_abort);
eval_fail_test!(fail_addDrvOutputDependencies_empty_context);
eval_fail_test!(fail_addDrvOutputDependencies_multi_elem_context);
eval_fail_test!(fail_addDrvOutputDependencies_wrong_element_kind);
eval_fail_test!(fail_addErrorContext_example);
eval_fail_test!(fail_assert);
eval_fail_test!(fail_assert_equal_attrs_names);
eval_fail_test!(fail_assert_equal_attrs_names_2);
eval_fail_test!(fail_assert_equal_derivations);
eval_fail_test!(fail_assert_equal_derivations_extra);
eval_fail_test!(fail_assert_equal_floats);
eval_fail_test!(fail_assert_equal_function_direct);
eval_fail_test!(fail_assert_equal_int_float);
eval_fail_test!(fail_assert_equal_ints);
eval_fail_test!(fail_assert_equal_list_length);
eval_fail_test!(fail_assert_equal_paths);
eval_fail_test!(fail_assert_equal_type);
eval_fail_test!(fail_assert_equal_type_nested);
eval_fail_test!(fail_assert_nested_bool);
eval_fail_test!(fail_attr_name_type);
eval_fail_test!(fail_attrset_merge_drops_later_rec);
eval_fail_test!(fail_bad_string_interpolation_1);
eval_fail_test!(fail_bad_string_interpolation_2);
eval_fail_test!(fail_bad_string_interpolation_3);
eval_fail_test!(fail_bad_string_interpolation_4);
eval_fail_test!(fail_blackhole);
eval_fail_test!(fail_call_primop);
eval_fail_test!(fail_deepseq);
eval_fail_test!(fail_derivation_name);
eval_fail_test!(fail_dup_dynamic_attrs);
eval_fail_test!(fail_duplicate_traces);
eval_fail_test!(fail_eol_1);
eval_fail_test!(fail_eol_2);
eval_fail_test!(fail_eol_3);
eval_fail_test!(fail_fetchTree_negative);
eval_fail_test!(fail_fetchurl_baseName);
eval_fail_test!(fail_fetchurl_baseName_attrs);
eval_fail_test!(fail_fetchurl_baseName_attrs_name);
eval_fail_test!(fail_flake_ref_to_string_negative_integer);
eval_fail_test!(fail_foldlStrict_strict_op_application);
eval_fail_test!(fail_fromJSON_keyWithNullByte);
eval_fail_test!(fail_fromJSON_overflowing);
eval_fail_test!(fail_fromJSON_valueWithNullByte);
eval_fail_test!(fail_fromTOML_keyWithNullByte);
eval_fail_test!(fail_fromTOML_timestamps);
eval_fail_test!(fail_fromTOML_valueWithNullByte);
eval_fail_test!(fail_hashfile_missing);
eval_fail_test!(fail_infinite_recursion_lambda);
eval_fail_test!(fail_list);
eval_fail_test!(fail_missing_arg);
eval_fail_test!(fail_mutual_recursion);
eval_fail_test!(fail_nested_list_items);
eval_fail_test!(fail_nonexist_path);
eval_fail_test!(fail_not_throws);
eval_fail_test!(fail_overflowing_add);
eval_fail_test!(fail_overflowing_div);
eval_fail_test!(fail_overflowing_mul);
eval_fail_test!(fail_overflowing_sub);
eval_fail_test!(fail_path_slash);
eval_fail_test!(fail_pipe_operators);
eval_fail_test!(fail_recursion);
eval_fail_test!(fail_remove);
eval_fail_test!(fail_scope_5);
eval_fail_test!(fail_seq);
eval_fail_test!(fail_set);
eval_fail_test!(fail_set_override);
eval_fail_test!(fail_string_nul_1);
eval_fail_test!(fail_string_nul_2);
eval_fail_test!(fail_substring);
eval_fail_test!(fail_toJSON);
eval_fail_test!(fail_toJSON_non_utf_8);
eval_fail_test!(fail_to_path);
eval_fail_test!(fail_undeclared_arg);
eval_fail_test!(fail_using_set_as_attr_name);

1
nix-js/tests/lang/data Normal file
View File

@@ -0,0 +1 @@
foo

View File

@@ -0,0 +1 @@
"a"

View File

@@ -0,0 +1 @@
"X"

View File

@@ -0,0 +1 @@
"b"

View File

@@ -0,0 +1 @@
"X"

View File

@@ -0,0 +1 @@
"X"

View File

@@ -0,0 +1 @@
"c"

View File

@@ -0,0 +1 @@
"X"

View File

@@ -0,0 +1 @@
"X"

View File

@@ -0,0 +1,8 @@
error:
… while calling the 'abort' builtin
at /pwd/lang/eval-fail-abort.nix:1:14:
1| if true then abort "this should fail" else 1
| ^
2|
error: evaluation aborted with the following error message: 'this should fail'

View File

@@ -0,0 +1 @@
if true then abort "this should fail" else 1

View File

@@ -0,0 +1,8 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
at /pwd/lang/eval-fail-addDrvOutputDependencies-empty-context.nix:1:1:
1| builtins.addDrvOutputDependencies ""
| ^
2|
error: context of string '' must have exactly one element, but has 0

View File

@@ -0,0 +1 @@
builtins.addDrvOutputDependencies ""

View File

@@ -0,0 +1,9 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:25:1:
24| in
25| builtins.addDrvOutputDependencies combo-path
| ^
26|
error: context of string '/nix/store/pg9yqs4yd85yhdm3f4i5dyaqp5jahrsz-fail.drv/nix/store/2dxd5frb715z451vbf7s8birlf3argbk-fail-2.drv' must have exactly one element, but has 2

View File

@@ -0,0 +1,25 @@
let
drv0 = derivation {
name = "fail";
builder = "/bin/false";
system = "x86_64-linux";
outputs = [
"out"
"foo"
];
};
drv1 = derivation {
name = "fail-2";
builder = "/bin/false";
system = "x86_64-linux";
outputs = [
"out"
"foo"
];
};
combo-path = "${drv0.drvPath}${drv1.drvPath}";
in
builtins.addDrvOutputDependencies combo-path

View File

@@ -0,0 +1,9 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:13:1:
12| in
13| builtins.addDrvOutputDependencies drv.outPath
| ^
14|
error: `addDrvOutputDependencies` can only act on derivations, not on a derivation output such as 'out'

View File

@@ -0,0 +1,13 @@
let
drv = derivation {
name = "fail";
builder = "/bin/false";
system = "x86_64-linux";
outputs = [
"out"
"foo"
];
};
in
builtins.addDrvOutputDependencies drv.outPath

View File

@@ -0,0 +1,24 @@
error:
… while counting down; n = 10
… while counting down; n = 9
… while counting down; n = 8
… while counting down; n = 7
… while counting down; n = 6
… while counting down; n = 5
… while counting down; n = 4
… while counting down; n = 3
… while counting down; n = 2
… while counting down; n = 1
(stack trace truncated; use '--show-trace' to show the full, detailed trace)
error: kaboom

View File

@@ -0,0 +1,9 @@
let
countDown =
n:
if n == 0 then
throw "kaboom"
else
builtins.addErrorContext "while counting down; n = ${toString n}" ("x" + countDown (n - 1));
in
countDown 10

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '({ a = true; } == { a = true; b = true; })'
at /pwd/lang/eval-fail-assert-equal-attrs-names-2.nix:1:1:
1| assert
| ^
2| {
error: attribute names of attribute set '{ a = true; }' differs from attribute set '{ a = true; b = true; }'

View File

@@ -0,0 +1,8 @@
assert
{
a = true;
} == {
a = true;
b = true;
};
throw "unreachable"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '({ a = true; b = true; } == { a = true; })'
at /pwd/lang/eval-fail-assert-equal-attrs-names.nix:1:1:
1| assert
| ^
2| {
error: attribute names of attribute set '{ a = true; b = true; }' differs from attribute set '{ a = true; }'

View File

@@ -0,0 +1,8 @@
assert
{
a = true;
b = true;
} == {
a = true;
};
throw "unreachable"

View File

@@ -0,0 +1,26 @@
error:
… while evaluating the condition of the assertion '({ foo = { outPath = "/nix/store/0"; type = "derivation"; }; } == { foo = { devious = true; outPath = "/nix/store/1"; type = "derivation"; }; })'
at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:1:1:
1| assert
| ^
2| {
… while comparing attribute 'foo'
… where left hand side is
at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:3:5:
2| {
3| foo = {
| ^
4| type = "derivation";
… where right hand side is
at /pwd/lang/eval-fail-assert-equal-derivations-extra.nix:8:5:
7| } == {
8| foo = {
| ^
9| type = "derivation";
… while comparing a derivation by its 'outPath' attribute
error: string '"/nix/store/0"' is not equal to string '"/nix/store/1"'

View File

@@ -0,0 +1,14 @@
assert
{
foo = {
type = "derivation";
outPath = "/nix/store/0";
};
} == {
foo = {
type = "derivation";
outPath = "/nix/store/1";
devious = true;
};
};
throw "unreachable"

View File

@@ -0,0 +1,26 @@
error:
… while evaluating the condition of the assertion '({ foo = { ignored = (abort "not ignored"); outPath = "/nix/store/0"; type = "derivation"; }; } == { foo = { ignored = (abort "not ignored"); outPath = "/nix/store/1"; type = "derivation"; }; })'
at /pwd/lang/eval-fail-assert-equal-derivations.nix:1:1:
1| assert
| ^
2| {
… while comparing attribute 'foo'
… where left hand side is
at /pwd/lang/eval-fail-assert-equal-derivations.nix:3:5:
2| {
3| foo = {
| ^
4| type = "derivation";
… where right hand side is
at /pwd/lang/eval-fail-assert-equal-derivations.nix:9:5:
8| } == {
9| foo = {
| ^
10| type = "derivation";
… while comparing a derivation by its 'outPath' attribute
error: string '"/nix/store/0"' is not equal to string '"/nix/store/1"'

View File

@@ -0,0 +1,15 @@
assert
{
foo = {
type = "derivation";
outPath = "/nix/store/0";
ignored = abort "not ignored";
};
} == {
foo = {
type = "derivation";
outPath = "/nix/store/1";
ignored = abort "not ignored";
};
};
throw "unreachable"

View File

@@ -0,0 +1,22 @@
error:
… while evaluating the condition of the assertion '({ b = 1; } == { b = 1.01; })'
at /pwd/lang/eval-fail-assert-equal-floats.nix:1:1:
1| assert { b = 1.0; } == { b = 1.01; };
| ^
2| abort "unreachable"
… while comparing attribute 'b'
… where left hand side is
at /pwd/lang/eval-fail-assert-equal-floats.nix:1:10:
1| assert { b = 1.0; } == { b = 1.01; };
| ^
2| abort "unreachable"
… where right hand side is
at /pwd/lang/eval-fail-assert-equal-floats.nix:1:26:
1| assert { b = 1.0; } == { b = 1.01; };
| ^
2| abort "unreachable"
error: a float with value '1' is not equal to a float with value '1.01'

View File

@@ -0,0 +1,2 @@
assert { b = 1.0; } == { b = 1.01; };
abort "unreachable"

View File

@@ -0,0 +1,9 @@
error:
… while evaluating the condition of the assertion '((x: x) == (x: x))'
at /pwd/lang/eval-fail-assert-equal-function-direct.nix:3:1:
2| # This only compares a direct comparison and makes no claims about functions in nested structures.
3| assert (x: x) == (x: x);
| ^
4| abort "unreachable"
error: distinct functions and immediate comparisons of identical functions compare as unequal

View File

@@ -0,0 +1,4 @@
# Note: functions in nested structures, e.g. attributes, may be optimized away by pointer identity optimization.
# This only compares a direct comparison and makes no claims about functions in nested structures.
assert (x: x) == (x: x);
abort "unreachable"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '(1 == 1.1)'
at /pwd/lang/eval-fail-assert-equal-int-float.nix:1:1:
1| assert 1 == 1.1;
| ^
2| throw "unreachable"
error: an integer with value '1' is not equal to a float with value '1.1'

View File

@@ -0,0 +1,2 @@
assert 1 == 1.1;
throw "unreachable"

View File

@@ -0,0 +1,22 @@
error:
… while evaluating the condition of the assertion '({ b = 1; } == { b = 2; })'
at /pwd/lang/eval-fail-assert-equal-ints.nix:1:1:
1| assert { b = 1; } == { b = 2; };
| ^
2| abort "unreachable"
… while comparing attribute 'b'
… where left hand side is
at /pwd/lang/eval-fail-assert-equal-ints.nix:1:10:
1| assert { b = 1; } == { b = 2; };
| ^
2| abort "unreachable"
… where right hand side is
at /pwd/lang/eval-fail-assert-equal-ints.nix:1:24:
1| assert { b = 1; } == { b = 2; };
| ^
2| abort "unreachable"
error: an integer with value '1' is not equal to an integer with value '2'

View File

@@ -0,0 +1,2 @@
assert { b = 1; } == { b = 2; };
abort "unreachable"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '([ (1) (0) ] == [ (10) ])'
at /pwd/lang/eval-fail-assert-equal-list-length.nix:1:1:
1| assert
| ^
2| [
error: list of size '2' is not equal to list of size '1', left hand side is '[ 1 0 ]', right hand side is '[ 10 ]'

View File

@@ -0,0 +1,6 @@
assert
[
1
0
] == [ 10 ];
throw "unreachable"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '(/pwd/lang/foo == /pwd/lang/bar)'
at /pwd/lang/eval-fail-assert-equal-paths.nix:1:1:
1| assert ./foo == ./bar;
| ^
2| throw "unreachable"
error: path '/pwd/lang/foo' is not equal to path '/pwd/lang/bar'

View File

@@ -0,0 +1,2 @@
assert ./foo == ./bar;
throw "unreachable"

View File

@@ -0,0 +1,22 @@
error:
… while evaluating the condition of the assertion '({ ding = false; } == { ding = null; })'
at /pwd/lang/eval-fail-assert-equal-type-nested.nix:1:1:
1| assert { ding = false; } == { ding = null; };
| ^
2| abort "unreachable"
… while comparing attribute 'ding'
… where left hand side is
at /pwd/lang/eval-fail-assert-equal-type-nested.nix:1:10:
1| assert { ding = false; } == { ding = null; };
| ^
2| abort "unreachable"
… where right hand side is
at /pwd/lang/eval-fail-assert-equal-type-nested.nix:1:31:
1| assert { ding = false; } == { ding = null; };
| ^
2| abort "unreachable"
error: a Boolean of value 'false' is not equal to null of value 'null'

View File

@@ -0,0 +1,2 @@
assert { ding = false; } == { ding = null; };
abort "unreachable"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating the condition of the assertion '(false == null)'
at /pwd/lang/eval-fail-assert-equal-type.nix:1:1:
1| assert false == null;
| ^
2| abort "unreachable"
error: a Boolean of value 'false' is not equal to null of value 'null'

View File

@@ -0,0 +1,2 @@
assert false == null;
abort "unreachable"

View File

@@ -0,0 +1,66 @@
error:
… while evaluating the condition of the assertion '({ a = { b = [ ({ c = { d = true; }; }) ]; }; } == { a = { b = [ ({ c = { d = false; }; }) ]; }; })'
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:1:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… while comparing attribute 'a'
… where left hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:10:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… where right hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:44:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… while comparing attribute 'b'
… where left hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:10:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… where right hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:44:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… while comparing list element 0
… while comparing attribute 'c'
… where left hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:20:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… where right hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:54:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… while comparing attribute 'd'
… where left hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:20:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
… where right hand side is
at /pwd/lang/eval-fail-assert-nested-bool.nix:1:54:
1| assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
| ^
2|
error: boolean 'true' is not equal to boolean 'false'

View File

@@ -0,0 +1,3 @@
assert { a.b = [ { c.d = true; } ]; } == { a.b = [ { c.d = false; } ]; };
abort "unreachable"

View File

@@ -0,0 +1,30 @@
error:
… while evaluating the attribute 'body'
at /pwd/lang/eval-fail-assert.nix:7:3:
6|
7| body = x "x";
| ^
8| }
… from call site
at /pwd/lang/eval-fail-assert.nix:7:10:
6|
7| body = x "x";
| ^
8| }
… while calling 'x'
at /pwd/lang/eval-fail-assert.nix:3:5:
2| x =
3| arg:
| ^
4| assert arg == "y";
… while evaluating the condition of the assertion '(arg == "y")'
at /pwd/lang/eval-fail-assert.nix:4:5:
3| arg:
4| assert arg == "y";
| ^
5| 123;
error: string '"x"' is not equal to string '"y"'

View File

@@ -0,0 +1,8 @@
let {
x =
arg:
assert arg == "y";
123;
body = x "x";
}

View File

@@ -0,0 +1,21 @@
error:
… while evaluating the attribute 'puppy."${key}"'
at /pwd/lang/eval-fail-attr-name-type.nix:3:5:
2| attrs = {
3| puppy.doggy = { };
| ^
4| };
… while evaluating an attribute name
at /pwd/lang/eval-fail-attr-name-type.nix:7:15:
6| in
7| attrs.puppy.${key}
| ^
8|
error: expected a string but found an integer: 1
at /pwd/lang/eval-fail-attr-name-type.nix:7:15:
6| in
7| attrs.puppy.${key}
| ^
8|

View File

@@ -0,0 +1,7 @@
let
attrs = {
puppy.doggy = { };
};
key = 1;
in
attrs.puppy.${key}

View File

@@ -0,0 +1,6 @@
error: undefined variable 'd'
at /pwd/lang/eval-fail-attrset-merge-drops-later-rec.nix:4:9:
3| a = rec {
4| c = d + 2;
| ^
5| d = 3;

View File

@@ -0,0 +1,8 @@
{
a.b = 1;
a = rec {
c = d + 2;
d = 3;
};
}
.c

View File

@@ -0,0 +1,8 @@
error:
… while evaluating a path segment
at /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:2:
1| "${x: x}"
| ^
2|
error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:4»

View File

@@ -0,0 +1 @@
"${x: x}"

View File

@@ -0,0 +1 @@
error: path '/pwd/lang/fnord' does not exist

View File

@@ -0,0 +1 @@
"${./fnord}"

View File

@@ -0,0 +1,8 @@
error:
… while evaluating a path segment
at /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:3:
1| ''${x: x}''
| ^
2|
error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:5»

View File

@@ -0,0 +1 @@
''${x: x}''

View File

@@ -0,0 +1,9 @@
error:
… while evaluating a path segment
at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:19:3:
18| # The error message should not be too long.
19| ''${pkgs}''
| ^
20|
error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «8 attributes elided» }; «8 attributes elided» }; «8 attributes elided» }

View File

@@ -0,0 +1,19 @@
let
# Basically a "billion laughs" attack, but toned down to simulated `pkgs`.
ha = x: y: {
a = x y;
b = x y;
c = x y;
d = x y;
e = x y;
f = x y;
g = x y;
h = x y;
j = x y;
};
has = ha (ha (ha (ha (x: x)))) "ha";
# A large structure that has already been evaluated.
pkgs = builtins.deepSeq has has;
in
# The error message should not be too long.
''${pkgs}''

View File

@@ -0,0 +1,14 @@
error:
… while evaluating the attribute 'body'
at /pwd/lang/eval-fail-blackhole.nix:2:3:
1| let {
2| body = x;
| ^
3| x = y;
error: infinite recursion encountered
at /pwd/lang/eval-fail-blackhole.nix:3:7:
2| body = x;
3| x = y;
| ^
4| y = x;

View File

@@ -0,0 +1,5 @@
let {
body = x;
x = y;
y = x;
}

View File

@@ -0,0 +1,10 @@
error:
… while calling the 'length' builtin
at /pwd/lang/eval-fail-call-primop.nix:1:1:
1| builtins.length 1
| ^
2|
… while evaluating the first argument passed to builtins.length
error: expected a list but found an integer: 1

Some files were not shown because too many files have changed in this diff Show More