feat: better error handling

This commit is contained in:
2026-01-18 19:56:51 +08:00
parent 208b996627
commit e310133421
29 changed files with 1375 additions and 486 deletions

155
Cargo.lock generated
View File

@@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 4
[[package]]
name = "addr2line"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b"
dependencies = [
"gimli",
]
[[package]]
name = "adler2"
version = "2.0.1"
@@ -79,6 +88,30 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b7e4c2464d97fe331d41de9d5db0def0a96f4d823b8b32a2efd503578988973"
[[package]]
name = "backtrace"
version = "0.3.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6"
dependencies = [
"addr2line",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
"windows-link",
]
[[package]]
name = "backtrace-ext"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
dependencies = [
"backtrace",
]
[[package]]
name = "base64"
version = "0.22.1"
@@ -1125,6 +1158,12 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "gimli"
version = "0.32.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
[[package]]
name = "glob"
version = "0.3.3"
@@ -1523,6 +1562,12 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "is_ci"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
[[package]]
name = "is_executable"
version = "1.0.5"
@@ -1735,6 +1780,36 @@ dependencies = [
"autocfg",
]
[[package]]
name = "miette"
version = "7.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7"
dependencies = [
"backtrace",
"backtrace-ext",
"cfg-if",
"miette-derive",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"terminal_size",
"textwrap",
"unicode-width 0.1.14",
]
[[package]]
name = "miette-derive"
version = "7.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "mimalloc"
version = "0.1.48"
@@ -1830,6 +1905,7 @@ name = "nix-js"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"bzip2",
"criterion",
"deno_core",
@@ -1840,6 +1916,7 @@ dependencies = [
"hashbrown 0.16.1",
"hex",
"itertools 0.14.0",
"miette",
"mimalloc",
"nix-compat",
"nix-js-macros",
@@ -1848,11 +1925,13 @@ dependencies = [
"regex",
"reqwest",
"rnix",
"rowan",
"rusqlite",
"rustyline",
"serde",
"serde_json",
"sha2",
"sourcemap",
"string-interner",
"tar",
"tempfile",
@@ -1971,6 +2050,15 @@ dependencies = [
"syn",
]
[[package]]
name = "object"
version = "0.37.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.21.3"
@@ -1995,6 +2083,12 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
[[package]]
name = "owo-colors"
version = "4.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
[[package]]
name = "parking_lot"
version = "0.12.5"
@@ -2493,6 +2587,12 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rustc-demangle"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d"
[[package]]
name = "rustc-hash"
version = "1.1.0"
@@ -2598,7 +2698,7 @@ dependencies = [
"nix",
"radix_trie",
"unicode-segmentation",
"unicode-width",
"unicode-width 0.1.14",
"utf8parse",
"windows-sys 0.52.0",
]
@@ -2876,6 +2976,27 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "supports-color"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6"
dependencies = [
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91"
[[package]]
name = "supports-unicode"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
[[package]]
name = "symlink"
version = "0.1.0"
@@ -3007,12 +3128,32 @@ dependencies = [
"writeable",
]
[[package]]
name = "terminal_size"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0"
dependencies = [
"rustix 1.1.3",
"windows-sys 0.60.2",
]
[[package]]
name = "text-size"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]]
name = "textwrap"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
dependencies = [
"unicode-linebreak",
"unicode-width 0.2.2",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -3327,6 +3468,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
@@ -3339,6 +3486,12 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
[[package]]
name = "unicode-xid"
version = "0.2.6"

View File

@@ -99,6 +99,14 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
match variant {
VariantInput::Unit(name) => {
let inner_type = name.clone();
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name {
pub span: rnix::TextRange,
}
});
enum_variants.push(quote! { #name(#inner_type) });
ref_variants.push(quote! { #name(&'a #inner_type) });
mut_variants.push(quote! { #name(&'a mut #inner_type) });
@@ -116,14 +124,45 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
});
}
VariantInput::Tuple(name, ty) => {
enum_variants.push(quote! { #name(#ty) });
ref_variants.push(quote! { #name(&'a #ty) });
mut_variants.push(quote! { #name(&'a mut #ty) });
let field_name = format_ident!("inner");
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name {
pub #field_name: #ty,
pub span: rnix::TextRange,
}
});
let inner_type = name.clone();
enum_variants.push(quote! { #name(#inner_type) });
ref_variants.push(quote! { #name(&'a #inner_type) });
mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
from_impls.push(quote! {
impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
}
VariantInput::Struct(name, fields) => {
});
to_trait_impls.push(quote! {
impl #to_trait_name for #name {
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
}
});
}
VariantInput::Struct(name, mut fields) => {
let inner_type = name.clone();
fields.named.push(syn::Field {
attrs: vec![],
vis: syn::Visibility::Public(syn::token::Pub::default()),
mutability: syn::FieldMutability::None,
ident: Some(format_ident!("span")),
colon_token: Some(syn::token::Colon::default()),
ty: syn::parse_quote!(rnix::TextRange),
});
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name #fields

View File

@@ -24,6 +24,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] }
derive_more = { version = "2", features = ["full"] }
thiserror = "2"
miette = { version = "7.4", features = ["fancy"] }
hashbrown = "0.16"
petgraph = "0.8"
@@ -40,6 +41,9 @@ nix-nar = "0.3"
sha2 = "0.10"
hex = "0.4"
sourcemap = "9.0"
base64 = "0.22"
# Fetcher dependencies
reqwest = { version = "0.12", features = ["blocking", "rustls-tls"], default-features = false }
tar = "0.4"
@@ -54,6 +58,7 @@ tempfile = "3.24"
rusqlite = { version = "0.33", features = ["bundled"] }
rnix = "0.12"
rowan = "0.15"
nix-js-macros = { path = "../nix-js-macros" }

View File

@@ -360,20 +360,15 @@ export const toFile =
(contentsArg: NixValue): StringWithContext => {
const name = forceString(nameArg);
if (name.includes('/')) {
if (name.includes("/")) {
throw new Error("builtins.toFile: name cannot contain '/'");
}
if (name === '.' || name === '..') {
if (name === "." || name === "..") {
throw new Error("builtins.toFile: invalid name");
}
const context: NixStringContext = new Set();
const contents = coerceToString(
contentsArg,
StringCoercionMode.ToString,
false,
context
);
const contents = coerceToString(contentsArg, StringCoercionMode.ToString, false, context);
const references: string[] = Array.from(context);

View File

@@ -11,6 +11,75 @@ import { force } from "./thunk";
import { mkPath } from "./path";
import { CatchableError, isNixPath } from "./types";
interface StackFrame {
span: string;
message: string;
}
const callStack: StackFrame[] = [];
const MAX_STACK_DEPTH = 1000;
export const STACK_TRACE = { enabled: false };
function enrichError(error: unknown): Error {
const err = error instanceof Error ? error : new Error(String(error));
if (!STACK_TRACE.enabled || callStack.length === 0) {
return err;
}
// Use compact format for easy parsing (no regex needed)
// Format: NIX_STACK_FRAME:context:start:end:message
const nixStackLines = callStack.map((frame) => {
return `NIX_STACK_FRAME:context:${frame.span}:${frame.message}`;
});
// Prepend stack frames to error stack
err.stack = `${nixStackLines.join('\n')}\n${err.stack || ''}`;
return err;
}
/**
* Push an error context onto the stack
* Used for tracking evaluation context (e.g., "while evaluating the condition")
*/
export const pushContext = (message: string, span: string): void => {
if (!STACK_TRACE.enabled) return;
if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift();
}
callStack.push({ span, message });
};
/**
* Pop an error context from the stack
*/
export const popContext = (): void => {
if (!STACK_TRACE.enabled) return;
callStack.pop();
};
/**
* Execute a function with error context tracking
* Automatically pushes context before execution and pops after
*/
export const withContext = <T>(message: string, span: string, fn: () => T): T => {
if (!STACK_TRACE.enabled) {
return fn();
}
pushContext(message, span);
try {
return fn();
} catch (error) {
throw enrichError(error);
} finally {
popContext();
}
};
/**
* Concatenate multiple values into a string or path with context
* This is used for string interpolation like "hello ${world}"
@@ -107,7 +176,29 @@ export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
return mkPath(resolved);
};
export const select = (obj: NixValue, attrpath: NixValue[]): NixValue => {
export const select = (obj: NixValue, attrpath: NixValue[], span?: string): NixValue => {
if (STACK_TRACE.enabled && span) {
const pathStrings = attrpath.map(a => forceString(a));
const path = pathStrings.join('.');
const message = path ? `while selecting attribute [${path}]` : 'while selecting attribute';
if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift();
}
callStack.push({ span, message });
try {
return select_impl(obj, attrpath);
} catch (error) {
throw enrichError(error);
} finally {
callStack.pop();
}
} else {
return select_impl(obj, attrpath);
}
};
function select_impl(obj: NixValue, attrpath: NixValue[]): NixValue {
let attrs = forceAttrs(obj);
for (const attr of attrpath.slice(0, -1)) {
@@ -124,9 +215,31 @@ export const select = (obj: NixValue, attrpath: NixValue[]): NixValue => {
throw new Error(`Attribute '${last}' not found`);
}
return attrs[last];
}
export const selectWithDefault = (obj: NixValue, attrpath: NixValue[], default_val: NixValue, span?: string): NixValue => {
if (STACK_TRACE.enabled && span) {
const pathStrings = attrpath.map(a => forceString(a));
const path = pathStrings.join('.');
const message = path ? `while selecting attribute [${path}]` : 'while selecting attribute';
if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift();
}
callStack.push({ span, message });
try {
return selectWithDefault_impl(obj, attrpath, default_val);
} catch (error) {
throw enrichError(error);
} finally {
callStack.pop();
}
} else {
return selectWithDefault_impl(obj, attrpath, default_val);
}
};
export const selectWithDefault = (obj: NixValue, attrpath: NixValue[], default_val: NixValue): NixValue => {
function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], default_val: NixValue): NixValue {
let attrs = forceAttrs(obj);
for (const attr of attrpath.slice(0, -1)) {
@@ -146,7 +259,7 @@ export const selectWithDefault = (obj: NixValue, attrpath: NixValue[], default_v
return attrs[last];
}
return default_val;
};
}
export const hasAttr = (obj: NixValue, attrpath: NixValue[]): NixBool => {
if (!isAttrs(obj)) {
@@ -208,7 +321,25 @@ export const validateParams = (
return forced_arg;
};
export const call = (func: NixValue, arg: NixValue): NixValue => {
export const call = (func: NixValue, arg: NixValue, span?: string): NixValue => {
if (STACK_TRACE.enabled && span) {
if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift();
}
callStack.push({ span, message: 'from call site' });
try {
return call_impl(func, arg);
} catch (error) {
throw enrichError(error);
} finally {
callStack.pop();
}
} else {
return call_impl(func, arg);
}
};
function call_impl(func: NixValue, arg: NixValue): NixValue {
const forcedFunc = force(func);
if (typeof forcedFunc === "function") {
return forcedFunc(arg);
@@ -223,7 +354,7 @@ export const call = (func: NixValue, arg: NixValue): NixValue => {
return forceFunction(functor(forcedFunc))(arg);
}
throw new Error(`attempt to call something which is not a function but ${typeName(forcedFunc)}`);
};
}
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string): NixValue => {
if (forceBool(assertion)) {
@@ -231,3 +362,10 @@ export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string
}
throw new CatchableError(`assertion '${assertionRaw}' failed`);
};
export const ifFunc = (cond: NixValue, consq: NixValue, alter: NixValue) => {
if (forceBool(cond)) {
return consq;
}
return alter;
};

View File

@@ -14,6 +14,10 @@ import {
concatStringsWithContext,
call,
assert,
STACK_TRACE,
pushContext,
popContext,
withContext,
} from "./helpers";
import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
@@ -34,6 +38,7 @@ export const Nix = {
HAS_CONTEXT,
IS_PATH,
DEBUG_THUNKS,
STACK_TRACE,
assert,
call,
@@ -46,6 +51,10 @@ export const Nix = {
concatStringsWithContext,
StringCoercionMode,
pushContext,
popContext,
withContext,
op,
builtins,
PRIMOP_METADATA,

View File

@@ -18,8 +18,8 @@ fn main() -> Result<()> {
Ok(())
}
Err(err) => {
eprintln!("Error: {err}");
Err(anyhow::anyhow!("{err}"))
eprintln!("{:?}", miette::Report::new(err));
exit(1);
}
}
}

View File

@@ -33,7 +33,7 @@ fn main() -> Result<()> {
} else {
match context.eval_code(&line) {
Ok(value) => println!("{value}"),
Err(err) => eprintln!("Error: {err}"),
Err(err) => eprintln!("{:?}", miette::Report::new(err)),
}
}
}

View File

@@ -6,13 +6,25 @@ use crate::ir::*;
pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
let code = expr.compile(ctx);
let debug_prefix = if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
"Nix.DEBUG_THUNKS.enabled=true;"
let mut debug_flags = Vec::new();
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
debug_flags.push("Nix.DEBUG_THUNKS.enabled=true");
}
if std::env::var("NIX_JS_STACK_TRACE").is_ok() {
debug_flags.push("Nix.STACK_TRACE.enabled=true");
}
let debug_prefix = if debug_flags.is_empty() {
String::new()
} else {
""
format!("{};", debug_flags.join(";"))
};
let cur_dir = ctx.get_current_dir().display().to_string().escape_quote();
format!("(()=>{{{}const currentDir={};return {}}})()", debug_prefix, cur_dir, code)
format!(
"(()=>{{{}const currentDir={};return {}}})()",
debug_prefix, cur_dir, code
)
}
trait Compile<Ctx: CodegenContext> {
@@ -48,12 +60,16 @@ impl EscapeQuote for str {
}
}
fn encode_span(span: rnix::TextRange) -> String {
format!("\"{}:{}\"", usize::from(span.start()), usize::from(span.end()))
}
impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
fn compile(&self, ctx: &Ctx) -> String {
match self {
Ir::Int(int) => format!("{int}n"), // Generate BigInt literal
Ir::Float(float) => float.to_string(),
Ir::Bool(bool) => bool.to_string(),
Ir::Int(int) => format!("{}n", int.inner), // Generate BigInt literal
Ir::Float(float) => float.inner.to_string(),
Ir::Bool(bool) => bool.inner.to_string(),
Ir::Null(_) => "null".to_string(),
Ir::Str(s) => s.val.escape_quote(),
Ir::Path(p) => {
@@ -61,11 +77,23 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
let path_expr = ctx.get_ir(p.expr).compile(ctx);
format!("Nix.resolvePath(currentDir,{})", path_expr)
}
&Ir::If(If { cond, consq, alter }) => {
let cond = ctx.get_ir(cond).compile(ctx);
&Ir::If(If {
cond, consq, alter, span
}) => {
let cond_code = ctx.get_ir(cond).compile(ctx);
let consq = ctx.get_ir(consq).compile(ctx);
let alter = ctx.get_ir(alter).compile(ctx);
format!("({cond})?({consq}):({alter})")
// Only add context tracking if STACK_TRACE is enabled
if std::env::var("NIX_JS_STACK_TRACE").is_ok() {
let cond_span = encode_span(ctx.get_ir(cond).span());
format!(
"(Nix.withContext(\"while evaluating a branch condition\",{},()=>({})))?({}):({})",
cond_span, cond_code, consq, alter
)
} else {
format!("({cond_code})?({consq}):({alter})")
}
}
Ir::BinOp(x) => x.compile(ctx),
Ir::UnOp(x) => x.compile(ctx),
@@ -73,18 +101,18 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
Ir::AttrSet(x) => x.compile(ctx),
Ir::List(x) => x.compile(ctx),
Ir::Call(x) => x.compile(ctx),
Ir::Arg(x) => format!("arg{}", x.0),
Ir::Arg(x) => format!("arg{}", x.inner.0),
Ir::Let(x) => x.compile(ctx),
Ir::Select(x) => x.compile(ctx),
&Ir::Thunk(expr_id) => {
&Ir::Thunk(Thunk { inner: expr_id, .. }) => {
let inner = ctx.get_ir(expr_id).compile(ctx);
format!("Nix.createThunk(()=>({}),\"expr{}\")", inner, expr_id.0)
}
&Ir::ExprRef(expr_id) => {
&Ir::ExprRef(ExprRef { inner: expr_id, .. }) => {
format!("expr{}", expr_id.0)
}
Ir::Builtins(_) => "Nix.builtins".to_string(),
&Ir::Builtin(Builtin(name)) => {
&Ir::Builtin(Builtin { inner: name, .. }) => {
format!("Nix.builtins[{}]", ctx.get_sym(name).escape_quote())
}
Ir::ConcatStrings(x) => x.compile(ctx),
@@ -93,13 +121,24 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
assertion,
expr,
ref assertion_raw,
span,
}) => {
let assertion = ctx.get_ir(assertion).compile(ctx);
let assertion_code = ctx.get_ir(assertion).compile(ctx);
let expr = ctx.get_ir(expr).compile(ctx);
// Only add context tracking if STACK_TRACE is enabled
if std::env::var("NIX_JS_STACK_TRACE").is_ok() {
let assertion_span = encode_span(ctx.get_ir(assertion).span());
format!(
"Nix.assert({assertion},{expr},{})",
assertion_raw.escape_quote()
"Nix.assert(Nix.withContext(\"while evaluating the condition of the assert statement\",{},()=>({})),{},{})",
assertion_span, assertion_code, expr, assertion_raw.escape_quote()
)
} else {
format!(
"Nix.assert({},{},{})",
assertion_code, expr, assertion_raw.escape_quote()
)
}
}
}
}
@@ -108,25 +147,43 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
fn compile(&self, ctx: &Ctx) -> String {
use BinOpKind::*;
let lhs = ctx.get_ir(self.lhs).compile(ctx);
let rhs = ctx.get_ir(self.rhs).compile(ctx);
// Only add context tracking if STACK_TRACE is enabled
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
// Helper to wrap operation with context (only if enabled)
let with_ctx = |op_name: &str, op_call: String| {
if stack_trace_enabled {
let span = encode_span(self.span);
format!(
"Nix.withContext(\"while evaluating the {} operator\",{},()=>({}))",
op_name, span, op_call
)
} else {
op_call
}
};
match self.kind {
Add => format!("Nix.op.add({},{})", lhs, rhs),
Sub => format!("Nix.op.sub({},{})", lhs, rhs),
Mul => format!("Nix.op.mul({},{})", lhs, rhs),
Div => format!("Nix.op.div({},{})", lhs, rhs),
Eq => format!("Nix.op.eq({},{})", lhs, rhs),
Neq => format!("Nix.op.neq({},{})", lhs, rhs),
Lt => format!("Nix.op.lt({},{})", lhs, rhs),
Gt => format!("Nix.op.gt({},{})", lhs, rhs),
Leq => format!("Nix.op.lte({},{})", lhs, rhs),
Geq => format!("Nix.op.gte({},{})", lhs, rhs),
Add => with_ctx("+", format!("Nix.op.add({},{})", lhs, rhs)),
Sub => with_ctx("-", format!("Nix.op.sub({},{})", lhs, rhs)),
Mul => with_ctx("*", format!("Nix.op.mul({},{})", lhs, rhs)),
Div => with_ctx("/", format!("Nix.op.div({},{})", lhs, rhs)),
Eq => with_ctx("==", format!("Nix.op.eq({},{})", lhs, rhs)),
Neq => with_ctx("!=", format!("Nix.op.neq({},{})", lhs, rhs)),
Lt => with_ctx("<", format!("Nix.op.lt({},{})", lhs, rhs)),
Gt => with_ctx(">", format!("Nix.op.gt({},{})", lhs, rhs)),
Leq => with_ctx("<=", format!("Nix.op.lte({},{})", lhs, rhs)),
Geq => with_ctx(">=", format!("Nix.op.gte({},{})", lhs, rhs)),
// Short-circuit operators: use JavaScript native && and ||
And => format!("Nix.force({})&&Nix.force({})", lhs, rhs),
Or => format!("Nix.force({})||Nix.force({})", lhs, rhs),
Impl => format!("(!Nix.force({})||Nix.force({}))", lhs, rhs),
Con => format!("Nix.op.concat({},{})", lhs, rhs),
Upd => format!("Nix.op.update({},{})", lhs, rhs),
And => with_ctx("&&", format!("Nix.force({})&&Nix.force({})", lhs, rhs)),
Or => with_ctx("||", format!("Nix.force({})||Nix.force({})", lhs, rhs)),
Impl => with_ctx("->", format!("(!Nix.force({})||Nix.force({}))", lhs, rhs)),
Con => with_ctx("++", format!("Nix.op.concat({},{})", lhs, rhs)),
Upd => with_ctx("//", format!("Nix.op.update({},{})", lhs, rhs)),
PipeL => format!("Nix.call({},{})", rhs, lhs),
PipeR => format!("Nix.call({},{})", lhs, rhs),
}
@@ -146,7 +203,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for UnOp {
impl<Ctx: CodegenContext> Compile<Ctx> for Func {
fn compile(&self, ctx: &Ctx) -> String {
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().0;
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().inner.0;
let body = ctx.get_ir(self.body).compile(ctx);
// Generate parameter validation code
@@ -170,7 +227,7 @@ impl Func {
return String::new();
}
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().0;
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().inner.0;
// Build required parameter array
let required = if let Some(req) = &self.param.required {
@@ -203,7 +260,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Call {
fn compile(&self, ctx: &Ctx) -> String {
let func = ctx.get_ir(self.func).compile(ctx);
let arg = ctx.get_ir(self.arg).compile(ctx);
format!("Nix.call({func},{arg})")
let span_str = encode_span(self.span);
format!("Nix.call({func},{arg},{span_str})")
}
}
@@ -222,7 +280,7 @@ fn should_keep_thunk(ir: &Ir) -> bool {
}
fn unwrap_thunk(ir: &Ir, ctx: &impl CodegenContext) -> String {
if let Ir::Thunk(inner) = ir {
if let Ir::Thunk(Thunk { inner, .. }) = ir {
let inner_ir = ctx.get_ir(*inner);
if should_keep_thunk(inner_ir) {
ir.compile(ctx)
@@ -266,13 +324,14 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
Attr::Dynamic(expr_id) => ctx.get_ir(*expr_id).compile(ctx),
})
.join(",");
let span_str = encode_span(self.span);
if let Some(default) = self.default {
format!(
"Nix.selectWithDefault({lhs},[{attrpath}],{})",
"Nix.selectWithDefault({lhs},[{attrpath}],{},{span_str})",
ctx.get_ir(default).compile(ctx)
)
} else {
format!("Nix.select({lhs},[{attrpath}])")
format!("Nix.select({lhs},[{attrpath}],{span_str})")
}
}
}
@@ -280,16 +339,38 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
fn compile(&self, ctx: &Ctx) -> String {
let mut attrs = Vec::new();
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
for (&sym, &expr) in &self.stcs {
let key = ctx.get_sym(sym);
let value = ctx.get_ir(expr).compile(ctx);
let value_code = ctx.get_ir(expr).compile(ctx);
let value = if stack_trace_enabled {
let value_span = encode_span(ctx.get_ir(expr).span());
format!(
"Nix.withContext(\"while evaluating the attribute '{}'\",{},()=>({}))",
key, value_span, value_code
)
} else {
value_code
};
attrs.push(format!("{}:{}", key.escape_quote(), value));
}
// FIXME: duplicated key
for (key_expr, value_expr) in &self.dyns {
let key = ctx.get_ir(*key_expr).compile(ctx);
let value = ctx.get_ir(*value_expr).compile(ctx);
let value_code = ctx.get_ir(*value_expr).compile(ctx);
let value = if stack_trace_enabled {
let value_span = encode_span(ctx.get_ir(*value_expr).span());
format!(
"Nix.withContext(\"while evaluating a dynamic attribute\",{},()=>({}))",
value_span, value_code
)
} else {
value_code
};
attrs.push(format!("[{}]:{}", key, value));
}
@@ -299,10 +380,24 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
impl<Ctx: CodegenContext> Compile<Ctx> for List {
fn compile(&self, ctx: &Ctx) -> String {
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
let list = self
.items
.iter()
.map(|item| ctx.get_ir(*item).compile(ctx))
.enumerate()
.map(|(idx, item)| {
let item_code = ctx.get_ir(*item).compile(ctx);
if stack_trace_enabled {
let item_span = encode_span(ctx.get_ir(*item).span());
format!(
"Nix.withContext(\"while evaluating list element {}\",{},()=>({}))",
idx, item_span, item_code
)
} else {
item_code
}
})
.join(",");
format!("[{list}]")
}
@@ -310,10 +405,24 @@ impl<Ctx: CodegenContext> Compile<Ctx> for List {
impl<Ctx: CodegenContext> Compile<Ctx> for ConcatStrings {
fn compile(&self, ctx: &Ctx) -> String {
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
let parts: Vec<String> = self
.parts
.iter()
.map(|part| ctx.get_ir(*part).compile(ctx))
.enumerate()
.map(|(_idx, part)| {
let part_code = ctx.get_ir(*part).compile(ctx);
if stack_trace_enabled {
let part_span = encode_span(ctx.get_ir(*part).span());
format!(
"Nix.withContext(\"while evaluating a path segment\",{},()=>({}))",
part_span, part_code
)
} else {
part_code
}
})
.collect();
format!("Nix.concatStringsWithContext([{}])", parts.join(","))

View File

@@ -4,12 +4,17 @@ use std::ptr::NonNull;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use petgraph::graphmap::DiGraphMap;
use rnix::TextRange;
use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result};
use crate::ir::{ArgId, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, SymId, ToIr as _};
use crate::ir::{
Arg, ArgId, Bool, Builtin, Downgrade as _, DowngradeContext, ExprId, ExprRef, Ir, Null, SymId,
ToIr as _, synthetic_span,
};
use crate::runtime::{Runtime, RuntimeContext};
use crate::sourcemap::NixSourceMapBuilder;
use crate::store::{StoreBackend, StoreConfig};
use crate::value::Value;
use std::sync::Arc;
@@ -43,6 +48,9 @@ mod private {
fn compile_code(&mut self, expr: &str) -> Result<String> {
self.as_mut().compile_code(expr)
}
fn get_current_source(&self) -> Option<Arc<str>> {
self.as_ref().get_current_source()
}
}
}
use private::CtxPtr;
@@ -67,7 +75,11 @@ impl Context {
let config = StoreConfig::from_env();
let store = Arc::new(StoreBackend::new(config)?);
Ok(Self { ctx, runtime, store })
Ok(Self {
ctx,
runtime,
store,
})
}
pub fn eval_code(&mut self, expr: &str) -> Result<Value> {
@@ -83,10 +95,7 @@ impl Context {
tracing::debug!("Compiling code");
let code = self.compile_code(expr)?;
self.runtime
.op_state()
.borrow_mut()
.put(self.store.clone());
self.runtime.op_state().borrow_mut().put(self.store.clone());
tracing::debug!("Executing JavaScript");
self.runtime
@@ -112,6 +121,9 @@ pub(crate) struct Ctx {
symbols: DefaultStringInterner,
global: NonNull<HashMap<SymId, ExprId>>,
current_file: Option<PathBuf>,
source_map: HashMap<PathBuf, Arc<str>>,
current_source: Option<Arc<str>>,
js_source_maps: HashMap<PathBuf, sourcemap::SourceMap>,
}
impl Default for Ctx {
@@ -122,7 +134,12 @@ impl Default for Ctx {
let mut irs = Vec::new();
let mut global = HashMap::new();
irs.push(Builtins.to_ir());
irs.push(
Builtins {
span: synthetic_span(),
}
.to_ir(),
);
let builtins_expr = ExprId(0);
let builtins_sym = symbols.get_or_intern("builtins");
@@ -150,15 +167,41 @@ impl Default for Ctx {
"toString",
];
let consts = [
("true", Ir::Bool(true)),
("false", Ir::Bool(false)),
("null", Ir::Null(())),
(
"true",
Bool {
inner: true,
span: synthetic_span(),
}
.to_ir(),
),
(
"false",
Bool {
inner: false,
span: synthetic_span(),
}
.to_ir(),
),
(
"null",
Null {
span: synthetic_span(),
}
.to_ir(),
),
];
for name in free_globals {
let name_sym = symbols.get_or_intern(name);
let id = ExprId(irs.len());
irs.push(Builtin(name_sym).to_ir());
irs.push(
Builtin {
inner: name_sym,
span: synthetic_span(),
}
.to_ir(),
);
global.insert(name_sym, id);
}
for (name, value) in consts {
@@ -173,6 +216,9 @@ impl Default for Ctx {
irs,
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(global))) },
current_file: None,
source_map: HashMap::new(),
current_source: None,
js_source_maps: HashMap::new(),
}
}
}
@@ -195,11 +241,27 @@ impl Ctx {
.expect("current_file doesn't have a parent dir")
}
pub(crate) fn get_current_source(&self) -> Option<Arc<str>> {
self.current_source.clone()
}
fn compile_code(&mut self, expr: &str) -> Result<String> {
tracing::debug!("Parsing Nix expression");
// Store source text for error reporting
let source: Arc<str> = Arc::from(expr);
self.current_source = Some(source.clone());
// Store source in source_map if we have a current_file
if let Some(ref file) = self.current_file {
self.source_map.insert(file.clone(), source.clone());
}
let root = rnix::Root::parse(expr);
if !root.errors().is_empty() {
return Err(Error::parse_error(root.errors().iter().join("; ")));
let error_msg = root.errors().iter().join("; ");
let err = Error::parse_error(error_msg).with_source(source);
return Err(err);
}
#[allow(clippy::unwrap_used)]
@@ -283,8 +345,14 @@ impl DowngradeContext for DowngradeCtx<'_> {
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_arg(&mut self) -> ExprId {
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
fn new_arg(&mut self, span: TextRange) -> ExprId {
self.irs.push(Some(
Arg {
inner: ArgId(self.arg_id),
span,
}
.to_ir(),
));
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
@@ -297,7 +365,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
self.ctx.get_sym(id)
}
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId> {
for scope in self.scopes.iter().rev() {
match scope {
&Scope::Global(global_scope) => {
@@ -345,7 +413,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
}
}
return Ok(self.new_expr(Ir::ExprRef(expr)));
return Ok(self.new_expr(ExprRef { inner: expr, span }.to_ir()));
}
}
&Scope::Param(param_sym, expr) => {
@@ -375,10 +443,15 @@ impl DowngradeContext for DowngradeCtx<'_> {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
default: result, // Link to outer With or None
span,
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
result.ok_or_else(|| {
Error::downgrade_error(format!("'{}' not found", self.get_sym(sym)))
.with_span(span)
.with_source(self.get_current_source().unwrap_or_else(|| Arc::from("")))
})
}
fn extract_expr(&mut self, id: ExprId) -> Ir {
@@ -399,6 +472,19 @@ impl DowngradeContext for DowngradeCtx<'_> {
.insert(expr);
}
/// Resolve the source span recorded for the IR node `id`.
///
/// Ids are allocated as `ctx.irs.len() + local_index` (see `new_expr` /
/// `new_arg`), so an id below `ctx.irs.len()` refers to the committed
/// global IR list, and an id at or above it refers to this context's
/// local `irs` buffer.
fn get_span(&self, id: ExprId) -> rnix::TextRange {
    // NOTE: the original compared `>=` here and then indexed the *global*
    // list with the raw id (always out of bounds), while the local branch
    // underflowed `id.0 - len` for global ids. Branches are now matched to
    // the id-allocation scheme. A stray `dbg!(id)` was also removed.
    if id.0 < self.ctx.irs.len() {
        return self.ctx.irs.get(id.0).unwrap().span();
    }
    // Local slots are `Option<Ir>` because they may be reserved before
    // being filled; by the time spans are queried they must be `Some`.
    let local_id = id.0 - self.ctx.irs.len();
    self.irs.get(local_id).unwrap().as_ref().unwrap().span()
}
/// Forward to the owning `Ctx`: source text of the file being downgraded.
fn get_current_source(&self) -> Option<Arc<str>> {
    self.ctx.current_source.clone()
}
#[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();

View File

@@ -1,135 +1,256 @@
use miette::{Diagnostic, LabeledSpan, SourceSpan};
use std::sync::Arc;
use thiserror::Error;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Error, Debug)]
pub enum ErrorKind {
#[error("error occurred during parse stage: {0}")]
ParseError(String),
#[error("error occurred during downgrade stage: {0}")]
DowngradeError(String),
#[error(
"error occurred during evaluation stage: {msg}{}",
backtrace.as_ref().map_or("".into(), |backtrace| format!("\nBacktrace: {backtrace}"))
)]
EvalError {
msg: String,
backtrace: Option<String>,
#[derive(Error, Debug, Diagnostic)]
pub enum Error {
#[error("Parse error: {message}")]
#[diagnostic(code(nix::parse))]
ParseError {
#[source_code]
src: Option<Arc<str>>,
#[label("error occurred here")]
span: Option<SourceSpan>,
message: String,
},
#[error("internal error occurred: {0}")]
InternalError(String),
#[error("{0}")]
Catchable(String),
#[error("an unknown or unexpected error occurred")]
#[error("Downgrade error: {message}")]
#[diagnostic(code(nix::downgrade))]
DowngradeError {
#[source_code]
src: Option<Arc<str>>,
#[label("{message}")]
span: Option<SourceSpan>,
message: String,
// #[related]
// related: Vec<LabeledSpan>,
},
#[error("Evaluation error: {message}")]
#[diagnostic(code(nix::eval))]
EvalError {
#[source_code]
src: Option<Arc<str>>,
#[label("error occurred here")]
span: Option<SourceSpan>,
message: String,
// #[help]
js_backtrace: Option<String>,
},
#[error("Internal error: {message}")]
#[diagnostic(code(nix::internal))]
InternalError { message: String },
#[error("{message}")]
#[diagnostic(code(nix::catchable))]
Catchable { message: String },
#[error("Unknown error")]
#[diagnostic(code(nix::unknown))]
Unknown,
}
#[derive(Debug)]
pub struct Error {
pub kind: ErrorKind,
pub span: Option<rnix::TextRange>,
pub source: Option<Arc<str>>,
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Basic display
write!(f, "{}", self.kind)?;
// If we have source and span, print context
if let (Some(source), Some(span)) = (&self.source, self.span) {
let start_byte = usize::from(span.start());
let end_byte = usize::from(span.end());
if start_byte > source.len() || end_byte > source.len() {
return Ok(()); // Span is out of bounds
}
let mut start_line = 1;
let mut start_col = 1usize;
let mut line_start_byte = 0;
for (i, c) in source.char_indices() {
if i >= start_byte {
break;
}
if c == '\n' {
start_line += 1;
start_col = 1;
line_start_byte = i + 1;
} else {
start_col += 1;
impl Error {
pub fn parse_error(msg: String) -> Self {
Error::ParseError {
src: None,
span: None,
message: msg,
}
}
let line_end_byte = source[line_start_byte..]
.find('\n')
.map(|i| line_start_byte + i)
.unwrap_or(source.len());
pub fn downgrade_error(msg: String) -> Self {
Error::DowngradeError {
src: None,
span: None,
message: msg,
// related: Vec::new(),
}
}
let line_str = &source[line_start_byte..line_end_byte];
// pub fn downgrade_error_with_related(msg: String, related: Vec<LabeledSpan>) -> Self {
// Error::DowngradeError {
// src: None,
// span: None,
// message: msg,
// related,
// }
// }
let underline_len = if end_byte > start_byte {
end_byte - start_byte
} else {
1
pub fn eval_error(msg: String, backtrace: Option<String>) -> Self {
Error::EvalError {
src: None,
span: None,
message: msg,
js_backtrace: backtrace,
}
}
pub fn internal(msg: String) -> Self {
Error::InternalError { message: msg }
}
pub fn catchable(msg: String) -> Self {
Error::Catchable { message: msg }
}
pub fn unknown() -> Self {
Error::Unknown
}
pub fn with_span(self, span: rnix::TextRange) -> Self {
let source_span = Some(text_range_to_source_span(span));
match self {
Error::ParseError { src, message, .. } => Error::ParseError {
src,
span: source_span,
message,
},
Error::DowngradeError {
src,
message,
// related,
..
} => Error::DowngradeError {
src,
span: source_span,
message,
// related,
},
Error::EvalError {
src,
message,
js_backtrace,
..
} => Error::EvalError {
src,
span: source_span,
message,
js_backtrace,
},
other => other,
}
}
pub fn with_source(self, source: Arc<str>) -> Self {
let src = Some(source);
match self {
Error::ParseError { span, message, .. } => Error::ParseError { src, span, message },
Error::DowngradeError {
span,
message,
// related,
..
} => Error::DowngradeError {
src,
span,
message,
// related,
},
Error::EvalError {
span,
message,
js_backtrace,
..
} => Error::EvalError {
src,
span,
message,
js_backtrace,
},
other => other,
}
}
}
/// Convert an `rnix` byte range into a miette `SourceSpan` (offset + length).
pub fn text_range_to_source_span(range: rnix::TextRange) -> SourceSpan {
    let offset = usize::from(range.start());
    let end = usize::from(range.end());
    SourceSpan::new(offset.into(), end - offset)
}
/// Stack frame types from Nix evaluation
#[derive(Debug, Clone)]
pub(crate) struct NixStackFrame {
    /// Byte range in the Nix source that this frame points at.
    pub span: rnix::TextRange,
    /// Human-readable context for the frame, e.g. "from call site".
    pub message: String,
}
/// Parse Nix stack trace from V8 Error.stack
/// Returns vector of stack frames (in order from oldest to newest)
pub(crate) fn parse_nix_stack(stack: &str) -> Vec<NixStackFrame> {
let mut frames = Vec::new();
for line in stack.lines() {
if !line.starts_with("NIX_STACK_FRAME:") {
continue;
}
// Format: NIX_STACK_FRAME:type:start:end[:extra_data]
let rest = line.strip_prefix("NIX_STACK_FRAME:").unwrap();
let parts: Vec<&str> = rest.splitn(4, ':').collect();
if parts.len() < 3 {
continue;
}
let frame_type = parts[0];
let start: u32 = match parts[1].parse() {
Ok(v) => v,
Err(_) => continue,
};
let end: u32 = match parts[2].parse() {
Ok(v) => v,
Err(_) => continue,
};
write!(f, "\n --> {}:{}", start_line, start_col)?;
write!(f, "\n |\n")?;
writeln!(f, "{:4} | {}", start_line, line_str)?;
write!(
f,
" | {}{}",
" ".repeat(start_col.saturating_sub(1)),
"^".repeat(underline_len)
)?;
let span = rnix::TextRange::new(
rnix::TextSize::from(start),
rnix::TextSize::from(end)
);
// Convert all frame types to context frames with descriptive messages
let message = match frame_type {
"call" => "from call site".to_string(),
"select" => {
let path = if parts.len() >= 4 { parts[3] } else { "" };
if path.is_empty() {
"while selecting attribute".to_string()
} else {
format!("while selecting attribute [{}]", path)
}
Ok(())
}
"context" => {
if parts.len() >= 4 {
parts[3].to_string()
} else {
String::new()
}
}
_ => continue,
};
frames.push(NixStackFrame { span, message });
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
Some(&self.kind)
}
// Deduplicate consecutive identical frames
frames.dedup_by(|a, b| a.span == b.span && a.message == b.message);
frames
}
impl Error {
pub fn new(kind: ErrorKind) -> Self {
Self {
kind,
span: None,
source: None,
}
/// Format stack trace for display (reversed order, newest at bottom)
pub(crate) fn format_stack_trace(frames: &[NixStackFrame]) -> Vec<String> {
let mut lines = Vec::new();
// Reverse order: oldest first, newest last
for frame in frames.iter().rev() {
lines.push(format!("{} at {}:{}",
frame.message, usize::from(frame.span.start()), usize::from(frame.span.end())));
}
pub fn with_span(mut self, span: rnix::TextRange) -> Self {
self.span = Some(span);
self
}
pub fn with_source(mut self, source: Arc<str>) -> Self {
self.source = Some(source);
self
}
pub fn parse_error(msg: String) -> Self {
Self::new(ErrorKind::ParseError(msg))
}
pub fn downgrade_error(msg: String) -> Self {
Self::new(ErrorKind::DowngradeError(msg))
}
pub fn eval_error(msg: String, backtrace: Option<String>) -> Self {
Self::new(ErrorKind::EvalError { msg, backtrace })
}
pub fn internal(msg: String) -> Self {
Self::new(ErrorKind::InternalError(msg))
}
pub fn catchable(msg: String) -> Self {
Self::new(ErrorKind::Catchable(msg))
}
pub fn unknown() -> Self {
Self::new(ErrorKind::Unknown)
}
lines
}

View File

@@ -1,5 +1,5 @@
use deno_core::op2;
use deno_core::OpState;
use deno_core::op2;
use serde::Serialize;
use tracing::{debug, info, warn};
@@ -14,8 +14,8 @@ pub use cache::FetcherCache;
pub use download::Downloader;
pub use metadata_cache::MetadataCache;
use crate::runtime::NixError;
use crate::nar;
use crate::runtime::NixError;
#[derive(Serialize)]
pub struct FetchUrlResult {
@@ -69,8 +69,7 @@ pub fn op_fetch_url(
let file_name =
name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string());
let metadata_cache =
MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let metadata_cache = MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let input = serde_json::json!({
"type": "file",
@@ -156,10 +155,7 @@ pub fn op_fetch_url(
.add(&input, &info, &store_path, true)
.map_err(|e| NixError::from(e.to_string()))?;
Ok(FetchUrlResult {
store_path,
hash,
})
Ok(FetchUrlResult { store_path, hash })
}
#[op2]
@@ -178,8 +174,7 @@ pub fn op_fetch_tarball(
info!("fetchTarball started");
let dir_name = name.unwrap_or_else(|| "source".to_string());
let metadata_cache =
MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let metadata_cache = MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let input = serde_json::json!({
"type": "tarball",

View File

@@ -78,7 +78,10 @@ impl FetcherCache {
self.hg_cache_dir().join(key)
}
pub fn extract_tarball_to_temp(&self, data: &[u8]) -> Result<(PathBuf, tempfile::TempDir), CacheError> {
pub fn extract_tarball_to_temp(
&self,
data: &[u8],
) -> Result<(PathBuf, tempfile::TempDir), CacheError> {
let temp_dir = tempfile::tempdir()?;
let extracted_path = super::archive::extract_archive(data, temp_dir.path())?;
Ok((extracted_path, temp_dir))

View File

@@ -34,7 +34,8 @@ pub fn fetch_git(
let nar_hash = crate::nar::compute_nar_hash(&checkout_dir)
.map_err(|e| GitError::NarHashError(e.to_string()))?;
let store_path = store.add_to_store_from_path(name, &checkout_dir, vec![])
let store_path = store
.add_to_store_from_path(name, &checkout_dir, vec![])
.map_err(|e| GitError::StoreError(e.to_string()))?;
let rev_count = get_rev_count(&bare_repo, &target_rev)?;

View File

@@ -1,4 +1,4 @@
use rusqlite::{params, Connection, OptionalExtension};
use rusqlite::{Connection, OptionalExtension, params};
use serde::{Deserialize, Serialize};
use serde_json;
use std::path::PathBuf;
@@ -72,8 +72,9 @@ impl MetadataCache {
.unwrap_or_else(|| PathBuf::from("/tmp"))
.join("nix-js");
std::fs::create_dir_all(&cache_dir)
.map_err(|e| CacheError::Database(rusqlite::Error::ToSqlConversionFailure(Box::new(e))))?;
std::fs::create_dir_all(&cache_dir).map_err(|e| {
CacheError::Database(rusqlite::Error::ToSqlConversionFailure(Box::new(e)))
})?;
let db_path = cache_dir.join("fetcher-cache.sqlite");
let conn = Connection::open(db_path)?;
@@ -156,15 +157,15 @@ impl MetadataCache {
.optional()?;
match entry {
Some((input_json, info_json, store_path, immutable, timestamp)) => Ok(Some(
CacheEntry {
Some((input_json, info_json, store_path, immutable, timestamp)) => {
Ok(Some(CacheEntry {
input: serde_json::from_str(&input_json)?,
info: serde_json::from_str(&info_json)?,
store_path,
immutable: immutable != 0,
timestamp: timestamp as u64,
},
)),
}))
}
None => Ok(None),
}
}

View File

@@ -1,6 +1,7 @@
use derive_more::{IsVariant, TryUnwrap, Unwrap};
use hashbrown::{HashMap, HashSet};
use rnix::ast;
use rnix::{TextRange, ast};
use std::sync::Arc;
use string_interner::symbol::SymbolU32;
use crate::context::SccInfo;
@@ -9,24 +10,29 @@ use crate::value::format_symbol;
use nix_js_macros::ir;
mod downgrade;
mod span_utils;
mod utils;
use utils::*;
pub use downgrade::Downgrade;
pub(crate) use span_utils::*;
pub trait DowngradeContext {
fn downgrade(self, expr: rnix::ast::Expr) -> Result<ExprId>;
fn new_expr(&mut self, expr: Ir) -> ExprId;
fn new_arg(&mut self) -> ExprId;
fn new_arg(&mut self, span: TextRange) -> ExprId;
fn new_sym(&mut self, sym: String) -> SymId;
fn get_sym(&self, id: SymId) -> &str;
fn lookup(&mut self, sym: SymId) -> Result<ExprId>;
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>;
fn extract_expr(&mut self, id: ExprId) -> Ir;
fn replace_expr(&mut self, id: ExprId, expr: Ir);
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
fn get_span(&self, id: ExprId) -> TextRange;
fn get_current_source(&self) -> Option<Arc<str>>;
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
@@ -51,27 +57,57 @@ ir! {
Int(i64),
Float(f64),
Bool(bool),
Null(()),
Str,
AttrSet,
List,
Null,
Str { pub val: String },
AttrSet { pub stcs: HashMap<SymId, ExprId>, pub dyns: Vec<(ExprId, ExprId)> },
List { pub items: Vec<ExprId> },
HasAttr,
BinOp,
UnOp,
Select,
If,
Call,
Assert,
ConcatStrings,
Path,
Func,
Let,
HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> },
BinOp { pub lhs: ExprId, pub rhs: ExprId, pub kind: BinOpKind },
UnOp { pub rhs: ExprId, pub kind: UnOpKind },
Select { pub expr: ExprId, pub attrpath: Vec<Attr>, pub default: Option<ExprId> },
If { pub cond: ExprId, pub consq: ExprId, pub alter: ExprId },
Call { pub func: ExprId, pub arg: ExprId },
Assert { pub assertion: ExprId, pub expr: ExprId, pub assertion_raw: String },
ConcatStrings { pub parts: Vec<ExprId> },
Path { pub expr: ExprId },
Func { pub body: ExprId, pub param: Param, pub arg: ExprId },
Let { pub binding_sccs: SccInfo, pub body: ExprId },
Arg(ArgId),
ExprRef(ExprId),
Thunk(ExprId),
Builtins,
Builtin,
Builtin(SymId),
}
impl Ir {
/// Return the source span recorded on this IR node.
///
/// Every variant carries a `span` field; this is the uniform accessor
/// used for error reporting. The match is intentionally exhaustive so a
/// newly added variant fails to compile until it is handled here.
pub fn span(&self) -> TextRange {
    match self {
        Ir::Int(i) => i.span,
        Ir::Float(f) => f.span,
        Ir::Bool(b) => b.span,
        Ir::Null(n) => n.span,
        Ir::Str(s) => s.span,
        Ir::AttrSet(a) => a.span,
        Ir::List(l) => l.span,
        Ir::HasAttr(h) => h.span,
        Ir::BinOp(b) => b.span,
        Ir::UnOp(u) => u.span,
        Ir::Select(s) => s.span,
        Ir::If(i) => i.span,
        Ir::Call(c) => c.span,
        Ir::Assert(a) => a.span,
        Ir::ConcatStrings(c) => c.span,
        Ir::Path(p) => p.span,
        Ir::Func(f) => f.span,
        Ir::Let(l) => l.span,
        Ir::Arg(a) => a.span,
        Ir::ExprRef(e) => e.span,
        Ir::Thunk(t) => t.span,
        Ir::Builtins(b) => b.span,
        Ir::Builtin(b) => b.span,
    }
}
}
impl AttrSet {
@@ -105,7 +141,12 @@ impl AttrSet {
result?;
} else {
// Create a new sub-attrset because this path doesn't exist yet.
let mut attrs = AttrSet::default();
// FIXME: span
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span: synthetic_span(),
};
attrs._insert(path, name, value, ctx)?;
let attrs = ctx.new_expr(attrs.to_ir());
self.stcs.insert(ident, attrs);
@@ -115,7 +156,12 @@ impl AttrSet {
Attr::Dynamic(dynamic) => {
// If the next attribute is a dynamic expression, we must create a new sub-attrset.
// We cannot merge with existing dynamic attributes at this stage.
let mut attrs = AttrSet::default();
// FIXME: span
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span: synthetic_span(),
};
attrs._insert(path, name, value, ctx)?;
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir())));
Ok(())
@@ -165,15 +211,6 @@ pub type SymId = SymbolU32;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ArgId(pub usize);
/// Represents a Nix attribute set.
#[derive(Debug, Default)]
pub struct AttrSet {
/// Statically known attributes (key is a string).
pub stcs: HashMap<SymId, ExprId>,
/// Dynamically computed attributes, where both the key and value are expressions.
pub dyns: Vec<(ExprId, ExprId)>,
}
/// Represents a key in an attribute path.
#[derive(Debug, TryUnwrap)]
pub enum Attr {
@@ -185,30 +222,6 @@ pub enum Attr {
Str(SymId),
}
/// Represents a Nix list.
#[derive(Debug)]
pub struct List {
/// The expressions that are elements of the list.
pub items: Vec<ExprId>,
}
/// Represents a "has attribute" check (`?` operator).
#[derive(Debug)]
pub struct HasAttr {
/// The expression to check for the attribute (the left-hand side).
pub lhs: ExprId,
/// The attribute path to look for (the right-hand side).
pub rhs: Vec<Attr>,
}
/// Represents a binary operation.
#[derive(Debug)]
pub struct BinOp {
pub lhs: ExprId,
pub rhs: ExprId,
pub kind: BinOpKind,
}
/// The kinds of binary operations supported in Nix.
#[derive(Clone, Debug)]
pub enum BinOpKind {
@@ -266,13 +279,6 @@ impl From<ast::BinOpKind> for BinOpKind {
}
}
/// Represents a unary operation.
#[derive(Debug)]
pub struct UnOp {
pub rhs: ExprId,
pub kind: UnOpKind,
}
/// The kinds of unary operations.
#[derive(Clone, Debug)]
pub enum UnOpKind {
@@ -289,45 +295,6 @@ impl From<ast::UnaryOpKind> for UnOpKind {
}
}
/// Represents an attribute selection from an attribute set.
#[derive(Debug)]
pub struct Select {
/// The expression that should evaluate to an attribute set.
pub expr: ExprId,
/// The path of attributes to select.
pub attrpath: Vec<Attr>,
/// An optional default value to return if the selection fails.
pub default: Option<ExprId>,
}
/// Represents an `if-then-else` expression.
#[derive(Debug)]
pub struct If {
pub cond: ExprId,
pub consq: ExprId, // Consequence (then branch)
pub alter: ExprId, // Alternative (else branch)
}
/// Represents a function value (a lambda).
#[derive(Debug)]
pub struct Func {
/// The body of the function
pub body: ExprId,
/// The parameter specification for the function.
pub param: Param,
pub arg: ExprId,
}
/// Represents a `let ... in ...` expression.
#[derive(Debug)]
pub struct Let {
/// The bindings in the `let` expression, group in SCCs
pub binding_sccs: SccInfo,
/// The body expression evaluated in the scope of the bindings.
pub body: ExprId,
}
/// Describes the parameters of a function.
#[derive(Debug)]
pub struct Param {
@@ -337,50 +304,3 @@ pub struct Param {
/// If `None`, any attribute is allowed (ellipsis `...` is present).
pub allowed: Option<HashSet<SymId>>,
}
/// Represents a function call.
#[derive(Debug)]
pub struct Call {
/// The expression that evaluates to the function to be called.
pub func: ExprId,
pub arg: ExprId,
}
/// Represents an `assert` expression.
#[derive(Debug)]
pub struct Assert {
/// The condition to assert.
pub assertion: ExprId,
/// The expression to return if the assertion is true.
pub expr: ExprId,
pub assertion_raw: String,
}
/// Represents the concatenation of multiple string expressions.
/// This is typically the result of downgrading an interpolated string.
#[derive(Debug)]
pub struct ConcatStrings {
pub parts: Vec<ExprId>,
}
/// Represents a simple, non-interpolated string literal.
#[derive(Debug)]
pub struct Str {
pub val: String,
}
/// Represents a path literal.
#[derive(Debug)]
pub struct Path {
/// The expression that evaluates to the string content of the path.
/// This can be a simple `Str` or a `ConcatStrings` for interpolated paths.
pub expr: ExprId,
}
/// Represents the special `builtins` global object.
#[derive(Debug)]
pub struct Builtins;
/// Represents an attribute in `builtins`.
#[derive(Debug)]
pub struct Builtin(pub SymId);

View File

@@ -1,7 +1,10 @@
// Assume no parse error
#![allow(clippy::unwrap_used)]
use rnix::ast::{self, Expr, HasEntry};
use std::sync::Arc;
use rnix::ast::{self, AstToken, Expr, HasEntry};
use rowan::ast::AstNode;
use super::*;
use crate::error::{Error, Result};
@@ -16,7 +19,12 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
match self {
Apply(apply) => apply.downgrade(ctx),
Assert(assert) => assert.downgrade(ctx),
Error(error) => Err(self::Error::downgrade_error(error.to_string())),
Error(error) => {
let span = error.syntax().text_range();
Err(self::Error::downgrade_error(error.to_string())
.with_span(span)
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))))
}
IfElse(ifelse) => ifelse.downgrade(ctx),
Select(select) => select.downgrade(ctx),
Str(str) => str.downgrade(ctx),
@@ -44,11 +52,13 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
let assertion_raw = assertion.to_string();
let assertion = assertion.downgrade(ctx)?;
let expr = self.body().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(
Assert {
assertion,
expr,
assertion_raw,
span,
}
.to_ir(),
))
@@ -60,18 +70,29 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
let cond = self.condition().unwrap().downgrade(ctx)?;
let consq = self.body().unwrap().downgrade(ctx)?;
let alter = self.else_body().unwrap().downgrade(ctx)?;
Ok(ctx.new_expr(If { cond, consq, alter }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(
If {
cond,
consq,
alter,
span,
}
.to_ir(),
))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let parts = self
.parts()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
Str {
val: lit.to_string(),
span: lit.syntax().text_range(),
}
.to_ir(),
)),
@@ -84,14 +105,15 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
let expr = if parts.len() == 1 {
parts.into_iter().next().unwrap()
} else {
ctx.new_expr(ConcatStrings { parts }.to_ir())
ctx.new_expr(ConcatStrings { parts, span }.to_ir())
};
Ok(ctx.new_expr(Path { expr }.to_ir()))
Ok(ctx.new_expr(Path { expr, span }.to_ir()))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let normalized = self.normalized_parts();
let is_single_literal = normalized.len() == 1
&& matches!(normalized.first(), Some(ast::InterpolPart::Literal(_)));
@@ -99,7 +121,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
let parts = normalized
.into_iter()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit }.to_ir())),
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
}
@@ -109,18 +131,28 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
Ok(if is_single_literal {
parts.into_iter().next().unwrap()
} else {
ctx.new_expr(ConcatStrings { parts }.to_ir())
ctx.new_expr(ConcatStrings { parts, span }.to_ir())
})
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
Ok(ctx.new_expr(match self.kind() {
ast::LiteralKind::Integer(int) => Ir::Int(int.value().unwrap()),
ast::LiteralKind::Float(float) => Ir::Float(float.value().unwrap()),
ast::LiteralKind::Integer(int) => Int {
inner: int.value().unwrap(),
span,
}
.to_ir(),
ast::LiteralKind::Float(float) => Float {
inner: float.value().unwrap(),
span,
}
.to_ir(),
ast::LiteralKind::Uri(uri) => Str {
val: uri.to_string(),
span,
}
.to_ir(),
}))
@@ -131,13 +163,14 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let sym = self.ident_token().unwrap().to_string();
let sym = ctx.new_sym(sym);
ctx.lookup(sym)
ctx.lookup(sym, self.syntax().text_range())
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let rec = self.rec_token().is_some();
let span = self.syntax().text_range();
if !rec {
let attrs = downgrade_attrs(self, ctx)?;
@@ -151,17 +184,26 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span,
};
for sym in binding_keys {
let expr = ctx.lookup(*sym)?;
// FIXME: span
let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, expr);
}
Ok(ctx.new_expr(attrs.to_ir()))
})?;
Ok(ctx.new_expr(Let { body, binding_sccs }.to_ir()))
Ok(ctx.new_expr(
Let {
body,
binding_sccs,
span,
}
.to_ir(),
))
}
}
@@ -172,7 +214,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
.items()
.map(|item| maybe_thunk(item, ctx))
.collect::<Result<_>>()?;
Ok(ctx.new_expr(List { items }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(List { items, span }.to_ir()))
}
}
@@ -182,7 +225,16 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
let lhs = self.lhs().unwrap().downgrade(ctx)?;
let rhs = self.rhs().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
Ok(ctx.new_expr(BinOp { lhs, rhs, kind }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(
BinOp {
lhs,
rhs,
kind,
span,
}
.to_ir(),
))
}
}
@@ -191,7 +243,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let lhs = self.expr().unwrap().downgrade(ctx)?;
let rhs = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
Ok(ctx.new_expr(HasAttr { lhs, rhs }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(HasAttr { lhs, rhs, span }.to_ir()))
}
}
@@ -200,7 +253,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let rhs = self.expr().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
Ok(ctx.new_expr(UnOp { rhs, kind }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(UnOp { rhs, kind, span }.to_ir()))
}
}
@@ -210,16 +264,27 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
let expr = self.expr().unwrap().downgrade(ctx)?;
let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let default = if let Some(default) = self.default_expr() {
let span = default.syntax().text_range();
let default_expr = default.downgrade(ctx)?;
Some(ctx.new_expr(Ir::Thunk(default_expr)))
Some(
ctx.new_expr(
Thunk {
inner: default_expr,
span,
}
.to_ir(),
),
)
} else {
None
};
let span = self.syntax().text_range();
Ok(ctx.new_expr(
Select {
expr,
attrpath,
default,
span,
}
.to_ir(),
))
@@ -230,6 +295,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
/// The body of the `let` is accessed via `let.body`.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let bindings = downgrade_static_attrs(self, ctx)?;
let binding_keys: Vec<_> = bindings.keys().copied().collect();
@@ -237,10 +303,12 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span,
};
for sym in binding_keys {
let expr = ctx.lookup(sym)?;
// FIXME: span
let expr = ctx.lookup(sym, synthetic_span())?;
attrs.stcs.insert(sym, expr);
}
@@ -252,6 +320,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
expr: attrset_expr,
attrpath: vec![Attr::Str(body_sym)],
default: None,
span,
};
Ok(ctx.new_expr(select.to_ir()))
@@ -263,11 +332,19 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let entries: Vec<_> = self.entries().collect();
let body_expr = self.body().unwrap();
let span = self.syntax().text_range();
let (binding_sccs, body) =
downgrade_let_bindings(entries, ctx, |ctx, _binding_keys| body_expr.downgrade(ctx))?;
Ok(ctx.new_expr(Let { body, binding_sccs }.to_ir()))
Ok(ctx.new_expr(
Let {
body,
binding_sccs,
span,
}
.to_ir(),
))
}
}
@@ -288,13 +365,15 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
/// This involves desugaring pattern-matching arguments into `let` bindings.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let arg = ctx.new_arg();
let param = self.param().unwrap();
let arg = ctx.new_arg(param.syntax().text_range());
let required;
let allowed;
let body;
let span = self.body().unwrap().syntax().text_range();
match self.param().unwrap() {
match param {
ast::Param::IdentParam(id) => {
// Simple case: `x: body`
let param_sym = ctx.new_sym(id.to_string());
@@ -334,6 +413,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
Let {
body: inner_body,
binding_sccs: scc_info,
span,
}
.to_ir(),
);
@@ -341,8 +421,17 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
}
let param = Param { required, allowed };
let span = self.syntax().text_range();
// The function's body and parameters are now stored directly in the `Func` node.
Ok(ctx.new_expr(Func { body, param, arg }.to_ir()))
Ok(ctx.new_expr(
Func {
body,
param,
arg,
span,
}
.to_ir(),
))
}
}
@@ -353,6 +442,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let func = self.lambda().unwrap().downgrade(ctx)?;
let arg = maybe_thunk(self.argument().unwrap(), ctx)?;
Ok(ctx.new_expr(Call { func, arg }.to_ir()))
let span = self.syntax().text_range();
Ok(ctx.new_expr(Call { func, arg, span }.to_ir()))
}
}

View File

@@ -0,0 +1,20 @@
use rnix::TextRange;
pub fn merge_spans(spans: impl IntoIterator<Item = TextRange>) -> TextRange {
let mut spans = spans.into_iter();
let first = spans.next().unwrap_or_else(|| synthetic_span());
spans.fold(first, |acc, span| {
let start = acc.start().min(span.start());
let end = acc.end().max(span.end());
TextRange::new(start, end)
})
}
/// A zero-width span anchored at offset 0.
/// NOTE(review): currently identical to `synthetic_span` — confirm whether
/// it is meant to eventually take a position argument.
pub fn point_span() -> TextRange {
    let origin = 0.into();
    TextRange::new(origin, origin)
}
/// Placeholder span for IR nodes that have no corresponding source text
/// (compiler-generated nodes).
pub fn synthetic_span() -> TextRange {
    let zero = 0.into();
    TextRange::new(zero, zero)
}

View File

@@ -1,9 +1,12 @@
// Assume no parse error
#![allow(clippy::unwrap_used)]
use std::sync::Arc;
use hashbrown::hash_map::Entry;
use hashbrown::{HashMap, HashSet};
use rnix::ast;
use rowan::ast::AstNode;
use crate::error::{Error, Result};
use crate::ir::{Attr, AttrSet, ConcatStrings, ExprId, Ir, Select, Str, SymId};
@@ -21,7 +24,12 @@ pub fn maybe_thunk(mut expr: ast::Expr, ctx: &mut impl DowngradeContext) -> Resu
}
};
match expr {
Error(error) => return Err(self::Error::downgrade_error(error.to_string())),
Error(error) => {
let span = error.syntax().text_range();
return Err(self::Error::downgrade_error(error.to_string())
.with_span(span)
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
Ident(ident) => return ident.downgrade(ctx),
Literal(lit) => return lit.downgrade(ctx),
Str(str) => return str.downgrade(ctx),
@@ -46,19 +54,28 @@ pub fn maybe_thunk(mut expr: ast::Expr, ctx: &mut impl DowngradeContext) -> Resu
_ => unreachable!(),
}?;
Ok(ctx.new_expr(Ir::Thunk(id)))
Ok(ctx.new_expr(
Thunk {
inner: id,
// span: ctx.get_span(id),
// FIXME: span
span: synthetic_span()
}
.to_ir(),
))
}
/// Downgrades the entries of an attribute set.
/// This handles `inherit` and `attrpath = value;` entries.
pub fn downgrade_attrs(
attrs: impl ast::HasEntry,
attrs: impl ast::HasEntry + AstNode,
ctx: &mut impl DowngradeContext,
) -> Result<AttrSet> {
let entries = attrs.entries();
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span: attrs.syntax().text_range(),
};
for entry in entries {
@@ -75,13 +92,14 @@ pub fn downgrade_attrs(
/// This is a stricter version of `downgrade_attrs` that disallows dynamic attributes,
/// as `let` bindings must be statically known.
pub fn downgrade_static_attrs(
attrs: impl ast::HasEntry,
attrs: impl ast::HasEntry + AstNode,
ctx: &mut impl DowngradeContext,
) -> Result<HashMap<SymId, ExprId>> {
let entries = attrs.entries();
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span: attrs.syntax().text_range(),
};
for entry in entries {
@@ -111,13 +129,16 @@ pub fn downgrade_inherit(
None
};
for attr in inherit.attrs() {
let span = attr.syntax().text_range();
let ident = match downgrade_attr(attr, ctx)? {
Attr::Str(ident) => ident,
_ => {
// `inherit` does not allow dynamic attributes.
return Err(Error::downgrade_error(
"dynamic attributes not allowed in inherit".to_string(),
));
)
.with_span(span)
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
};
let expr = if let Some(expr) = from {
@@ -126,19 +147,28 @@ pub fn downgrade_inherit(
expr,
attrpath: vec![Attr::Str(ident)],
default: None,
span,
}
.to_ir(),
);
ctx.new_expr(Ir::Thunk(select_expr))
ctx.new_expr(
Thunk {
inner: select_expr,
span,
}
.to_ir(),
)
} else {
ctx.lookup(ident)?
ctx.lookup(ident, span)?
};
match stcs.entry(ident) {
Entry::Occupied(occupied) => {
return Err(Error::downgrade_error(format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(*occupied.key()))
)));
))
.with_span(span)
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
Entry::Vacant(vacant) => vacant.insert(expr),
};
@@ -151,6 +181,7 @@ pub fn downgrade_inherit(
pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<Attr> {
use ast::Attr::*;
use ast::InterpolPart::*;
let span = attr.syntax().text_range();
match attr {
Ident(ident) => Ok(Attr::Str(ctx.new_sym(ident.to_string()))),
Str(string) => {
@@ -170,11 +201,13 @@ pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Resul
let parts = parts
.into_iter()
.map(|part| match part {
Literal(lit) => Ok(ctx.new_expr(self::Str { val: lit }.to_ir())),
Literal(lit) => Ok(ctx.new_expr(self::Str { val: lit, span }.to_ir())),
Interpolation(interpol) => interpol.expr().unwrap().downgrade(ctx),
})
.collect::<Result<Vec<_>>>()?;
Ok(Attr::Dynamic(ctx.new_expr(ConcatStrings { parts }.to_ir())))
Ok(Attr::Dynamic(
ctx.new_expr(ConcatStrings { parts, span }.to_ir()),
))
}
}
Dynamic(dynamic) => Ok(Attr::Dynamic(dynamic.expr().unwrap().downgrade(ctx)?)),
@@ -210,11 +243,14 @@ pub fn downgrade_static_attrpathvalue(
attrs: &mut AttrSet,
ctx: &mut impl DowngradeContext,
) -> Result<()> {
let path = downgrade_attrpath(value.attrpath().unwrap(), ctx)?;
let attrpath_node = value.attrpath().unwrap();
let path = downgrade_attrpath(attrpath_node.clone(), ctx)?;
if path.iter().any(|attr| matches!(attr, Attr::Dynamic(_))) {
return Err(Error::downgrade_error(
"dynamic attributes not allowed in let bindings".to_string(),
));
)
.with_span(attrpath_node.syntax().text_range())
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
let value = value.value().unwrap().downgrade(ctx)?;
attrs.insert(path, value, ctx)
@@ -249,21 +285,26 @@ where
{
let mut param_syms = Vec::new();
let mut param_defaults = Vec::new();
let mut param_spans = Vec::new();
let mut seen_params = HashSet::new();
for entry in pat_entries {
let sym = ctx.new_sym(entry.ident().unwrap().to_string());
let span = entry.ident().unwrap().syntax().text_range();
if !seen_params.insert(sym) {
return Err(Error::downgrade_error(format!(
"duplicate parameter '{}'",
format_symbol(ctx.get_sym(sym))
)));
))
.with_span(span)
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
let default_ast = entry.default();
param_syms.push(sym);
param_defaults.push(default_ast);
param_spans.push(span);
}
let mut binding_keys: Vec<SymId> = param_syms.clone();
@@ -292,7 +333,11 @@ where
|ctx, sym_to_slot| {
let mut bindings = HashMap::new();
for (sym, default_ast) in param_syms.iter().zip(param_defaults.iter()) {
for ((sym, default_ast), span) in param_syms
.iter()
.zip(param_defaults.iter())
.zip(param_spans.iter())
{
let slot = *sym_to_slot.get(sym).unwrap();
ctx.set_current_binding(Some(slot));
@@ -307,6 +352,7 @@ where
expr: arg,
attrpath: vec![Attr::Str(*sym)],
default,
span: *span,
}
.to_ir(),
);
@@ -387,12 +433,23 @@ where
for (sym, slot) in binding_keys.iter().copied().zip(slots.iter()) {
if let Some(&expr) = bindings.get(&sym) {
ctx.replace_expr(*slot, Ir::Thunk(expr));
ctx.replace_expr(
*slot,
Thunk {
inner: expr,
// span: ctx.get_span(expr),
// FIXME: span
span: synthetic_span()
}
.to_ir(),
);
} else {
return Err(Error::downgrade_error(format!(
"binding '{}' not found",
format_symbol(ctx.get_sym(sym))
)));
))
.with_span(synthetic_span())
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
}
@@ -430,7 +487,9 @@ where
return Err(Error::downgrade_error(format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(sym))
)));
))
.with_span(ident.syntax().text_range())
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
}
}
@@ -448,7 +507,9 @@ where
return Err(Error::downgrade_error(format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(sym))
)));
))
.with_span(ident.syntax().text_range())
.with_source(ctx.get_current_source().unwrap_or_else(|| Arc::from(""))));
}
}
} else if attrs_vec.len() > 1 {
@@ -471,6 +532,7 @@ where
let mut temp_attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span: synthetic_span()
};
for entry in entries {

View File

@@ -8,9 +8,10 @@ pub mod value;
mod codegen;
mod fetcher;
mod ir;
mod nix_hash;
mod nar;
mod nix_hash;
mod runtime;
mod sourcemap;
mod store;
#[global_allocator]

View File

@@ -1,6 +1,6 @@
use std::env;
use std::io::IsTerminal;
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
use tracing_subscriber::{EnvFilter, Layer, fmt, layer::SubscriberExt, util::SubscriberInitExt};
pub fn init_logging() {
let is_terminal = std::io::stderr().is_terminal();
@@ -19,9 +19,7 @@ pub fn init_logging() {
.with_level(true);
let fmt_layer = if show_time {
fmt_layer
.with_timer(fmt::time::uptime())
.boxed()
fmt_layer.with_timer(fmt::time::uptime()).boxed()
} else {
fmt_layer.without_time().boxed()
};
@@ -30,34 +28,20 @@ pub fn init_logging() {
.with(filter)
.with(fmt_layer)
.init();
init_miette_handler();
}
#[macro_export]
macro_rules! trace_span {
($name:expr) => {
tracing::trace_span!($name)
};
($name:expr, $($field:tt)*) => {
tracing::trace_span!($name, $($field)*)
};
}
#[macro_export]
macro_rules! debug_span {
($name:expr) => {
tracing::debug_span!($name)
};
($name:expr, $($field:tt)*) => {
tracing::debug_span!($name, $($field)*)
};
}
#[macro_export]
macro_rules! info_span {
($name:expr) => {
tracing::info_span!($name)
};
($name:expr, $($field:tt)*) => {
tracing::info_span!($name, $($field)*)
};
fn init_miette_handler() {
let is_terminal = std::io::stderr().is_terminal();
miette::set_hook(Box::new(move |_| {
Box::new(
miette::MietteHandlerOpts::new()
.terminal_links(is_terminal)
.unicode(is_terminal)
.color(is_terminal)
.build(),
)
}))
.ok();
}

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::path::{Component, Path, PathBuf};
use std::sync::Once;
use std::sync::{Arc, Once};
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
use deno_error::JsErrorClass;
@@ -17,6 +17,7 @@ pub(crate) trait RuntimeContext: 'static {
fn get_current_dir(&self) -> &Path;
fn set_current_file(&mut self, path: PathBuf);
fn compile_code(&mut self, code: &str) -> Result<String>;
fn get_current_source(&self) -> Option<Arc<str>>;
}
fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
@@ -134,8 +135,7 @@ fn op_resolve_path(
dir.push(path);
dir
} else {
let mut dir = std::env::home_dir()
.ok_or("home dir not defined")?;
let mut dir = std::env::home_dir().ok_or("home dir not defined")?;
dir.push(&path[2..]);
dir
};
@@ -434,7 +434,49 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let global_value = self
.js_runtime
.execute_script("<eval>", script)
.map_err(|e| Error::eval_error(format!("{}", e.get_message()), e.stack))?;
.map_err(|e| {
let msg = format!("{}", e.get_message());
let stack_str = e.stack.as_ref().map(|s| s.to_string());
let mut error = Error::eval_error(msg.clone(), None);
// Parse Nix stack trace frames
if let Some(ref stack) = stack_str {
let frames = crate::error::parse_nix_stack(stack);
if !frames.is_empty() {
// Get the last frame (where error occurred) for span
if let Some(last_frame) = frames.last() {
let span = last_frame.span;
error = error.with_span(span);
}
// Format stack trace (reversed, newest at bottom)
let trace_lines = crate::error::format_stack_trace(&frames);
if !trace_lines.is_empty() {
let formatted_trace = trace_lines.join("\n");
error = Error::eval_error(msg, Some(formatted_trace));
// Re-apply span after recreating error
if let Some(last_frame) = frames.last() {
let span = last_frame.span;
error = error.with_span(span);
}
}
// Get current source from Context
let op_state = self.js_runtime.op_state();
let op_state_borrow = op_state.borrow();
if let Some(ctx) = op_state_borrow.try_borrow::<Ctx>() {
if let Some(source) = ctx.get_current_source() {
error = error.with_source(source);
}
}
}
}
error
})?;
// Retrieve scope from JsRuntime
deno_core::scope!(scope, self.js_runtime);

110
nix-js/src/sourcemap.rs Normal file
View File

@@ -0,0 +1,110 @@
use base64::{engine::general_purpose::STANDARD, Engine};
use rnix::TextRange;
use sourcemap::{SourceMap, SourceMapBuilder};
use std::sync::Arc;
pub struct NixSourceMapBuilder {
builder: SourceMapBuilder,
source_name: String,
source_content: Arc<str>,
generated_code: String,
}
impl NixSourceMapBuilder {
pub fn new(source_name: impl Into<String>, source_content: Arc<str>) -> Self {
let mut builder = SourceMapBuilder::new(None);
let source_name = source_name.into();
builder.add_source(&source_name);
builder.set_source_contents(0, Some(&source_content));
Self {
builder,
source_name,
source_content,
generated_code: String::new(),
}
}
pub fn add_mapping(&mut self, js_offset: usize, nix_span: TextRange) {
let (js_line, js_col) = byte_to_line_col(&self.generated_code, js_offset);
let (nix_line, nix_col) = byte_to_line_col(&self.source_content, nix_span.start().into());
self.builder.add_raw(
js_line,
js_col,
nix_line,
nix_col,
Some(0),
None,
false,
);
}
pub fn set_generated_code(&mut self, code: String) {
self.generated_code = code;
}
pub fn build(self) -> Result<(SourceMap, String), sourcemap::Error> {
let sourcemap = self.builder.into_sourcemap();
let mut buf = Vec::new();
sourcemap.to_writer(&mut buf)?;
let encoded = STANDARD.encode(&buf);
let data_url = format!(
"data:application/json;charset=utf-8;base64,{}",
encoded
);
Ok((sourcemap, data_url))
}
}
fn byte_to_line_col(text: &str, byte_offset: usize) -> (u32, u32) {
let mut line = 0;
let mut col = 0;
let mut current_offset = 0;
for ch in text.chars() {
if current_offset >= byte_offset {
break;
}
if ch == '\n' {
line += 1;
col = 0;
} else {
col += 1;
}
current_offset += ch.len_utf8();
}
(line, col)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_byte_to_line_col() {
let text = "line1\nline2\nline3";
assert_eq!(byte_to_line_col(text, 0), (0, 0));
assert_eq!(byte_to_line_col(text, 5), (0, 5));
assert_eq!(byte_to_line_col(text, 6), (1, 0));
assert_eq!(byte_to_line_col(text, 12), (2, 0));
}
#[test]
fn test_sourcemap_builder() {
let source = Arc::<str>::from("let x = 1; in x");
let mut builder = NixSourceMapBuilder::new("test.nix", source);
let span = TextRange::new(4.into(), 5.into());
builder.add_mapping(0, span);
let result = builder.build();
assert!(result.is_ok());
}
}

View File

@@ -48,7 +48,7 @@ pub trait Store: Send + Sync {
pub enum StoreBackend {
Simulated(SimulatedStore),
#[cfg(feature = "daemon")]
Daemon(DaemonStore),
Daemon(Box<DaemonStore>),
}
impl StoreBackend {
@@ -56,12 +56,14 @@ impl StoreBackend {
match config.mode {
#[cfg(feature = "daemon")]
StoreMode::Daemon => {
let daemon = DaemonStore::connect(&config.daemon_socket)?;
let daemon = Box::new(DaemonStore::connect(&config.daemon_socket)?);
Ok(StoreBackend::Daemon(daemon))
}
#[cfg(not(feature = "daemon"))]
StoreMode::Daemon => {
tracing::warn!("Daemon mode not available (nix-js not compiled with 'daemon' feature), falling back to simulated store");
tracing::warn!(
"Daemon mode not available (nix-js not compiled with 'daemon' feature), falling back to simulated store"
);
let simulated = SimulatedStore::new()?;
Ok(StoreBackend::Simulated(simulated))
}
@@ -72,17 +74,11 @@ impl StoreBackend {
#[cfg(feature = "daemon")]
StoreMode::Auto => match DaemonStore::connect(&config.daemon_socket) {
Ok(daemon) => {
tracing::debug!(
"Using nix-daemon at {}",
config.daemon_socket.display()
);
Ok(StoreBackend::Daemon(daemon))
tracing::debug!("Using nix-daemon at {}", config.daemon_socket.display());
Ok(StoreBackend::Daemon(Box::new(daemon)))
}
Err(e) => {
tracing::warn!(
"Daemon unavailable ({}), using simulated store",
e
);
tracing::warn!("Daemon unavailable ({}), using simulated store", e);
let simulated = SimulatedStore::new()?;
Ok(StoreBackend::Simulated(simulated))
}
@@ -99,7 +95,7 @@ impl StoreBackend {
match self {
StoreBackend::Simulated(s) => s,
#[cfg(feature = "daemon")]
StoreBackend::Daemon(d) => d,
StoreBackend::Daemon(d) => d.as_ref(),
}
}
}

View File

@@ -24,10 +24,7 @@ impl StoreConfig {
Ok("simulated") => StoreMode::Simulated,
Ok("auto") | Err(_) => StoreMode::Auto,
Ok(other) => {
tracing::warn!(
"Invalid NIX_JS_STORE_MODE '{}', using 'auto'",
other
);
tracing::warn!("Invalid NIX_JS_STORE_MODE '{}', using 'auto'", other);
StoreMode::Auto
}
};

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use std::io::{Error as IoError, ErrorKind as IoErrorKind, Result as IoResult};
use std::path::Path;
@@ -180,18 +182,12 @@ impl Store for DaemonStore {
let nar_data = crate::nar::pack_nar(source_path)?;
let nar_hash_hex = {
let nar_hash: [u8; 32] = {
let mut hasher = Sha256::new();
hasher.update(&nar_data);
hex::encode(hasher.finalize())
hasher.finalize().into()
};
let nar_hash_bytes = hex::decode(&nar_hash_hex)
.map_err(|e| Error::internal(format!("Invalid nar hash: {}", e)))?;
let mut nar_hash_arr = [0u8; 32];
nar_hash_arr.copy_from_slice(&nar_hash_bytes);
let ca_hash = CAHash::Nar(NixHash::Sha256(nar_hash_arr));
let ca_hash = CAHash::Nar(NixHash::Sha256(nar_hash));
let ref_store_paths: std::result::Result<Vec<StorePath<String>>, _> = references
.iter()
@@ -215,7 +211,7 @@ impl Store for DaemonStore {
deriver: None,
nar_hash: unsafe {
std::mem::transmute::<[u8; 32], nix_compat::nix_daemon::types::NarHash>(
nar_hash_arr,
nar_hash,
)
},
references: ref_store_paths,
@@ -258,17 +254,12 @@ impl Store for DaemonStore {
let nar_data = crate::nar::pack_nar(temp_file.path())?;
let nar_hash_hex = {
let nar_hash: [u8; 32] = {
let mut hasher = Sha256::new();
hasher.update(&nar_data);
hex::encode(hasher.finalize())
hasher.finalize().into()
};
let nar_hash_bytes = hex::decode(&nar_hash_hex)
.map_err(|e| Error::internal(format!("Invalid nar hash: {}", e)))?;
let mut nar_hash_arr = [0u8; 32];
nar_hash_arr.copy_from_slice(&nar_hash_bytes);
let content_hash = {
let mut hasher = Sha256::new();
hasher.update(content.as_bytes());
@@ -296,7 +287,7 @@ impl Store for DaemonStore {
deriver: None,
nar_hash: unsafe {
std::mem::transmute::<[u8; 32], nix_compat::nix_daemon::types::NarHash>(
nar_hash_arr,
nar_hash,
)
},
references: ref_store_paths,

View File

@@ -24,10 +24,6 @@ impl SimulatedStore {
Ok(Self { cache, store_dir })
}
pub fn cache(&self) -> &FetcherCache {
&self.cache
}
}
impl Store for SimulatedStore {

View File

@@ -87,7 +87,7 @@ mod tests {
let valid_paths = vec![
"/nix/store/0123456789abcdfghijklmnpqrsvwxyz-hello",
"/nix/store/abcdfghijklmnpqrsvwxyz0123456789-hello-1.0",
"/nix/store/00000000000000000000000000000000-test_+-.?="
"/nix/store/00000000000000000000000000000000-test_+-.?=",
];
for path in valid_paths {
@@ -109,15 +109,36 @@ mod tests {
("/nix/store/tooshort-name", "hash too short"),
(
"/nix/store/abc123defghijklmnopqrstuvwxyz123-name",
"hash too long"
"hash too long",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd123e-name",
"e in hash",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd123o-name",
"o in hash",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd123u-name",
"u in hash",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd123t-name",
"t in hash",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd1234-.name",
"name starts with dot",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd1234-na/me",
"slash in name",
),
(
"/nix/store/abcd1234abcd1234abcd1234abcd1234",
"missing name",
),
("/nix/store/abcd1234abcd1234abcd1234abcd123e-name", "e in hash"),
("/nix/store/abcd1234abcd1234abcd1234abcd123o-name", "o in hash"),
("/nix/store/abcd1234abcd1234abcd1234abcd123u-name", "u in hash"),
("/nix/store/abcd1234abcd1234abcd1234abcd123t-name", "t in hash"),
("/nix/store/abcd1234abcd1234abcd1234abcd1234-.name", "name starts with dot"),
("/nix/store/abcd1234abcd1234abcd1234abcd1234-na/me", "slash in name"),
("/nix/store/abcd1234abcd1234abcd1234abcd1234", "missing name"),
];
for (path, reason) in invalid_paths {

View File

@@ -9,9 +9,13 @@ fn init() {
static INIT: Once = Once::new();
INIT.call_once(|| {
#[cfg(not(feature = "daemon"))]
unsafe { std::env::set_var("NIX_JS_STORE_MODE", "simulated") };
unsafe {
std::env::set_var("NIX_JS_STORE_MODE", "simulated")
};
#[cfg(feature = "daemon")]
unsafe { std::env::set_var("NIX_JS_STORE_MODE", "daemon") };
unsafe {
std::env::set_var("NIX_JS_STORE_MODE", "daemon")
};
});
}