refactor: tidy; fix runtime path resolution

This commit is contained in:
2026-01-17 13:45:44 +08:00
parent f2fc12026f
commit 09bfbca64a
21 changed files with 590 additions and 575 deletions

15
Justfile Normal file
View File

@@ -0,0 +1,15 @@
# Convenience recipes for running the evaluator binaries.
# [no-exit-message] suppresses just's own error banner when a recipe fails;
# the leading `@` keeps the command line itself from being echoed.
# Start the interactive REPL (debug build).
[no-exit-message]
@repl:
cargo run --bin repl
# Evaluate a single expression (debug build); `{{expr}}` is the recipe argument.
[no-exit-message]
@eval expr:
cargo run --bin eval -- '{{expr}}'
# Start the interactive REPL (release build).
[no-exit-message]
@replr:
cargo run --bin repl --release
# Evaluate a single expression (release build).
[no-exit-message]
@evalr expr:
cargo run --bin eval --release -- '{{expr}}'

View File

@@ -18,7 +18,7 @@
{
default = pkgs.mkShell {
packages = with pkgs; [
(fenix.packages.${system}.stable.withComponents [
(fenix.packages.${system}.latest.withComponents [
"cargo"
"clippy"
"rust-src"
@@ -30,6 +30,7 @@
lldb
valgrind
hyperfine
just
nodejs
nodePackages.npm

View File

@@ -3,7 +3,7 @@
* Implemented via Rust ops exposed through deno_core
*/
import { forceAttrs, forceBool, forceString, forceNixPath } from "../type-assert";
import { forceAttrs, forceBool, forceString } from "../type-assert";
import type { NixValue, NixAttrs } from "../types";
import { isNixPath } from "../types";
import { force } from "../thunk";
@@ -18,7 +18,6 @@ export const importFunc = (path: NixValue): NixValue => {
// Call Rust op - returns JS code string
const code = Deno.core.ops.op_import(pathStr);
return Function(`return (${code})`)();
};

View File

@@ -3,18 +3,17 @@
*/
import type { NixValue, NixAttrs, NixBool, NixString, NixPath } from "./types";
import { forceAttrs, forceFunction, forceString, typeName } from "./type-assert";
import { forceAttrs, forceBool, forceFunction, forceString, typeName } from "./type-assert";
import { isAttrs } from "./builtins/type-check";
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import {
type NixStringContext,
mkStringWithContext,
isStringWithContext,
getStringContext,
} from "./string-context";
import { force } from "./thunk";
import { mkPath } from "./path";
import { isNixPath } from "./types";
import { CatchableError, isNixPath } from "./types";
/**
* Concatenate multiple values into a string or path with context
@@ -98,9 +97,9 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
* @param path - Path string (may be relative or absolute)
* @returns NixPath object with absolute path
*/
export const resolvePath = (path: NixValue): NixPath => {
const path_str = forceString(path);
const resolved = Deno.core.ops.op_resolve_path(path_str);
export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
const pathStr = forceString(path);
const resolved = Deno.core.ops.op_resolve_path(currentDir, pathStr);
return mkPath(resolved);
};
@@ -226,3 +225,10 @@ export const call = (func: NixValue, arg: NixValue): NixValue => {
}
throw new Error(`attempt to call something which is not a function but ${typeName(forcedFunc)}`);
};
/**
 * Runtime support for Nix `assert cond; expr`.
 *
 * Forces the assertion to a boolean; returns `expr` when it holds, otherwise
 * throws a CatchableError (so `builtins.tryEval` can observe the failure)
 * whose message embeds the original assertion source text.
 *
 * @param assertion - the asserted condition (forced via forceBool)
 * @param expr - the value produced when the assertion holds
 * @param assertionRaw - source text of the assertion, used in the error message
 */
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string): NixValue => {
if (forceBool(assertion)) {
return expr;
}
throw new CatchableError(`assertion '${assertionRaw}' failed`)
}

View File

@@ -13,6 +13,7 @@ import {
hasAttr,
concatStringsWithContext,
call,
assert,
} from "./helpers";
import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
@@ -34,6 +35,7 @@ export const Nix = {
IS_PATH,
DEBUG_THUNKS,
assert,
call,
hasAttr,
select,

View File

@@ -35,7 +35,7 @@ declare global {
namespace Deno {
namespace core {
namespace ops {
function op_resolve_path(path: string): string;
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_read_file(path: string): string;
function op_path_exists(path: string): boolean;

View File

@@ -1,14 +1,28 @@
use std::path::Path;
use itertools::Itertools as _;
use crate::ir::*;
pub(crate) trait Compile<Ctx: CodegenContext> {
/// Compile an IR expression tree into a single JavaScript expression string.
///
/// The generated code is wrapped in a parenthesized comma-expression that:
/// - enables thunk debugging when the `NIX_JS_DEBUG_THUNKS` environment
///   variable is set, and
/// - binds `currentDir` to the context's current directory so relative paths
///   can be resolved at runtime (consumed by `Nix.resolvePath(currentDir, …)`).
pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
let code = expr.compile(ctx);
let debug_prefix = if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
"Nix.DEBUG_THUNKS.enabled=true,"
} else {
""
};
// Quote-escape the directory so it is a valid JS string literal.
let cur_dir = ctx.get_current_dir().display().to_string().escape_quote();
format!("({}currentDir={},{})", debug_prefix, cur_dir, code)
}
/// Internal code-generation trait: each IR node emits itself as a JavaScript
/// expression string given a codegen context. Private to this module; callers
/// go through the free function `compile`.
trait Compile<Ctx: CodegenContext> {
fn compile(&self, ctx: &Ctx) -> String;
}
/// Context required during code generation: lookup of IR nodes and interned
/// symbols, plus the directory used to resolve relative paths in the output.
pub(crate) trait CodegenContext {
/// Resolve an expression id to its IR node.
fn get_ir(&self, id: ExprId) -> &Ir;
/// Resolve an interned symbol id to its string.
fn get_sym(&self, id: SymId) -> &str;
/// Directory bound to `currentDir` in the compiled output.
fn get_current_dir(&self) -> &Path;
}
trait EscapeQuote {
@@ -45,7 +59,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
Ir::Path(p) => {
// Path needs runtime resolution
let path_expr = ctx.get_ir(p.expr).compile(ctx);
format!("Nix.resolvePath({})", path_expr)
format!("Nix.resolvePath(currentDir,{})", path_expr)
}
&Ir::If(If { cond, consq, alter }) => {
let cond = ctx.get_ir(cond).compile(ctx);
@@ -75,11 +89,17 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
}
Ir::ConcatStrings(x) => x.compile(ctx),
Ir::HasAttr(x) => x.compile(ctx),
&Ir::Assert(Assert { assertion, expr }) => {
&Ir::Assert(Assert {
assertion,
expr,
ref assertion_raw,
}) => {
let assertion = ctx.get_ir(assertion).compile(ctx);
let expr_dbg = ctx.get_ir(expr);
let expr = ctx.get_ir(expr).compile(ctx);
format!("({assertion})?({expr}):(()=>{{throw new Error(`assertion failed ({expr_dbg:#?})`)}})()")
format!(
"Nix.assert({assertion},{expr},{})",
assertion_raw.escape_quote()
)
}
}
}
@@ -102,13 +122,13 @@ impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
Leq => format!("Nix.op.lte({},{})", lhs, rhs),
Geq => format!("Nix.op.gte({},{})", lhs, rhs),
// Short-circuit operators: use JavaScript native && and ||
And => format!("Nix.force({}) && Nix.force({})", lhs, rhs),
Or => format!("Nix.force({}) || Nix.force({})", lhs, rhs),
Impl => format!("(!Nix.force({}) || Nix.force({}))", lhs, rhs),
And => format!("Nix.force({})&&Nix.force({})", lhs, rhs),
Or => format!("Nix.force({})||Nix.force({})", lhs, rhs),
Impl => format!("(!Nix.force({})||Nix.force({}))", lhs, rhs),
Con => format!("Nix.op.concat({},{})", lhs, rhs),
Upd => format!("Nix.op.update({},{})", lhs, rhs),
PipeL => format!("Nix.call({}, {})", rhs, lhs),
PipeR => format!("Nix.call({}, {})", lhs, rhs),
PipeL => format!("Nix.call({},{})", rhs, lhs),
PipeR => format!("Nix.call({},{})", lhs, rhs),
}
}
}
@@ -183,7 +203,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Call {
fn compile(&self, ctx: &Ctx) -> String {
let func = ctx.get_ir(self.func).compile(ctx);
let arg = ctx.get_ir(self.arg).compile(ctx);
format!("Nix.call({func}, {arg})")
format!("Nix.call({func},{arg})")
}
}
@@ -231,7 +251,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Let {
}
let body = ctx.get_ir(self.body).compile(ctx);
format!("(()=>{{{}; return {}}})()", js_statements.join(";"), body)
format!("(()=>{{{};return {}}})()", js_statements.join(";"), body)
}
}
@@ -247,9 +267,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
})
.join(",");
if let Some(default) = self.default {
format!("Nix.selectWithDefault({lhs}, [{attrpath}], {})", ctx.get_ir(default).compile(ctx))
format!(
"Nix.selectWithDefault({lhs},[{attrpath}],{})",
ctx.get_ir(default).compile(ctx)
)
} else {
format!("Nix.select({lhs}, [{attrpath}])")
format!("Nix.select({lhs},[{attrpath}])")
}
}
}
@@ -261,16 +284,16 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
for (&sym, &expr) in &self.stcs {
let key = ctx.get_sym(sym);
let value = ctx.get_ir(expr).compile(ctx);
attrs.push(format!("{}: {}", key.escape_quote(), value));
attrs.push(format!("{}:{}", key.escape_quote(), value));
}
for (key_expr, value_expr) in &self.dyns {
let key = ctx.get_ir(*key_expr).compile(ctx);
let value = ctx.get_ir(*value_expr).compile(ctx);
attrs.push(format!("[{}]: {}", key, value));
attrs.push(format!("[{}]:{}", key, value));
}
format!("{{{}}}", attrs.join(", "))
format!("{{{}}}", attrs.join(","))
}
}
@@ -308,6 +331,6 @@ impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr {
Attr::Dynamic(expr_id) => ctx.get_ir(*expr_id).compile(ctx),
})
.join(",");
format!("Nix.hasAttr({lhs}, [{attrpath}])")
format!("Nix.hasAttr({lhs},[{attrpath}])")
}
}

View File

@@ -1,25 +1,19 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::ptr::NonNull;
use hashbrown::HashMap;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use petgraph::graphmap::DiGraphMap;
use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, Compile};
use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result};
use crate::ir::{Builtin, DowngradeContext, ExprId, Ir, SymId};
use crate::runtime::{Runtime, RuntimeCtx};
use crate::ir::{ArgId, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, SymId, ToIr as _};
use crate::runtime::{Runtime, RuntimeContext};
use crate::value::Value;
use downgrade::DowngradeCtx;
use drop_guard::{PathDropGuard, PathStackProvider};
mod downgrade;
mod drop_guard;
mod private {
use super::*;
use std::ops::DerefMut;
use std::ptr::NonNull;
pub struct CtxPtr(NonNull<Ctx>);
@@ -36,18 +30,13 @@ mod private {
unsafe { self.0.as_mut() }
}
}
impl PathStackProvider for CtxPtr {
fn path_stack(&mut self) -> &mut Vec<PathBuf> {
&mut self.as_mut().path_stack
}
}
impl RuntimeCtx for CtxPtr {
fn get_current_dir(&self) -> PathBuf {
impl RuntimeContext for CtxPtr {
fn get_current_dir(&self) -> &Path {
self.as_ref().get_current_dir()
}
fn push_path_stack(&mut self, path: PathBuf) -> impl DerefMut<Target = Self> {
PathDropGuard::new(path, self)
fn set_current_file(&mut self, path: PathBuf) {
self.as_mut().current_file = Some(path);
}
fn compile_code(&mut self, expr: &str) -> Result<String> {
self.as_mut().compile_code(expr)
@@ -76,12 +65,16 @@ impl Context {
}
pub fn eval_code(&mut self, expr: &str) -> Result<Value> {
// Initialize `path_stack` with current directory for relative path resolution
let mut guard = PathDropGuard::new_cwd(&mut self.ctx)?;
let ctx = guard.as_ctx();
let code = ctx.compile_code(expr)?;
self.runtime.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
self.ctx.current_file = Some(
std::env::current_dir()
.map_err(|err| {
Error::internal(format!("Failed to get current working dir: {err}"))
})?
.join("__eval__.nix"),
);
let code = self.compile_code(expr)?;
self.runtime
.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
}
pub fn compile_code(&mut self, expr: &str) -> Result<String> {
@@ -98,7 +91,7 @@ pub(crate) struct Ctx {
irs: Vec<Ir>,
symbols: DefaultStringInterner,
global: NonNull<HashMap<SymId, ExprId>>,
path_stack: Vec<PathBuf>,
current_file: Option<PathBuf>,
}
impl Default for Ctx {
@@ -159,7 +152,7 @@ impl Default for Ctx {
symbols,
irs,
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(global))) },
path_stack: Vec::new(),
current_file: None,
}
}
}
@@ -174,15 +167,12 @@ impl Ctx {
DowngradeCtx::new(self, global_ref)
}
pub(crate) fn get_current_dir(&self) -> PathBuf {
self.path_stack
.last()
.expect(
"path_stack should never be empty when get_current_dir is called. this is a bug",
)
pub(crate) fn get_current_dir(&self) -> &Path {
self.current_file
.as_ref()
.expect("current_file is not set")
.parent()
.expect("path in path_stack should always have a parent dir. this is a bug")
.to_path_buf()
.expect("current_file doesn't have a parent dir")
}
fn compile_code(&mut self, expr: &str) -> Result<String> {
@@ -196,14 +186,7 @@ impl Ctx {
let root = self
.downgrade_ctx()
.downgrade(root.tree().expr().unwrap())?;
let code = self.get_ir(root).compile(self);
let debug_prefix = if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
"Nix.DEBUG_THUNKS.enabled=true,"
} else {
""
};
let code = format!("({}{})", debug_prefix, code);
let code = compile(self.get_ir(root), self);
#[cfg(debug_assertions)]
eprintln!("[DEBUG] generated code: {}", &code);
Ok(code)
@@ -214,14 +197,301 @@ impl CodegenContext for Ctx {
fn get_ir(&self, id: ExprId) -> &Ir {
self.irs.get(id.0).expect("ExprId out of bounds")
}
fn get_sym(&self, id: SymId) -> &str {
self.symbols.resolve(id).expect("SymId out of bounds")
}
}
impl PathStackProvider for Ctx {
fn path_stack(&mut self) -> &mut Vec<PathBuf> {
&mut self.path_stack
fn get_current_dir(&self) -> &std::path::Path {
self.get_current_dir()
}
}
/// Tracks which let bindings reference which others inside a single let
/// scope, so recursive binding groups can later be detected via SCCs
/// (see `pop_dep_tracker`).
struct DependencyTracker {
// Directed edge A -> B means binding A's value references binding B.
graph: DiGraphMap<ExprId, ()>,
// The binding whose value expression is currently being lowered, if any.
current_binding: Option<ExprId>,
// The set of binding slots belonging to this let scope.
let_scope_exprs: HashSet<ExprId>,
// The outer binding that owns this tracker (for nested let scopes in function params)
owner_binding: Option<ExprId>,
}
/// One entry on the lexical scope stack consulted during name resolution.
enum Scope<'ctx> {
// Global bindings shared across the whole evaluation.
Global(&'ctx HashMap<SymId, ExprId>),
// A `let … in` scope: names bound to their value expressions.
Let(HashMap<SymId, ExprId>),
// A single function parameter bound to its argument expression.
Param(SymId, ExprId),
// A `with <namespace>;` scope; looked up dynamically as a fallback.
With(ExprId),
}
/// RAII guard that pops the scope pushed by the `with_*_scope` helpers when
/// it goes out of scope, even on early return or panic.
struct ScopeGuard<'a, 'ctx> {
ctx: &'a mut DowngradeCtx<'ctx>,
}
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
fn drop(&mut self) {
// Undo the push performed immediately before the guard was created.
self.ctx.scopes.pop();
}
}
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
// Reborrow the guarded context for use inside the scoped closure.
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
self.ctx
}
}
/// Transient state for one AST-to-IR lowering ("downgrade") pass: the IR
/// nodes created so far, the scope stack for name resolution, and the
/// dependency trackers for nested let scopes.
pub struct DowngradeCtx<'ctx> {
ctx: &'ctx mut Ctx,
// IR nodes created during this pass; `None` marks a reserved/extracted slot.
irs: Vec<Option<Ir>>,
scopes: Vec<Scope<'ctx>>,
// Counter for allocating fresh function-argument ids.
arg_id: usize,
dep_tracker_stack: Vec<DependencyTracker>,
}
impl<'ctx> DowngradeCtx<'ctx> {
/// Create a fresh downgrade context whose scope stack starts with the
/// given global bindings.
fn new(ctx: &'ctx mut Ctx, global: &'ctx HashMap<SymId, ExprId>) -> Self {
Self {
scopes: vec![Scope::Global(global)],
irs: vec![],
arg_id: 0,
dep_tracker_stack: Vec::new(),
ctx,
}
}
}
/// Lowering callbacks: expression/symbol allocation, scoped name resolution,
/// and let-binding dependency tracking.
impl DowngradeContext for DowngradeCtx<'_> {
/// Append a new IR node; ids are offset past the parent `Ctx`'s IR table.
fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(Some(expr));
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
/// Allocate a fresh function-argument node with the next argument id.
fn new_arg(&mut self) -> ExprId {
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
/// Intern a symbol name, returning its id.
fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym)
}
fn get_sym(&self, id: SymId) -> &str {
self.ctx.get_sym(id)
}
/// Resolve `sym` against the scope stack, innermost scope first.
///
/// Hits in a `Let` scope additionally record a dependency edge in the
/// relevant `DependencyTracker` so recursive binding groups can be found
/// later. If no lexical scope matches, the symbol is compiled to a chain
/// of `Select`s over enclosing `with` namespaces, where each outer `with`
/// becomes the default of the next inner one (innermost takes precedence).
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
for scope in self.scopes.iter().rev() {
match scope {
&Scope::Global(global_scope) => {
if let Some(&expr) = global_scope.get(&sym) {
return Ok(expr);
}
}
Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) {
// Find which tracker contains this expression
let expr_tracker_idx = self
.dep_tracker_stack
.iter()
.position(|t| t.let_scope_exprs.contains(&expr));
// Find the innermost tracker with a current_binding
let current_tracker_idx = self
.dep_tracker_stack
.iter()
.rposition(|t| t.current_binding.is_some());
// Record dependency if both exist
if let (Some(expr_idx), Some(curr_idx)) =
(expr_tracker_idx, current_tracker_idx)
{
let current_binding = self.dep_tracker_stack[curr_idx]
.current_binding
.expect("current_binding not set");
let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
// If referencing from inner scope to outer scope
if curr_idx >= expr_idx {
let tracker = &mut self.dep_tracker_stack[expr_idx];
let from_node = current_binding;
let to_node = expr;
if curr_idx > expr_idx {
// Cross-scope reference: use owner_binding if available
if let Some(owner) = owner_binding {
tracker.graph.add_edge(owner, expr, ());
}
} else {
// Same-level reference: record directly
tracker.graph.add_edge(from_node, to_node, ());
}
}
}
// Wrap in ExprRef so every use site gets its own node.
return Ok(self.new_expr(Ir::ExprRef(expr)));
}
}
&Scope::Param(param_sym, expr) => {
if param_sym == sym {
return Ok(expr);
}
}
// `with` scopes are handled below, after lexical lookup fails.
&Scope::With(_) => (),
}
}
// Collect `with` namespaces in stack order (outermost first).
let namespaces: Vec<ExprId> = self
.scopes
.iter()
.filter_map(|scope| {
if let &Scope::With(namespace) = scope {
Some(namespace)
} else {
None
}
})
.collect();
// Build the Select chain: each iteration wraps the previous result as
// the default, so the last (innermost) namespace is consulted first.
let mut result = None;
for namespace in namespaces {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
default: result, // Link to outer With or None
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
}
/// Move an IR node out of its slot, leaving `None` behind; panics if the
/// slot was already extracted.
fn extract_expr(&mut self, id: ExprId) -> Ir {
// Translate the global id back to an index into this pass's local table.
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.take()
.expect("extract_expr called on an already extracted expr")
}
/// Fill (or overwrite) the slot `id` with `expr`.
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len();
let _ = self
.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.insert(expr);
}
/// Pre-allocate `slots` empty IR slots and return their ids, to be filled
/// later via `replace_expr`.
#[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
self.irs.extend(std::iter::repeat_with(|| None).take(slots));
(start..start + slots).map(ExprId)
}
/// Lower the whole AST, then commit all locally created IR nodes into the
/// parent `Ctx`. Panics (via unwrap) if any reserved slot was left empty.
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
let root = root.downgrade(&mut self)?;
self.ctx
.irs
.extend(self.irs.into_iter().map(Option::unwrap));
Ok(root)
}
/// Run `f` with `bindings` pushed as the innermost `let` scope; the guard
/// pops the scope when `f` returns.
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Let(bindings));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Run `f` with a single function parameter pushed as the innermost scope.
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Param(param, arg));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Run `f` with `namespace` pushed as the innermost `with` scope.
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::With(namespace));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Start tracking dependencies for a new let scope containing `slots`.
fn push_dep_tracker(&mut self, slots: &[ExprId]) {
let mut graph = DiGraphMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
graph,
current_binding: None,
let_scope_exprs,
owner_binding: None,
});
}
/// Like `push_dep_tracker`, but records `owner` as the outer binding that
/// owns this nested scope, so cross-scope references can be attributed.
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
let mut graph = DiGraphMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
graph,
current_binding: None,
let_scope_exprs,
owner_binding: Some(owner),
});
}
fn get_current_binding(&self) -> Option<ExprId> {
self.dep_tracker_stack
.last()
.and_then(|t| t.current_binding)
}
/// Mark which binding is currently being lowered; silently a no-op when
/// no tracker is active.
fn set_current_binding(&mut self, expr: Option<ExprId>) {
if let Some(tracker) = self.dep_tracker_stack.last_mut() {
tracker.current_binding = expr;
}
}
/// Finish the innermost let scope: compute strongly connected components
/// of the dependency graph and flag each group as recursive when it has
/// more than one member or a self-edge.
fn pop_dep_tracker(&mut self) -> Result<SccInfo> {
let tracker = self
.dep_tracker_stack
.pop()
.expect("pop_dep_tracker without active tracker");
use petgraph::algo::kosaraju_scc;
let sccs = kosaraju_scc(&tracker.graph);
let mut sccs_topo = Vec::new();
for scc_nodes in sccs.iter() {
let mut scc_exprs = Vec::new();
let mut is_recursive = scc_nodes.len() > 1;
for &expr in scc_nodes {
scc_exprs.push(expr);
// A singleton SCC is still recursive if it references itself.
if !is_recursive && tracker.graph.contains_edge(expr, expr) {
is_recursive = true;
}
}
sccs_topo.push((scc_exprs, is_recursive));
}
Ok(SccInfo { sccs: sccs_topo })
}
}

View File

@@ -1,315 +0,0 @@
use hashbrown::HashMap;
use hashbrown::HashSet;
use petgraph::Directed;
use petgraph::Graph;
use petgraph::graph::NodeIndex;
use crate::codegen::CodegenContext;
use crate::error::{Error, Result};
use crate::ir::{ArgId, Downgrade, DowngradeContext, ExprId, Ir, SymId, ToIr};
use super::{Ctx, SccInfo};
struct DependencyTracker {
expr_to_node: HashMap<ExprId, NodeIndex>,
graph: Graph<ExprId, (), Directed>,
current_binding: Option<ExprId>,
let_scope_exprs: HashSet<ExprId>,
// The outer binding that owns this tracker (for nested let scopes in function params)
owner_binding: Option<ExprId>,
}
enum Scope<'ctx> {
Global(&'ctx HashMap<SymId, ExprId>),
Let(HashMap<SymId, ExprId>),
Param(SymId, ExprId),
With(ExprId),
}
struct ScopeGuard<'a, 'ctx> {
ctx: &'a mut DowngradeCtx<'ctx>,
}
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
fn drop(&mut self) {
self.ctx.scopes.pop();
}
}
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
self.ctx
}
}
pub struct DowngradeCtx<'ctx> {
ctx: &'ctx mut Ctx,
irs: Vec<Option<Ir>>,
scopes: Vec<Scope<'ctx>>,
arg_id: usize,
dep_tracker_stack: Vec<DependencyTracker>,
}
impl<'ctx> DowngradeCtx<'ctx> {
pub fn new(ctx: &'ctx mut Ctx, global: &'ctx HashMap<SymId, ExprId>) -> Self {
Self {
scopes: vec![Scope::Global(global)],
irs: vec![],
arg_id: 0,
dep_tracker_stack: Vec::new(),
ctx,
}
}
}
impl DowngradeContext for DowngradeCtx<'_> {
fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(Some(expr));
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_arg(&mut self) -> ExprId {
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym)
}
fn get_sym(&self, id: SymId) -> &str {
self.ctx.get_sym(id)
}
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
for scope in self.scopes.iter().rev() {
match scope {
&Scope::Global(global_scope) => {
if let Some(&expr) = global_scope.get(&sym) {
return Ok(expr);
}
}
Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) {
// Find which tracker contains this expression
let expr_tracker_idx = self
.dep_tracker_stack
.iter()
.position(|t| t.let_scope_exprs.contains(&expr));
// Find the innermost tracker with a current_binding
let current_tracker_idx = self
.dep_tracker_stack
.iter()
.rposition(|t| t.current_binding.is_some());
// Record dependency if both exist
if let (Some(expr_idx), Some(curr_idx)) =
(expr_tracker_idx, current_tracker_idx)
{
let current_binding =
self.dep_tracker_stack[curr_idx].current_binding.unwrap();
let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
// If referencing from inner scope to outer scope
if curr_idx >= expr_idx {
let tracker = &mut self.dep_tracker_stack[expr_idx];
if let (Some(&from_node), Some(&to_node)) = (
tracker.expr_to_node.get(&current_binding),
tracker.expr_to_node.get(&expr),
) {
// Same-level reference: record directly
tracker.graph.add_edge(from_node, to_node, ());
} else if curr_idx > expr_idx {
// Cross-scope reference: use owner_binding if available
if let Some(owner) = owner_binding
&& let (Some(&from_node), Some(&to_node)) = (
tracker.expr_to_node.get(&owner),
tracker.expr_to_node.get(&expr),
)
{
tracker.graph.add_edge(from_node, to_node, ());
}
}
}
}
return Ok(self.new_expr(Ir::ExprRef(expr)));
}
}
&Scope::Param(param_sym, expr) => {
if param_sym == sym {
return Ok(expr);
}
}
&Scope::With(_) => (),
}
}
let namespaces: Vec<ExprId> = self
.scopes
.iter()
.filter_map(|scope| {
if let &Scope::With(namespace) = scope {
Some(namespace)
} else {
None
}
})
.collect();
let mut result = None;
for namespace in namespaces {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
default: result, // Link to outer With or None
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
}
fn extract_expr(&mut self, id: ExprId) -> Ir {
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.take()
.expect("extract_expr called on an already extracted expr")
}
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len();
let _ = self
.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.insert(expr);
}
#[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
self.irs.extend(std::iter::repeat_with(|| None).take(slots));
(start..start + slots).map(ExprId)
}
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
let root = root.downgrade(&mut self)?;
self.ctx
.irs
.extend(self.irs.into_iter().map(Option::unwrap));
Ok(root)
}
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Let(bindings));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Param(param, arg));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::With(namespace));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn push_dep_tracker(&mut self, slots: &[ExprId]) {
let mut graph = Graph::new();
let mut expr_to_node = HashMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
let node = graph.add_node(expr);
expr_to_node.insert(expr, node);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
expr_to_node,
graph,
current_binding: None,
let_scope_exprs,
owner_binding: None,
});
}
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
let mut graph = Graph::new();
let mut expr_to_node = HashMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
let node = graph.add_node(expr);
expr_to_node.insert(expr, node);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
expr_to_node,
graph,
current_binding: None,
let_scope_exprs,
owner_binding: Some(owner),
});
}
fn get_current_binding(&self) -> Option<ExprId> {
self.dep_tracker_stack
.last()
.and_then(|t| t.current_binding)
}
fn set_current_binding(&mut self, expr: Option<ExprId>) {
if let Some(tracker) = self.dep_tracker_stack.last_mut() {
tracker.current_binding = expr;
}
}
fn pop_dep_tracker(&mut self) -> Result<SccInfo> {
let tracker = self
.dep_tracker_stack
.pop()
.expect("pop_dep_tracker without active tracker");
use petgraph::algo::kosaraju_scc;
let sccs = kosaraju_scc(&tracker.graph);
let mut sccs_topo = Vec::new();
for scc_nodes in sccs.iter() {
let mut scc_exprs = Vec::new();
let mut is_recursive = scc_nodes.len() > 1;
for &node_idx in scc_nodes {
let expr = tracker.graph[node_idx];
scc_exprs.push(expr);
if !is_recursive && tracker.graph.contains_edge(node_idx, node_idx) {
is_recursive = true;
}
}
sccs_topo.push((scc_exprs, is_recursive));
}
Ok(SccInfo { sccs: sccs_topo })
}
}

View File

@@ -1,41 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use crate::error::{Error, Result};
pub trait PathStackProvider {
fn path_stack(&mut self) -> &mut Vec<PathBuf>;
}
pub struct PathDropGuard<'ctx, Ctx: PathStackProvider> {
ctx: &'ctx mut Ctx,
}
impl<'ctx, Ctx: PathStackProvider> PathDropGuard<'ctx, Ctx> {
pub fn new(path: PathBuf, ctx: &'ctx mut Ctx) -> Self {
ctx.path_stack().push(path);
Self { ctx }
}
pub fn new_cwd(ctx: &'ctx mut Ctx) -> Result<Self> {
let cwd = std::env::current_dir()
.map_err(|err| Error::downgrade_error(format!("cannot get cwd: {err}")))?;
let virtual_file = cwd.join("__eval__.nix");
ctx.path_stack().push(virtual_file);
Ok(Self { ctx })
}
pub fn as_ctx(&mut self) -> &mut Ctx {
self.ctx
}
}
impl<Ctx: PathStackProvider> Deref for PathDropGuard<'_, Ctx> {
type Target = Ctx;
fn deref(&self) -> &Self::Target {
self.ctx
}
}
impl<Ctx: PathStackProvider> DerefMut for PathDropGuard<'_, Ctx> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.ctx
}
}

View File

@@ -107,7 +107,10 @@ pub fn op_fetch_tarball(
#[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}", url, expected_hash, expected_nar_hash);
eprintln!(
"[DEBUG] fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}",
url, expected_hash, expected_nar_hash
);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new();
@@ -122,9 +125,10 @@ pub fn op_fetch_tarball(
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (with expected nar_hash)");
// Need to compute tarball hash if not cached
let tarball_hash = expected_hash.as_ref()
let tarball_hash = expected_hash
.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
@@ -135,10 +139,14 @@ pub fn op_fetch_tarball(
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
} else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (by URL, nar_hash={})", cached_nar_hash);
let tarball_hash = expected_hash.as_ref()
eprintln!(
"[DEBUG] fetchTarball: cache HIT (by URL, nar_hash={})",
cached_nar_hash
);
let tarball_hash = expected_hash
.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
@@ -175,7 +183,10 @@ pub fn op_fetch_tarball(
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: computed tarball_hash={}, nar_hash={}", tarball_hash, nar_hash);
eprintln!(
"[DEBUG] fetchTarball: computed tarball_hash={}, nar_hash={}",
tarball_hash, nar_hash
);
// Verify NAR hash if provided
if let Some(ref expected) = expected_nar_hash {
@@ -212,7 +223,10 @@ pub fn op_fetch_git(
#[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
eprintln!(
"[DEBUG] fetchGit: {} (ref: {:?}, rev: {:?})",
url, git_ref, rev
);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());

View File

@@ -186,7 +186,6 @@ fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
pub enum ArchiveError {
IoError(std::io::Error),
ZipError(zip::result::ZipError),
UnsupportedFormat(String),
}
impl std::fmt::Display for ArchiveError {
@@ -194,9 +193,6 @@ impl std::fmt::Display for ArchiveError {
match self {
ArchiveError::IoError(e) => write!(f, "I/O error: {}", e),
ArchiveError::ZipError(e) => write!(f, "ZIP error: {}", e),
ArchiveError::UnsupportedFormat(fmt) => {
write!(f, "Unsupported archive format: {}", fmt)
}
}
}
}

View File

@@ -169,7 +169,10 @@ impl FetcherCache {
let data_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: url={}, expected_hash={}", url, expected_hash);
eprintln!(
"[CACHE] get_tarball: url={}, expected_hash={}",
url, expected_hash
);
if !meta_path.exists() || !data_dir.exists() {
#[cfg(debug_assertions)]
@@ -181,12 +184,18 @@ impl FetcherCache {
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: cached hash={}, name={}", meta.hash, meta.name);
eprintln!(
"[CACHE] get_tarball: cached hash={}, name={}",
meta.hash, meta.name
);
if meta.hash == expected_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash match, checking store_path={}", store_path.display());
eprintln!(
"[CACHE] get_tarball: hash match, checking store_path={}",
store_path.display()
);
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: HIT - returning store path");
@@ -198,7 +207,10 @@ impl FetcherCache {
}
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_hash);
eprintln!(
"[CACHE] get_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_hash
);
None
}
}
@@ -242,7 +254,10 @@ impl FetcherCache {
let cached_content = cache_entry_dir.join("content");
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: url={}, expected_nar_hash={}", url, expected_nar_hash);
eprintln!(
"[CACHE] get_extracted_tarball: url={}, expected_nar_hash={}",
url, expected_nar_hash
);
if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)]
@@ -254,12 +269,18 @@ impl FetcherCache {
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: cached hash={}, name={}", meta.hash, meta.name);
eprintln!(
"[CACHE] get_extracted_tarball: cached hash={}, name={}",
meta.hash, meta.name
);
if meta.hash == expected_nar_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: hash match, checking store_path={}", store_path.display());
eprintln!(
"[CACHE] get_extracted_tarball: hash match, checking store_path={}",
store_path.display()
);
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: HIT - returning store path");
@@ -271,7 +292,10 @@ impl FetcherCache {
}
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_nar_hash);
eprintln!(
"[CACHE] get_extracted_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_nar_hash
);
None
}
}
@@ -288,7 +312,9 @@ impl FetcherCache {
if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: cache miss - meta or content dir not found");
eprintln!(
"[CACHE] get_extracted_tarball_by_url: cache miss - meta or content dir not found"
);
return None;
}
@@ -296,7 +322,10 @@ impl FetcherCache {
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: cached hash={}, name={}", meta.hash, meta.name);
eprintln!(
"[CACHE] get_extracted_tarball_by_url: cached hash={}, name={}",
meta.hash, meta.name
);
let store_path = self.make_store_path(&meta.hash, &meta.name);
if store_path.exists() {
@@ -322,7 +351,10 @@ impl FetcherCache {
let cache_entry_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: url={}, hash={}, name={}", url, hash, name);
eprintln!(
"[CACHE] put_tarball_from_extracted: url={}, hash={}, name={}",
url, hash, name
);
fs::create_dir_all(&cache_entry_dir)?;
@@ -340,7 +372,10 @@ impl FetcherCache {
let store_path = self.make_store_path(hash, name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: store_path={}", store_path.display());
eprintln!(
"[CACHE] put_tarball_from_extracted: store_path={}",
store_path.display()
);
if !store_path.exists() {
fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
copy_dir_recursive(extracted_path, &store_path)?;

View File

@@ -5,6 +5,7 @@ use std::process::Command;
use super::FetchGitResult;
use super::cache::FetcherCache;
#[allow(clippy::too_many_arguments)]
pub fn fetch_git(
cache: &FetcherCache,
url: &str,

View File

@@ -353,6 +353,7 @@ pub struct Assert {
pub assertion: ExprId,
/// The expression to return if the assertion is true.
pub expr: ExprId,
pub assertion_raw: String,
}
/// Represents the concatenation of multiple string expressions.

View File

@@ -3,8 +3,8 @@
use rnix::ast::{self, Expr, HasEntry};
use crate::error::{Error, Result};
use super::*;
use crate::error::{Error, Result};
pub trait Downgrade<Ctx: DowngradeContext> {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
@@ -40,9 +40,18 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let assertion = self.condition().unwrap().downgrade(ctx)?;
let assertion = self.condition().unwrap();
let assertion_raw = assertion.to_string();
let assertion = assertion.downgrade(ctx)?;
let expr = self.body().unwrap().downgrade(ctx)?;
Ok(ctx.new_expr(Assert { assertion, expr }.to_ir()))
Ok(ctx.new_expr(
Assert {
assertion,
expr,
assertion_raw,
}
.to_ir(),
))
}
}

View File

@@ -1,13 +1,14 @@
#![warn(clippy::unwrap_used)]
mod codegen;
pub mod context;
pub mod error;
pub mod value;
mod codegen;
mod fetcher;
mod ir;
mod nix_hash;
mod runtime;
pub mod value;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

View File

@@ -1,7 +1,6 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::ops::DerefMut;
use std::path::{Component, PathBuf};
use std::path::{Component, Path, PathBuf};
use std::sync::Once;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
@@ -14,20 +13,20 @@ type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>;
type LocalValue<'a> = v8::Local<'a, v8::Value>;
type LocalSymbol<'a> = v8::Local<'a, v8::Symbol>;
pub(crate) trait RuntimeCtx: 'static {
fn get_current_dir(&self) -> PathBuf;
fn push_path_stack(&mut self, path: PathBuf) -> impl DerefMut<Target = Self>;
pub(crate) trait RuntimeContext: 'static {
fn get_current_dir(&self) -> &Path;
fn set_current_file(&mut self, path: PathBuf);
fn compile_code(&mut self, code: &str) -> Result<String>;
}
fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
const ESM: &[ExtensionFileSource] =
&deno_core::include_js_files!(nix_runtime dir "runtime-ts/dist", "runtime.js");
let mut ops = vec![
op_import::<Ctx>(),
op_read_file(),
op_path_exists(),
op_resolve_path::<Ctx>(),
op_resolve_path(),
op_sha256_hex(),
op_make_store_path(),
op_output_path_name(),
@@ -75,7 +74,7 @@ pub(crate) use private::NixError;
#[deno_core::op2]
#[string]
fn op_import<Ctx: RuntimeCtx>(
fn op_import<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
@@ -95,8 +94,7 @@ fn op_import<Ctx: RuntimeCtx>(
#[cfg(debug_assertions)]
eprintln!("[DEBUG] compiling file: {}", absolute_path.display());
let mut guard = ctx.push_path_stack(absolute_path);
let ctx = guard.deref_mut();
ctx.set_current_file(absolute_path);
Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?)
}
@@ -114,19 +112,17 @@ fn op_path_exists(#[string] path: String) -> bool {
#[deno_core::op2]
#[string]
fn op_resolve_path<Ctx: RuntimeCtx>(
state: &mut OpState,
fn op_resolve_path(
#[string] current_dir: String,
#[string] path: String,
) -> std::result::Result<String, NixError> {
let ctx = state.borrow::<Ctx>();
// If already absolute, return as-is
if path.starts_with('/') {
return Ok(path);
}
// Resolve relative path against current file directory (or CWD)
let current_dir = ctx.get_current_dir().join(&path);
let current_dir = PathBuf::from(current_dir).join(&path);
let mut normalized = PathBuf::new();
for component in current_dir.components() {
match component {
@@ -229,14 +225,14 @@ fn op_add_path(
hex::encode(hasher.finalize())
};
if let Some(expected_hash) = sha256 {
if computed_hash != expected_hash {
if let Some(expected_hash) = sha256
&& computed_hash != expected_hash
{
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
path, expected_hash, computed_hash
)));
}
}
let store_path = crate::nix_hash::make_store_path("source", &computed_hash, &computed_name);
@@ -248,8 +244,8 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
use std::fs;
if path.is_file() {
let contents = fs::read(path)
.map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let contents =
fs::read(path).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
Ok(hex::encode(hasher.finalize()))
@@ -278,8 +274,7 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
}
}
pub(crate) struct Runtime<Ctx: RuntimeCtx> {
pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>,
primop_metadata_symbol: v8::Global<v8::Symbol>,
@@ -288,7 +283,7 @@ pub(crate) struct Runtime<Ctx: RuntimeCtx> {
_marker: PhantomData<Ctx>,
}
impl<Ctx: RuntimeCtx> Runtime<Ctx> {
impl<Ctx: RuntimeContext> Runtime<Ctx> {
pub(crate) fn new() -> Result<Self> {
// Initialize V8 once
static INIT: Once = Once::new();
@@ -345,7 +340,8 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
))
}
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT)
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH)
#[allow(clippy::type_complexity)]
fn get_symbols(
scope: &ScopeRef,
) -> Result<(
@@ -365,55 +361,24 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
Error::internal("failed to convert global Nix Value to object".into())
})?;
let is_thunk_sym_key = v8::String::new(scope, "IS_THUNK")
let get_symbol = |symbol| {
let key = v8::String::new(scope, symbol)
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let is_thunk_sym = nix_obj
.get(scope, is_thunk_sym_key.into())
.ok_or_else(|| Error::internal("failed to get IS_THUNK Symbol".into()))?;
let is_thunk = is_thunk_sym.try_cast::<v8::Symbol>().map_err(|err| {
let val = nix_obj
.get(scope, key.into())
.ok_or_else(|| Error::internal(format!("failed to get {symbol} Symbol")))?;
let sym = val.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert IS_THUNK Value to Symbol ({err})"
"failed to convert {symbol} Value to Symbol ({err})"
))
})?;
let is_thunk = v8::Global::new(scope, is_thunk);
Ok(v8::Global::new(scope, sym))
};
let primop_metadata_sym_key = v8::String::new(scope, "PRIMOP_METADATA")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let primop_metadata_sym = nix_obj
.get(scope, primop_metadata_sym_key.into())
.ok_or_else(|| Error::internal("failed to get PRIMOP_METADATA Symbol".into()))?;
let primop_metadata = primop_metadata_sym
.try_cast::<v8::Symbol>()
.map_err(|err| {
Error::internal(format!(
"failed to convert PRIMOP_METADATA Value to Symbol ({err})"
))
})?;
let primop_metadata = v8::Global::new(scope, primop_metadata);
let has_context_sym_key = v8::String::new(scope, "HAS_CONTEXT")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let has_context_sym = nix_obj
.get(scope, has_context_sym_key.into())
.ok_or_else(|| Error::internal("failed to get HAS_CONTEXT Symbol".into()))?;
let has_context = has_context_sym.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert HAS_CONTEXT Value to Symbol ({err})"
))
})?;
let has_context = v8::Global::new(scope, has_context);
let is_path_sym_key = v8::String::new(scope, "IS_PATH")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let is_path_sym = nix_obj
.get(scope, is_path_sym_key.into())
.ok_or_else(|| Error::internal("failed to get IS_PATH Symbol".into()))?;
let is_path = is_path_sym.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert IS_PATH Value to Symbol ({err})"
))
})?;
let is_path = v8::Global::new(scope, is_path);
let is_thunk = get_symbol("IS_THUNK")?;
let primop_metadata = get_symbol("PRIMOP_METADATA")?;
let has_context = get_symbol("HAS_CONTEXT")?;
let is_path = get_symbol("IS_PATH")?;
Ok((is_thunk, primop_metadata, has_context, is_path))
}
@@ -616,14 +581,9 @@ mod test {
#[test]
fn to_value_working() {
let mut ctx = Context::new().unwrap();
const EXPR: &str = "({ test: [1., 9223372036854775807n, true, false, 'hello world!'] })";
assert_eq!(
ctx.eval_js(
"({
test: [1., 9223372036854775807n, true, false, 'hello world!']
})"
.into(),
)
.unwrap(),
ctx.eval_js(EXPR.into()).unwrap(),
Value::AttrSet(AttrSet::new(std::collections::BTreeMap::from([(
Symbol::from("test"),
Value::List(List::new(vec![

View File

@@ -150,40 +150,85 @@ fn builtins_concat_lists() {
#[test]
fn builtins_compare_versions_basic() {
assert_eq!(eval("builtins.compareVersions \"1.0\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.1\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3\" \"2.3\""), Value::Int(0));
assert_eq!(eval("builtins.compareVersions \"2.5\" \"2.3\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"3.1\" \"2.3\""), Value::Int(1));
assert_eq!(
eval("builtins.compareVersions \"1.0\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.1\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3\" \"2.3\""),
Value::Int(0)
);
assert_eq!(
eval("builtins.compareVersions \"2.5\" \"2.3\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"3.1\" \"2.3\""),
Value::Int(1)
);
}
#[test]
fn builtins_compare_versions_components() {
assert_eq!(eval("builtins.compareVersions \"2.3.1\" \"2.3\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"2.3\" \"2.3.1\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3.1\" \"2.3\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3\" \"2.3.1\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_numeric_vs_alpha() {
// Numeric component comes before alpha component
assert_eq!(eval("builtins.compareVersions \"2.3.1\" \"2.3a\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"2.3a\" \"2.3.1\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3.1\" \"2.3a\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3a\" \"2.3.1\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_pre() {
// "pre" is special: comes before everything except another "pre"
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre3\" \"2.3pre12\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3c\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3q\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre3\" \"2.3pre12\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3c\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3q\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_alpha() {
// Alphabetic comparison
assert_eq!(eval("builtins.compareVersions \"2.3a\" \"2.3c\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3c\" \"2.3a\""), Value::Int(1));
assert_eq!(
eval("builtins.compareVersions \"2.3a\" \"2.3c\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3c\" \"2.3a\""),
Value::Int(1)
);
}
#[test]

View File

@@ -110,10 +110,7 @@ fn test_path_with_file() {
let test_file = temp_dir.path().join("test.txt");
std::fs::write(&test_file, "Hello, World!").unwrap();
let expr = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result = ctx.eval_code(&expr).unwrap();
// Should return a store path string
@@ -239,10 +236,7 @@ fn test_path_with_sha256() {
std::fs::write(&test_file, "Test content for hashing").unwrap();
// First, get the hash by calling without sha256
let expr1 = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr1 = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result1 = ctx.eval_code(&expr1).unwrap();
let store_path1 = match result1 {
Value::String(s) => s,
@@ -251,10 +245,7 @@ fn test_path_with_sha256() {
// Compute the actual hash (for testing, we'll just verify the same path is returned)
// In real usage, the user would know the hash beforehand
let expr2 = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr2 = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result2 = ctx.eval_code(&expr2).unwrap();
let store_path2 = match result2 {
Value::String(s) => s,

View File

@@ -58,7 +58,10 @@ fn test_match_posix_space_class() {
#[test]
fn test_match_posix_upper_class() {
assert_eq!(eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#), Value::Null);
assert_eq!(
eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#),
Value::Null
);
assert_eq!(
eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO ""#),
@@ -250,9 +253,8 @@ fn test_replace_longer_pattern() {
#[test]
fn test_replace_different_lengths() {
let result = std::panic::catch_unwind(|| {
eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#)
});
let result =
std::panic::catch_unwind(|| eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#));
assert!(result.is_err());
}