Compare commits

..

8 Commits

28 changed files with 1816 additions and 1539 deletions

2
.gitignore vendored
View File

@@ -5,3 +5,5 @@ target/
# Profiling
flamegraph*.svg
perf.data*
profile.json.gz
prof.json

41
Cargo.lock generated
View File

@@ -916,6 +916,40 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "ere"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced253a70c1159bac921dc672edbb5530c35fbca23a7f1b9f03641f39e4172f1"
dependencies = [
"ere-core",
"ere-macros",
]
[[package]]
name = "ere-core"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6639aa08d01ec59ec0509e9b79b4e17b953131b053d1a9a6b449150e58c9706d"
dependencies = [
"proc-macro2",
"quote",
"syn",
"thiserror 2.0.17",
"utf8-ranges",
]
[[package]]
name = "ere-macros"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9a41368e568caa1ee19d8dfa943d7f2f3ddabc0d9c0965cc37d76e0146c6b89"
dependencies = [
"ere-core",
"quote",
"syn",
]
[[package]]
name = "errno"
version = "0.3.13"
@@ -1912,6 +1946,7 @@ dependencies = [
"deno_error",
"derive_more",
"dirs",
"ere",
"flate2",
"hashbrown 0.16.1",
"hex",
@@ -3550,6 +3585,12 @@ dependencies = [
"serde",
]
[[package]]
name = "utf8-ranges"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcfc827f90e53a02eaef5e535ee14266c1d569214c6aa70133a624d8a3164ba"
[[package]]
name = "utf8_iter"
version = "1.0.4"

18
flake.lock generated
View File

@@ -8,11 +8,11 @@
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1768892055,
"narHash": "sha256-zatCoDgFd0C8YEOztMeBcom6cka0GqJGfc0aAXvpktc=",
"lastModified": 1770447430,
"narHash": "sha256-smrRbWhvJF6BATB6pXbD8Cp04HRrVcYQkXqOhUF81nk=",
"owner": "nix-community",
"repo": "fenix",
"rev": "81d6a7547e090f7e760b95b9cc534461f6045e43",
"rev": "e1b28f6ca0d1722edceec1f2f3501558988d1aed",
"type": "github"
},
"original": {
@@ -37,11 +37,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1768886240,
"narHash": "sha256-C2TjvwYZ2VDxYWeqvvJ5XPPp6U7H66zeJlRaErJKoEM=",
"lastModified": 1770197578,
"narHash": "sha256-AYqlWrX09+HvGs8zM6ebZ1pwUqjkfpnv8mewYwAo+iM=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "80e4adbcf8992d3fd27ad4964fbb84907f9478b0",
"rev": "00c21e4c93d963c50d4c0c89bfa84ed6e0694df2",
"type": "github"
},
"original": {
@@ -61,11 +61,11 @@
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1768816483,
"narHash": "sha256-bXeWgVkvxN76QEw12OaWFbRhO1yt+5QETz/BxBX4dk0=",
"lastModified": 1770290336,
"narHash": "sha256-rJ79U68ZLjCSg1Qq+63aBXi//W7blaKiYq9NnfeTboA=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "1b8952b49fa10cae9020f0e46d0b8938563a6b64",
"rev": "d2a00da09293267e5be2efb216698762929d7140",
"type": "github"
},
"original": {

View File

@@ -62,6 +62,7 @@ rnix = "0.12"
rowan = "0.15"
nix-js-macros = { path = "../nix-js-macros" }
ere = "0.2.4"
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }

View File

@@ -7,19 +7,19 @@ use nix_js::value::Value;
pub fn eval(expr: &str) -> Value {
Context::new()
.unwrap()
.eval_code(Source::new_eval(expr.into()).unwrap())
.eval(Source::new_eval(expr.into()).unwrap())
.unwrap()
}
pub fn eval_result(expr: &str) -> Result<Value> {
Context::new()
.unwrap()
.eval_code(Source::new_eval(expr.into()).unwrap())
.eval(Source::new_eval(expr.into()).unwrap())
}
pub fn compile(expr: &str) -> String {
Context::new()
.unwrap()
.compile_code(Source::new_eval(expr.into()).unwrap())
.compile(Source::new_eval(expr.into()).unwrap())
.unwrap()
}

View File

@@ -46,8 +46,24 @@ Dependency tracking for imported derivations may be incomplete.`,
export const scopedImport =
(scope: NixValue) =>
(path: NixValue): never => {
throw new Error("Not implemented: scopedImport");
(path: NixValue): NixValue => {
const scopeAttrs = forceAttrs(scope);
const scopeKeys = Object.keys(scopeAttrs);
const context: NixStringContext = new Set();
const pathStr = coerceToPath(path, context);
if (context.size > 0) {
console.warn(
`[WARN] scopedImport: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
);
}
const code = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
const scopedFunc = Function(`return (${code})`)();
return scopedFunc(scopeAttrs);
};
export const storePath = (pathArg: NixValue): StringWithContext => {

View File

@@ -80,81 +80,17 @@ export const concatStringsSep =
return mkStringWithContext(result, context);
};
const POSIX_CLASSES: Record<string, string> = {
alnum: "a-zA-Z0-9",
alpha: "a-zA-Z",
blank: " \\t",
digit: "0-9",
lower: "a-z",
upper: "A-Z",
space: "\\s",
xdigit: "0-9A-Fa-f",
punct: "\\-!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~",
};
function posixToJsRegex(pattern: string, fullMatch: boolean = false): RegExp {
let jsPattern = pattern;
jsPattern = jsPattern.replace(/\[(\^?)(?:\[:(\w+):\])+\]/g, (match) => {
const isNegated = match[1] === "^";
const classNames = [...match.matchAll(/\[:(\w+):\]/g)].map((m) => m[1]);
const combined = classNames
.map((className) => {
const replacement = POSIX_CLASSES[className];
if (!replacement) {
throw new Error(`Unknown POSIX character class: ${className}`);
}
return replacement;
})
.join("");
return isNegated ? `[^${combined}]` : `[${combined}]`;
});
jsPattern = jsPattern.replace(/\[:(\w+):\]/g, (_match, className) => {
const replacement = POSIX_CLASSES[className];
if (!replacement) {
throw new Error(`Unknown POSIX character class: ${className}`);
}
return replacement;
});
if (fullMatch) {
if (!jsPattern.startsWith("^")) {
jsPattern = "^" + jsPattern;
}
if (!jsPattern.endsWith("$")) {
jsPattern = jsPattern + "$";
}
}
return new RegExp(jsPattern, "u");
}
export const match =
(regex: NixValue) =>
(str: NixValue): NixValue => {
const regexStr = forceStringValue(regex);
const inputStr = forceStringValue(str);
try {
const re = posixToJsRegex(regexStr, true);
const result = inputStr.match(re);
if (!result) {
const result = Deno.core.ops.op_match(regexStr, inputStr);
if (result === null) {
return null;
}
const groups: NixValue[] = [];
for (let i = 1; i < result.length; i++) {
groups.push(result[i] !== undefined ? result[i] : null);
}
return groups;
} catch (e) {
throw new Error(`Invalid regular expression '${regexStr}': ${e}`);
}
return result.map((g) => (g !== null ? g : null));
};
export const split =
@@ -164,37 +100,16 @@ export const split =
const inputStr = forceString(str);
const inputStrValue = getStringValue(inputStr);
try {
const re = posixToJsRegex(regexStr);
const reGlobal = new RegExp(re.source, re.flags + "g");
const result = Deno.core.ops.op_split(regexStr, inputStrValue);
const result: NixValue[] = [];
let lastIndex = 0;
let match: RegExpExecArray | null;
while ((match = reGlobal.exec(inputStrValue)) !== null) {
result.push(inputStrValue.substring(lastIndex, match.index));
const groups: NixValue[] = [];
for (let i = 1; i < match.length; i++) {
groups.push(match[i] !== undefined ? match[i] : null);
}
result.push(groups);
lastIndex = match.index + match[0].length;
if (match[0].length === 0) {
reGlobal.lastIndex++;
}
}
if (lastIndex === 0) {
if (result.length === 1 && typeof result[0] === "string") {
return [inputStr];
}
result.push(inputStrValue.substring(lastIndex));
return result;
} catch (e) {
throw new Error(`Invalid regular expression '${regexStr}': ${e}`);
return result.map((item) => {
if (typeof item === "string") {
return item;
}
return item.map((g) => (g !== null ? g : null));
});
};

View File

@@ -4,7 +4,7 @@
* All functionality is exported via the global `Nix` object
*/
import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS, forceDeepSafe, IS_CYCLE } from "./thunk";
import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS, forceDeep, IS_CYCLE, forceShallow } from "./thunk";
import {
select,
selectWithDefault,
@@ -23,18 +23,21 @@ import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import { HAS_CONTEXT } from "./string-context";
import { IS_PATH, mkAttrs, mkFunction, mkAttrsWithPos, ATTR_POSITIONS } from "./types";
import { IS_PATH, mkAttrs, mkFunction, mkAttrsWithPos, ATTR_POSITIONS, NixValue } from "./types";
import { forceBool } from "./type-assert";
export type NixRuntime = typeof Nix;
const replBindings: Record<string, NixValue> = {};
/**
* The global Nix runtime object
*/
export const Nix = {
createThunk,
force,
forceDeepSafe,
forceShallow,
forceDeep,
forceBool,
isThunk,
IS_THUNK,
@@ -66,6 +69,12 @@ export const Nix = {
op,
builtins,
PRIMOP_METADATA,
replBindings,
setReplBinding: (name: string, value: NixValue) => {
replBindings[name] = value;
},
getReplBinding: (name: string) => replBindings[name],
};
globalThis.Nix = Nix;

View File

@@ -6,6 +6,7 @@
import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
import { HAS_CONTEXT } from "./string-context";
import { IS_PATH } from "./types";
import { isAttrs } from "./builtins/type-check";
/**
* Symbol used to mark objects as thunks
@@ -151,7 +152,7 @@ export const CYCLE_MARKER = { [IS_CYCLE]: true };
* Returns a fully forced value where thunks are replaced with their results.
* Cyclic references are replaced with CYCLE_MARKER, preserving the container type.
*/
export const forceDeepSafe = (value: NixValue, seen: WeakSet<object> = new WeakSet()): NixStrictValue => {
export const forceDeep = (value: NixValue, seen: WeakSet<object> = new WeakSet()): NixStrictValue => {
const forced = force(value);
if (forced === null || typeof forced !== "object") {
@@ -171,13 +172,43 @@ export const forceDeepSafe = (value: NixValue, seen: WeakSet<object> = new WeakS
}
if (Array.isArray(forced)) {
return forced.map((item) => forceDeepSafe(item, seen));
return forced.map((item) => forceDeep(item, seen));
}
if (typeof forced === "object") {
const result: Record<string, NixValue> = {};
for (const [key, val] of Object.entries(forced)) {
result[key] = forceDeepSafe(val, seen);
result[key] = forceDeep(val, seen);
}
return result;
}
return forced;
};
export const forceShallow = (value: NixValue): NixStrictValue => {
const forced = force(value);
if (forced === null || typeof forced !== "object") {
return forced;
}
if (Array.isArray(forced)) {
return forced.map((item) => {
const forcedItem = force(item);
if (typeof forcedItem === "object" && forcedItem === forced) {
return CYCLE_MARKER
} else {
return forcedItem
}
});
}
if (isAttrs(forced)) {
const result: Record<string, NixValue> = {};
for (const [key, val] of Object.entries(forced)) {
const forcedVal = force(val);
result[key] = forcedVal === forced ? CYCLE_MARKER : forcedVal;
}
return result;
}

View File

@@ -37,6 +37,7 @@ declare global {
namespace ops {
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_scoped_import(path: string, scopeKeys: string[]): string;
function op_read_file(path: string): string;
function op_read_file_type(path: string): string;
function op_read_dir(path: string): Record<string, string>;
@@ -93,6 +94,8 @@ declare global {
sha256: string | null,
include_paths: string[],
): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
}
}
}

View File

@@ -13,7 +13,7 @@ fn main() -> Result<()> {
args.next();
let expr = args.next().unwrap();
let src = Source::new_eval(expr)?;
match Context::new()?.eval_code(src) {
match Context::new()?.eval(src) {
Ok(value) => {
println!("{value}");
Ok(())

View File

@@ -1,7 +1,7 @@
use anyhow::Result;
use hashbrown::HashSet;
use nix_js::context::Context;
use nix_js::error::Source;
use regex::Regex;
use rustyline::DefaultEditor;
use rustyline::error::ReadlineError;
@@ -10,7 +10,8 @@ fn main() -> Result<()> {
let mut rl = DefaultEditor::new()?;
let mut context = Context::new()?;
let re = Regex::new(r"^\s*([a-zA-Z_][a-zA-Z0-9_'-]*)\s*=(.*)$").unwrap();
let mut scope = HashSet::new();
const RE: ere::Regex<3> = ere::compile_regex!("^[ \t]*([a-zA-Z_][a-zA-Z0-9_'-]*)[ \t]*(.*)$");
loop {
let readline = rl.readline("nix-js-repl> ");
match readline {
@@ -19,21 +20,27 @@ fn main() -> Result<()> {
continue;
}
let _ = rl.add_history_entry(line.as_str());
if let Some(_caps) = re.captures(&line) {
eprintln!("Error: binding not implemented yet");
continue;
/* let ident = caps.get(1).unwrap().as_str();
let expr = caps.get(2).unwrap().as_str().trim();
if let Some([Some(_), Some(ident), Some(rest)]) = RE.exec(&line) {
if let Some(expr) = rest.strip_prefix('=') {
let expr = expr.trim_start();
if expr.is_empty() {
eprintln!("Error: missing expression after '='");
continue;
}
if let Err(err) = context.add_binding(ident, expr) {
eprintln!("Error: {}", err);
} */
match context.add_binding(ident, expr, &mut scope) {
Ok(value) => println!("{} = {}", ident, value),
Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
}
} else {
let src = Source::new_repl(line)?;
match context.eval_code(src) {
match context.eval_repl(src, &scope) {
Ok(value) => println!("{value}"),
Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
}
}
} else {
let src = Source::new_repl(line)?;
match context.eval_shallow(src) {
Ok(value) => println!("{value}"),
Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
}

View File

@@ -2,6 +2,7 @@ use std::fmt::{self, Write as _};
use std::path::Path;
use crate::ir::*;
use crate::value::Symbol;
pub(crate) struct CodeBuffer {
buf: String,
@@ -83,7 +84,7 @@ fn joined<Ctx: CodegenContext, I: Iterator, F: Fn(&Ctx, &mut CodeBuffer, I::Item
}
macro_rules! code {
($buf:expr, $ctx:expr; $($item:expr),* $(,)?) => {{
($buf:expr, $ctx:expr; $($item:expr)*) => {{
$(
($item).compile($ctx, $buf);
)*
@@ -99,8 +100,8 @@ macro_rules! code {
write!($buf, $fmt, $($arg)*).unwrap()
};
($buf:expr, $lit:literal) => {
$buf.push_str($lit)
($buf:expr, $fmt:literal) => {
write!($buf, $fmt).unwrap()
};
}
@@ -116,7 +117,7 @@ pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
code!(&mut buf, ctx;
"Nix.builtins.storeDir="
quoted(ctx.get_store_dir())
";const currentDir="
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string())
";return "
expr
@@ -125,6 +126,28 @@ pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
buf.into_string()
}
pub(crate) fn compile_scoped(expr: &Ir, ctx: &impl CodegenContext) -> String {
let mut buf = CodeBuffer::with_capacity(8192);
code!(&mut buf, ctx; "((__scope)=>{");
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
code!(&mut buf, ctx; "Nix.DEBUG_THUNKS.enabled=true;");
}
code!(&mut buf, ctx;
"Nix.builtins.storeDir="
quoted(ctx.get_store_dir())
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string())
";return "
expr
"})"
);
buf.into_string()
}
trait Compile<Ctx: CodegenContext> {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer);
}
@@ -184,7 +207,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for rnix::TextRange {
pub(crate) trait CodegenContext {
fn get_ir(&self, id: ExprId) -> &Ir;
fn get_sym(&self, id: SymId) -> &str;
fn get_sym(&self, id: SymId) -> Symbol<'_>;
fn get_current_dir(&self) -> &Path;
fn get_store_dir(&self) -> &str;
fn get_current_source_id(&self) -> usize;
@@ -197,6 +220,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for ExprId {
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
quoted(self).compile(ctx, buf);
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
match self {
@@ -216,7 +245,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
code!(buf, ctx; quoted(&s.val));
}
Ir::Path(p) => {
code!(buf, ctx; "Nix.resolvePath(currentDir," ctx.get_ir(p.expr) ")");
code!(buf, ctx; "Nix.resolvePath(__currentDir," ctx.get_ir(p.expr) ")");
}
Ir::If(x) => x.compile(ctx, buf),
Ir::BinOp(x) => x.compile(ctx, buf),
@@ -238,8 +267,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
}
&Ir::Builtin(Builtin { inner: name, .. }) => {
code!(buf, ctx;
"Nix.builtins[",
quoted(ctx.get_sym(name)),
"Nix.builtins["
ctx.get_sym(name)
"]"
);
}
@@ -255,26 +284,40 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
let assertion_span = assertion_ir.span();
code!(buf, ctx;
"Nix.assert(Nix.withContext(\"while evaluating the condition of the assert statement\",",
assertion_span,
",()=>(",
assertion_ir,
")),",
ctx.get_ir(expr),
",",
quoted(assertion_raw),
",",
assert_span,
"Nix.assert(Nix.withContext(\"while evaluating the condition of the assert statement\","
assertion_span
",()=>("
assertion_ir
")),"
ctx.get_ir(expr)
","
quoted(assertion_raw)
","
assert_span
")"
);
}
Ir::CurPos(cur_pos) => {
code!(buf, ctx;
"Nix.mkPos(",
cur_pos.span,
"Nix.mkPos("
cur_pos.span
")"
);
}
&Ir::ReplBinding(ReplBinding { inner: name, .. }) => {
code!(buf, ctx;
"Nix.getReplBinding("
ctx.get_sym(name)
")"
);
}
&Ir::ScopedImportBinding(ScopedImportBinding { inner: name, .. }) => {
code!(buf, ctx;
"__scope["
ctx.get_sym(name)
"]"
);
}
}
}
}
@@ -361,22 +404,10 @@ impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
);
}
PipeL => {
code!(buf, ctx;
"Nix.call(",
rhs,
",",
lhs,
")"
);
code!(buf, ctx; "Nix.call(" rhs "," lhs ")");
}
PipeR => {
code!(buf, ctx;
"Nix.call(",
lhs,
",",
rhs,
")"
);
code!(buf, ctx; "Nix.call(" lhs "," rhs ")");
}
}
}
@@ -385,20 +416,13 @@ impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
impl<Ctx: CodegenContext> Compile<Ctx> for UnOp {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
use UnOpKind::*;
let rhs = ctx.get_ir(self.rhs);
match self.kind {
Neg => {
code!(buf, ctx;
"Nix.op.sub(0n,",
ctx.get_ir(self.rhs),
")"
);
code!(buf, ctx; "Nix.op.sub(0n," rhs ")");
}
Not => {
code!(buf, ctx;
"Nix.op.bnot(",
ctx.get_ir(self.rhs),
")"
);
code!(buf, ctx; "Nix.op.bnot(" ctx.get_ir(self.rhs) ")");
}
}
}
@@ -418,32 +442,33 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Func {
{
code!(buf, "Nix.mkFunction(arg{}=>", id);
if has_thunks {
code!(buf, ctx; "{", &self.thunks, "return ", self.body, "}");
code!(buf, ctx; "{" self.thunks "return " self.body "}");
} else {
code!(buf, ctx; "(", self.body, ")");
code!(buf, ctx; "(" self.body ")");
}
code!(buf, ctx;
",["
joined(required.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
code!(buf, ctx; quoted(ctx.get_sym(sym)));
code!(buf, ctx; ctx.get_sym(sym));
})
"],["
joined(optional.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
code!(buf, ctx; quoted(ctx.get_sym(sym)));
code!(buf, ctx; ctx.get_sym(sym));
})
"],{"
joined(required.iter().chain(optional.iter()), ",", |ctx: &Ctx, buf, &(sym, span)| {
code!(buf, ctx; quoted(ctx.get_sym(sym)), ":", span);
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"},"
ellipsis
")"
);
code!(buf, "{})", ellipsis);
} else {
code!(buf, "arg{}=>", id);
if has_thunks {
code!(buf, ctx; "{", &self.thunks, "return ", self.body, "}");
code!(buf, ctx; "{" self.thunks "return " self.body "}");
} else {
code!(buf, ctx; "(", self.body, ")");
code!(buf, ctx; "(" self.body ")");
}
}
}
@@ -452,12 +477,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Func {
impl<Ctx: CodegenContext> Compile<Ctx> for Call {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
code!(buf, ctx;
"Nix.call(",
ctx.get_ir(self.func),
",",
ctx.get_ir(self.arg),
",",
self.span,
"Nix.call("
ctx.get_ir(self.func)
","
ctx.get_ir(self.arg)
","
self.span
")"
);
}
@@ -491,11 +516,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for TopLevel {
if self.thunks.is_empty() {
ctx.get_ir(self.body).compile(ctx, buf);
} else {
code!(buf, "(()=>{");
code!(buf, ctx; &self.thunks);
code!(buf, "return ");
ctx.get_ir(self.body).compile(ctx, buf);
code!(buf, "})()");
let body = ctx.get_ir(self.body);
code!(buf, ctx; "(()=>{" self.thunks "return " body "})()");
}
}
}
@@ -509,7 +531,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
",["
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; quoted(ctx.get_sym(*sym))),
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
}
})
@@ -526,7 +548,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
",["
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; quoted(ctx.get_sym(*sym))),
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
}
})
@@ -549,12 +571,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
code!(
buf, ctx;
quoted(key) ":Nix.withContext(\"while evaluating the attribute '" escaped(key) "'\"," val.span() ",()=>(" val "))"
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
);
})
"},{"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; quoted(ctx.get_sym(sym)) ":" span);
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"},{dynKeys:["
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (key, _, _)| {
@@ -583,12 +605,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
code!(
buf, ctx;
quoted(key) ":Nix.withContext(\"while evaluating the attribute '" escaped(key) "'\"," val.span() ",()=>(" val "))"
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
);
})
"},{"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; quoted(ctx.get_sym(sym)) ":" span);
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"})"
);
@@ -638,7 +660,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr {
",["
joined(self.rhs.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; quoted(ctx.get_sym(*sym))),
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
}
})

View File

@@ -1,26 +1,41 @@
use std::path::Path;
use std::ptr::NonNull;
use hashbrown::HashMap;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use rnix::TextRange;
use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, compile};
use crate::downgrade::*;
use crate::error::{Error, Result, Source};
use crate::ir::{
Arg, ArgId, Bool, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, Null, SymId, Thunk,
ToIr as _, synthetic_span,
Arg, ArgId, Bool, Builtin, ExprId, Ir, Null, ReplBinding, ScopedImportBinding, SymId, Thunk,
ToIr as _,
};
use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{Store, StoreBackend, StoreConfig};
use crate::value::Value;
use crate::value::{Symbol, Value};
pub struct Context {
ctx: Ctx,
runtime: Runtime<Ctx>,
}
macro_rules! eval {
($name:ident, $wrapper:literal) => {
pub fn $name(&mut self, source: Source) -> Result<Value> {
tracing::info!("Starting evaluation");
tracing::debug!("Compiling code");
let code = self.compile(source)?;
tracing::debug!("Executing JavaScript");
self.runtime.eval(format!($wrapper, code), &mut self.ctx)
}
};
}
impl Context {
pub fn new() -> Result<Self> {
let ctx = Ctx::new()?;
@@ -29,29 +44,48 @@ impl Context {
Ok(Self { ctx, runtime })
}
pub fn eval_code(&mut self, source: Source) -> Result<Value> {
eval!(eval, "Nix.force({})");
eval!(eval_shallow, "Nix.forceShallow({})");
eval!(eval_deep, "Nix.forceDeep({})");
pub fn eval_repl<'a>(&'a mut self, source: Source, scope: &'a HashSet<SymId>) -> Result<Value> {
tracing::info!("Starting evaluation");
tracing::debug!("Compiling code");
let code = self.compile_code(source)?;
let code = self.ctx.compile(source, Some(Scope::Repl(scope)))?;
tracing::debug!("Executing JavaScript");
self.runtime
.eval(format!("Nix.forceDeepSafe({code})"), &mut self.ctx)
.eval(format!("Nix.forceShallow({})", code), &mut self.ctx)
}
pub fn compile_code(&mut self, source: Source) -> Result<String> {
self.ctx.compile_code(source)
}
#[allow(dead_code)]
pub(crate) fn eval_js(&mut self, code: String) -> Result<Value> {
self.runtime.eval(code, &mut self.ctx)
pub fn compile(&mut self, source: Source) -> Result<String> {
self.ctx.compile(source, None)
}
pub fn get_store_dir(&self) -> &str {
self.ctx.get_store_dir()
}
pub fn add_binding<'a>(
&'a mut self,
name: &str,
expr: &str,
scope: &'a mut HashSet<SymId>,
) -> Result<Value> {
let source = Source::new_repl(expr.to_string())?;
let code = self.ctx.compile(source, Some(Scope::Repl(scope)))?;
let sym = self.ctx.symbols.get_or_intern(name);
let eval_and_store = format!(
"(()=>{{const __v=Nix.forceShallow({});Nix.setReplBinding(\"{}\",__v);return __v}})()",
code, name
);
scope.insert(sym);
self.runtime.eval(eval_and_store, &mut self.ctx)
}
}
pub(crate) struct Ctx {
@@ -72,7 +106,7 @@ impl Ctx {
irs.push(
Builtins {
span: synthetic_span(),
span: rnix::TextRange::default(),
}
.to_ir(),
);
@@ -107,7 +141,7 @@ impl Ctx {
"true",
Bool {
inner: true,
span: synthetic_span(),
span: rnix::TextRange::default(),
}
.to_ir(),
),
@@ -115,14 +149,14 @@ impl Ctx {
"false",
Bool {
inner: false,
span: synthetic_span(),
span: rnix::TextRange::default(),
}
.to_ir(),
),
(
"null",
Null {
span: synthetic_span(),
span: rnix::TextRange::default(),
}
.to_ir(),
),
@@ -134,7 +168,7 @@ impl Ctx {
irs.push(
Builtin {
inner: name_sym,
span: synthetic_span(),
span: rnix::TextRange::default(),
}
.to_ir(),
);
@@ -159,9 +193,9 @@ impl Ctx {
})
}
pub(crate) fn downgrade_ctx<'a>(&'a mut self) -> DowngradeCtx<'a> {
fn downgrade_ctx<'a>(&'a mut self, extra_scope: Option<Scope<'a>>) -> DowngradeCtx<'a> {
let global_ref = unsafe { self.global.as_ref() };
DowngradeCtx::new(self, global_ref)
DowngradeCtx::new(self, global_ref, extra_scope)
}
pub(crate) fn get_current_dir(&self) -> &Path {
@@ -183,7 +217,7 @@ impl Ctx {
self.sources.get(id).expect("source not found").clone()
}
fn compile_code(&mut self, source: Source) -> Result<String> {
fn compile<'a>(&'a mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<String> {
tracing::debug!("Parsing Nix expression");
self.sources.push(source.clone());
@@ -197,7 +231,7 @@ impl Ctx {
#[allow(clippy::unwrap_used)]
let root = self
.downgrade_ctx()
.downgrade_ctx(extra_scope)
.downgrade(root.tree().expr().unwrap())?;
tracing::debug!("Generating JavaScript code");
@@ -205,14 +239,49 @@ impl Ctx {
tracing::debug!("Generated code: {}", &code);
Ok(code)
}
pub(crate) fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
use crate::codegen::compile_scoped;
tracing::debug!("Parsing Nix expression for scoped import");
self.sources.push(source.clone());
let root = rnix::Root::parse(&source.src);
if !root.errors().is_empty() {
let error_msg = root.errors().iter().join("; ");
let err = Error::parse_error(error_msg).with_source(source);
return Err(err);
}
let scope = Scope::ScopedImport(
scope
.into_iter()
.map(|k| self.symbols.get_or_intern(k))
.collect(),
);
#[allow(clippy::unwrap_used)]
let root = self
.downgrade_ctx(Some(scope))
.downgrade(root.tree().expr().unwrap())?;
tracing::debug!("Generating JavaScript code for scoped import");
let code = compile_scoped(self.get_ir(root), self);
tracing::debug!("Generated scoped code: {}", &code);
Ok(code)
}
}
impl CodegenContext for Ctx {
fn get_ir(&self, id: ExprId) -> &Ir {
self.irs.get(id.0).expect("ExprId out of bounds")
}
fn get_sym(&self, id: SymId) -> &str {
self.symbols.resolve(id).expect("SymId out of bounds")
fn get_sym(&self, id: SymId) -> Symbol<'_> {
self.symbols
.resolve(id)
.expect("SymId out of bounds")
.into()
}
fn get_current_dir(&self) -> &std::path::Path {
self.get_current_dir()
@@ -238,8 +307,11 @@ impl RuntimeContext for Ctx {
fn add_source(&mut self, source: Source) {
self.sources.push(source);
}
fn compile_code(&mut self, source: Source) -> Result<String> {
self.compile_code(source)
fn compile(&mut self, source: Source) -> Result<String> {
self.compile(source, None)
}
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
self.compile_scoped(source, scope)
}
fn get_source(&self, id: usize) -> Source {
self.get_source(id)
@@ -251,6 +323,8 @@ impl RuntimeContext for Ctx {
enum Scope<'ctx> {
Global(&'ctx HashMap<SymId, ExprId>),
Repl(&'ctx HashSet<SymId>),
ScopedImport(HashSet<SymId>),
Let(HashMap<SymId, ExprId>),
Param(SymId, ExprId),
With(ExprId),
@@ -281,9 +355,15 @@ pub struct DowngradeCtx<'ctx> {
}
impl<'ctx> DowngradeCtx<'ctx> {
fn new(ctx: &'ctx mut Ctx, global: &'ctx HashMap<SymId, ExprId>) -> Self {
fn new(
ctx: &'ctx mut Ctx,
global: &'ctx HashMap<SymId, ExprId>,
extra_scope: Option<Scope<'ctx>>,
) -> Self {
Self {
scopes: vec![Scope::Global(global)],
scopes: std::iter::once(Scope::Global(global))
.chain(extra_scope)
.collect(),
irs: vec![],
arg_id: 0,
thunk_scopes: vec![Vec::new()],
@@ -345,7 +425,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
self.ctx.symbols.get_or_intern(sym)
}
fn get_sym(&self, id: SymId) -> &str {
fn get_sym(&self, id: SymId) -> Symbol<'_> {
self.ctx.get_sym(id)
}
@@ -357,6 +437,16 @@ impl DowngradeContext for DowngradeCtx<'_> {
return Ok(expr);
}
}
&Scope::Repl(repl_bindings) => {
if repl_bindings.contains(&sym) {
return Ok(self.new_expr(ReplBinding { inner: sym, span }.to_ir()));
}
}
Scope::ScopedImport(scoped_bindings) => {
if scoped_bindings.contains(&sym) {
return Ok(self.new_expr(ScopedImportBinding { inner: sym, span }.to_ir()));
}
}
Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) {
return Ok(self.new_expr(Thunk { inner: expr, span }.to_ir()));
@@ -387,7 +477,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym, synthetic_span())],
attrpath: vec![Attr::Str(sym, rnix::TextRange::default())],
default: result, // Link to outer With or None
span,
};
@@ -415,7 +505,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
let range = (start..start + slots).map(ExprId);
let span = synthetic_span();
let span = rnix::TextRange::default();
// Fill reserved slots with placeholder value
self.irs.extend(
range

View File

@@ -5,14 +5,488 @@ use hashbrown::hash_map::Entry;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use rnix::TextRange;
use rnix::ast::{self, HasEntry};
use rnix::ast::{self, AstToken, Expr, HasEntry};
use rowan::ast::AstNode;
use crate::error::{Error, Result};
use crate::ir::{Attr, AttrSet, ConcatStrings, ExprId, Select, Str, SymId};
use crate::value::format_symbol;
use crate::error::{Error, Result, Source};
use crate::ir::*;
use crate::value::Symbol;
use super::*;
pub trait DowngradeContext {
fn downgrade(self, expr: rnix::ast::Expr) -> Result<ExprId>;
fn new_expr(&mut self, expr: Ir) -> ExprId;
fn new_arg(&mut self, span: TextRange) -> ExprId;
fn maybe_thunk(&mut self, id: ExprId) -> ExprId;
fn new_sym(&mut self, sym: String) -> SymId;
fn get_sym(&self, id: SymId) -> Symbol<'_>;
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>;
fn get_ir(&self, id: ExprId) -> &Ir;
fn replace_ir(&mut self, id: ExprId, expr: Ir);
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
fn get_current_source(&self) -> Source;
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn push_thunk_scope(&mut self);
fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)>;
fn register_thunk(&mut self, slot: ExprId, inner: ExprId);
}
pub trait Downgrade<Ctx: DowngradeContext> {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
}
/// Dispatches lowering to the implementation for the concrete AST node type.
///
/// `Expr::Error` nodes (parse errors) are rejected here with a downgrade
/// error carrying the offending source span.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        use Expr::*;
        match self {
            Apply(apply) => apply.downgrade(ctx),
            Assert(assert) => assert.downgrade(ctx),
            // Parse-error nodes abort lowering with a source-located error.
            Error(error) => {
                let span = error.syntax().text_range();
                Err(self::Error::downgrade_error(
                    error.to_string(),
                    ctx.get_current_source(),
                    span,
                ))
            }
            IfElse(ifelse) => ifelse.downgrade(ctx),
            Select(select) => select.downgrade(ctx),
            Str(str) => str.downgrade(ctx),
            Path(path) => path.downgrade(ctx),
            Literal(lit) => lit.downgrade(ctx),
            Lambda(lambda) => lambda.downgrade(ctx),
            LegacyLet(let_) => let_.downgrade(ctx),
            LetIn(letin) => letin.downgrade(ctx),
            List(list) => list.downgrade(ctx),
            BinOp(op) => op.downgrade(ctx),
            AttrSet(attrs) => attrs.downgrade(ctx),
            UnaryOp(op) => op.downgrade(ctx),
            Ident(ident) => ident.downgrade(ctx),
            With(with) => with.downgrade(ctx),
            HasAttr(has) => has.downgrade(ctx),
            // Parentheses and the root node are transparent wrappers.
            Paren(paren) => paren.expr().unwrap().downgrade(ctx),
            Root(root) => root.expr().unwrap().downgrade(ctx),
        }
    }
}
/// Lowers `assert cond; body` into an `Assert` IR node, keeping the raw
/// source text of the condition for use in failure messages.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let cond_node = self.condition().unwrap();
        // Keep the pretty-printed condition so assertion failures can show it.
        let assertion_raw = cond_node.to_string();
        // Lower the condition first, then the body (source order).
        let assertion = cond_node.downgrade(ctx)?;
        let expr = self.body().unwrap().downgrade(ctx)?;
        let ir = Assert {
            assertion,
            expr,
            assertion_raw,
            span,
        }
        .to_ir();
        Ok(ctx.new_expr(ir))
    }
}
/// Lowers `if c then t else e` into an `If` IR node.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        // Lower the three sub-expressions in source order.
        let cond = self.condition().unwrap().downgrade(ctx)?;
        let consq = self.body().unwrap().downgrade(ctx)?;
        let alter = self.else_body().unwrap().downgrade(ctx)?;
        let ir = If {
            cond,
            consq,
            alter,
            span,
        }
        .to_ir();
        Ok(ctx.new_expr(ir))
    }
}
/// Lowers a path expression.
///
/// A single literal of the form `<name>` is a search-path lookup and is
/// desugared into `findFile nixPath "name"` (via the corresponding
/// builtins); interpolated paths become a `ConcatStrings` (with
/// `force_string: false`) wrapped in a `Path` node.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let parts = self
            .parts()
            .map(|part| match part {
                ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
                    Str {
                        val: lit.to_string(),
                        span: lit.syntax().text_range(),
                    }
                    .to_ir(),
                )),
                ast::InterpolPart::Interpolation(interpol) => {
                    interpol.expr().unwrap().downgrade(ctx)
                }
            })
            .collect::<Result<Vec<_>>>()?;
        let expr = if parts.len() == 1 {
            let part = parts.into_iter().next().unwrap();
            // Using strip_suffix (instead of unconditionally slicing off the
            // last byte) only rewrites well-formed `<...>` literals and cannot
            // panic on a bare `<` (the old slice underflowed on an empty rest).
            if let &Ir::Str(Str { ref val, span }) = ctx.get_ir(part)
                && let Some(path) = val.strip_prefix('<').and_then(|p| p.strip_suffix('>'))
            {
                // Rewrite the literal in place to just the search key.
                ctx.replace_ir(
                    part,
                    Str {
                        val: path.to_string(),
                        span,
                    }
                    .to_ir(),
                );
                // Desugar `<key>` to `findFile nixPath "key"`.
                let sym = ctx.new_sym("findFile".into());
                let find_file = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
                let sym = ctx.new_sym("nixPath".into());
                let nix_path = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
                let call = ctx.new_expr(
                    Call {
                        func: find_file,
                        arg: nix_path,
                        span,
                    }
                    .to_ir(),
                );
                return Ok(ctx.new_expr(
                    Call {
                        func: call,
                        arg: part,
                        span,
                    }
                    .to_ir(),
                ));
            } else {
                part
            }
        } else {
            // Interpolated path: concatenate parts without forcing strings.
            ctx.new_expr(
                ConcatStrings {
                    parts,
                    span,
                    force_string: false,
                }
                .to_ir(),
            )
        };
        Ok(ctx.new_expr(Path { expr, span }.to_ir()))
    }
}
/// Lowers a string literal / interpolated string.
///
/// A string consisting of exactly one literal part is emitted as a bare
/// `Str`; anything else becomes a `ConcatStrings`, with each interpolated
/// sub-expression passed through `maybe_thunk`.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let normalized = self.normalized_parts();
        // Fast path: a plain `"literal"` with no interpolations stays a Str.
        let is_single_literal = normalized.len() == 1
            && matches!(normalized.first(), Some(ast::InterpolPart::Literal(_)));
        let parts = normalized
            .into_iter()
            .map(|part| match part {
                ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
                ast::InterpolPart::Interpolation(interpol) => {
                    let inner = interpol.expr().unwrap().downgrade(ctx)?;
                    Ok(ctx.maybe_thunk(inner))
                }
            })
            .collect::<Result<Vec<_>>>()?;
        Ok(if is_single_literal {
            parts.into_iter().next().unwrap()
        } else {
            // force_string: true distinguishes string interpolation from
            // path interpolation (cf. the ast::Path impl, which uses false).
            ctx.new_expr(
                ConcatStrings {
                    parts,
                    span,
                    force_string: true,
                }
                .to_ir(),
            )
        })
    }
}
/// Lowers integer, float, and URI literals into their IR counterparts.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        // Parse errors are assumed impossible (see the module-level allow).
        let ir = match self.kind() {
            ast::LiteralKind::Integer(i) => Int {
                inner: i.value().unwrap(),
                span,
            }
            .to_ir(),
            ast::LiteralKind::Float(f) => Float {
                inner: f.value().unwrap(),
                span,
            }
            .to_ir(),
            // URI literals are represented as plain strings in the IR.
            ast::LiteralKind::Uri(u) => Str {
                val: u.to_string(),
                span,
            }
            .to_ir(),
        };
        Ok(ctx.new_expr(ir))
    }
}
/// Lowers an identifier: `__curPos` becomes a dedicated IR node, everything
/// else is interned and resolved against the current scope chain.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let name = self.ident_token().unwrap().to_string();
        match name.as_str() {
            "__curPos" => Ok(ctx.new_expr(CurPos { span }.to_ir())),
            _ => {
                let sym = ctx.new_sym(name);
                ctx.lookup(sym, span)
            }
        }
    }
}
/// Lowers an attribute set.
///
/// Non-recursive sets are lowered directly into an `AttrSet` IR node;
/// `rec { ... }` is desugared into let-style bindings so attributes can
/// reference each other.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let rec = self.rec_token().is_some();
        let span = self.syntax().text_range();
        if !rec {
            let attrs = downgrade_attrs(self, ctx)?;
            return Ok(ctx.new_expr(attrs.to_ir()));
        }
        // rec { a = 1; b = a; } => let a = 1; b = a; in { inherit a b; }
        let entries: Vec<_> = self.entries().collect();
        downgrade_rec_bindings(entries, ctx, span)
    }
}
/// Downgrades a list, passing each element through `maybe_thunk`.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let items = self
            .items()
            .map(|element| {
                element
                    .downgrade(ctx)
                    .map(|lowered| ctx.maybe_thunk(lowered))
            })
            .collect::<Result<_>>()?;
        Ok(ctx.new_expr(List { items, span }.to_ir()))
    }
}
/// Downgrades a binary operation; operands are lowered left-to-right.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let op = self.operator().unwrap().into();
        let left = self.lhs().unwrap().downgrade(ctx)?;
        let right = self.rhs().unwrap().downgrade(ctx)?;
        Ok(ctx.new_expr(
            BinOp {
                lhs: left,
                rhs: right,
                kind: op,
                span,
            }
            .to_ir(),
        ))
    }
}
/// Downgrades a "has attribute" (`?`) expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        // Lower the subject first, then the attribute path being tested.
        let lhs = self.expr().unwrap().downgrade(ctx)?;
        let rhs = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
        let ir = HasAttr { lhs, rhs, span }.to_ir();
        Ok(ctx.new_expr(ir))
    }
}
/// Downgrades a unary operation.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let operand = self.expr().unwrap().downgrade(ctx)?;
        let op = self.operator().unwrap().into();
        let ir = UnOp {
            rhs: operand,
            kind: op,
            span,
        }
        .to_ir();
        Ok(ctx.new_expr(ir))
    }
}
/// Downgrades an attribute selection (`.`), including the optional
/// `or default` fallback.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let expr = self.expr().unwrap().downgrade(ctx)?;
        let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
        // The `or` fallback, when present, is lowered and maybe-thunked.
        let default = match self.default_expr() {
            Some(fallback) => {
                let lowered = fallback.downgrade(ctx)?;
                Some(ctx.maybe_thunk(lowered))
            }
            None => None,
        };
        Ok(ctx.new_expr(
            Select {
                expr,
                attrpath,
                default,
                span,
            }
            .to_ir(),
        ))
    }
}
/// Downgrades a `legacy let`, which is essentially a recursive attribute set.
/// The body of the `let` is accessed via `let.body`.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let entries: Vec<_> = self.entries().collect();
        let attrset_expr = downgrade_let_bindings(entries, ctx, span, |ctx, binding_keys| {
            // Create plain attrset as body with inherit
            let mut attrs = AttrSet {
                stcs: HashMap::new(),
                dyns: Vec::new(),
                span,
            };
            for sym in binding_keys {
                // Default (synthetic) spans: these lookups correspond to no
                // concrete source location.
                let expr = ctx.lookup(*sym, rnix::TextRange::default())?;
                attrs.stcs.insert(*sym, (expr, rnix::TextRange::default()));
            }
            Ok(ctx.new_expr(attrs.to_ir()))
        })?;
        // The whole expression evaluates to `<attrset>.body`.
        let body_sym = ctx.new_sym("body".to_string());
        let select = Select {
            expr: attrset_expr,
            attrpath: vec![Attr::Str(body_sym, rnix::TextRange::default())],
            default: None,
            span,
        };
        Ok(ctx.new_expr(select.to_ir()))
    }
}
/// Downgrades a `let ... in ...` expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let body = self.body().unwrap();
        let bindings: Vec<_> = self.entries().collect();
        // The binding keys are only needed by LegacyLet; here the body is
        // simply lowered inside the let scope.
        downgrade_let_bindings(bindings, ctx, span, move |ctx, _keys| body.downgrade(ctx))
    }
}
/// Downgrades a `with` expression: the namespace is lowered outside the
/// `with` scope, the body inside it.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let ns = self.namespace().unwrap().downgrade(ctx)?;
        ctx.with_with_scope(ns, |ctx| self.body().unwrap().downgrade(ctx))
    }
}
/// Downgrades a lambda (function) expression.
/// This involves desugaring pattern-matching arguments into `let` bindings.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let raw_param = self.param().unwrap();
        // One argument slot per lambda; pattern entries project out of it.
        let arg = ctx.new_arg(raw_param.syntax().text_range());
        // Open a scope so thunks created while lowering the body are
        // collected and attached to the resulting Func below.
        ctx.push_thunk_scope();
        let param;
        let body;
        match raw_param {
            ast::Param::IdentParam(id) => {
                // Simple case: `x: body`
                let param_sym = ctx.new_sym(id.to_string());
                param = None;
                // Downgrade body in Param scope
                body = ctx
                    .with_param_scope(param_sym, arg, |ctx| self.body().unwrap().downgrade(ctx))?;
            }
            ast::Param::Pattern(pattern) => {
                // Pattern case: `{ a, b ? d, ... } @ alias: body`.
                let alias = pattern
                    .pat_bind()
                    .map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
                let ellipsis = pattern.ellipsis_token().is_some();
                let pat_entries = pattern.pat_entries();
                let PatternBindings {
                    body: inner_body,
                    required,
                    optional,
                } = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
                    self.body().unwrap().downgrade(ctx)
                })?;
                param = Some(Param {
                    required,
                    optional,
                    ellipsis,
                });
                body = inner_body;
            }
        }
        // Thunks recorded while lowering the body travel with the Func.
        let thunks = ctx.pop_thunk_scope();
        let span = self.syntax().text_range();
        Ok(ctx.new_expr(
            Func {
                body,
                param,
                arg,
                thunks,
                span,
            }
            .to_ir(),
        ))
    }
}
/// Downgrades a function application.
/// In Nix, function application is left-associative, so `f a b` arrives as
/// nested `Apply` nodes — `((f a) b)` — each with exactly one argument.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let span = self.syntax().text_range();
        let func = self.lambda().unwrap().downgrade(ctx)?;
        let lowered_arg = self.argument().unwrap().downgrade(ctx)?;
        // The argument goes through maybe_thunk so the context can defer
        // its evaluation.
        let arg = ctx.maybe_thunk(lowered_arg);
        Ok(ctx.new_expr(Call { func, arg, span }.to_ir()))
    }
}
enum PendingValue {
Expr(ast::Expr),
@@ -321,10 +795,7 @@ impl PendingAttrSet {
if self.stcs.contains_key(&sym) {
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(sym))
),
format!("attribute '{}' already defined", ctx.get_sym(sym)),
ctx.get_current_source(),
span,
));
@@ -401,7 +872,7 @@ fn make_attrpath_value_entry(path: Vec<ast::Attr>, value: ast::Expr) -> ast::Ent
}
/// Downgrades the entries of a non-recursive attribute set.
pub fn downgrade_attrs(
fn downgrade_attrs(
attrs: impl ast::HasEntry + AstNode,
ctx: &mut impl DowngradeContext,
) -> Result<AttrSet> {
@@ -412,7 +883,7 @@ pub fn downgrade_attrs(
}
/// Downgrades a single attribute key (part of an attribute path).
pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<Attr> {
fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<Attr> {
use ast::Attr::*;
use ast::InterpolPart::*;
match attr {
@@ -462,7 +933,7 @@ pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Resul
}
/// Downgrades an attribute path (e.g., `a.b."${c}".d`) into a `Vec<Attr>`.
pub fn downgrade_attrpath(
fn downgrade_attrpath(
attrpath: ast::Attrpath,
ctx: &mut impl DowngradeContext,
) -> Result<Vec<Attr>> {
@@ -472,14 +943,14 @@ pub fn downgrade_attrpath(
.collect::<Result<Vec<_>>>()
}
pub struct PatternBindings {
pub body: ExprId,
pub required: Vec<(SymId, TextRange)>,
pub optional: Vec<(SymId, TextRange)>,
struct PatternBindings {
body: ExprId,
required: Vec<(SymId, TextRange)>,
optional: Vec<(SymId, TextRange)>,
}
/// Helper function for Lambda pattern parameters.
pub fn downgrade_pattern_bindings<Ctx>(
fn downgrade_pattern_bindings<Ctx>(
pat_entries: impl Iterator<Item = ast::PatEntry>,
alias: Option<SymId>,
arg: ExprId,
@@ -507,7 +978,7 @@ where
if !seen_params.insert(sym) {
return Err(Error::downgrade_error(
format!("duplicate parameter '{}'", format_symbol(ctx.get_sym(sym))),
format!("duplicate parameter '{}'", ctx.get_sym(sym)),
ctx.get_current_source(),
span,
));
@@ -553,7 +1024,7 @@ where
.collect();
for &slot in &slots {
let span = synthetic_span();
let span = rnix::TextRange::default();
ctx.replace_ir(slot, Thunk { inner: slot, span }.to_ir());
}
@@ -603,7 +1074,7 @@ where
/// Downgrades a `let...in` expression. This is a special case of rec attrs
/// that disallows dynamic attributes and has a body expression.
pub fn downgrade_let_bindings<Ctx, F>(
fn downgrade_let_bindings<Ctx, F>(
entries: Vec<ast::Entry>,
ctx: &mut Ctx,
span: TextRange,
@@ -619,7 +1090,7 @@ where
}
/// Downgrades a `rec` attribute set.
pub fn downgrade_rec_bindings<Ctx>(
fn downgrade_rec_bindings<Ctx>(
entries: Vec<ast::Entry>,
ctx: &mut Ctx,
span: TextRange,
@@ -635,8 +1106,8 @@ where
};
for sym in binding_keys {
let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, (expr, synthetic_span()));
let expr = ctx.lookup(*sym, rnix::TextRange::default())?;
attrs.stcs.insert(*sym, (expr, rnix::TextRange::default()));
}
Ok(ctx.new_expr(attrs.to_ir()))
@@ -671,7 +1142,7 @@ where
.collect();
for &slot in &slots {
let slot_span = synthetic_span();
let slot_span = rnix::TextRange::default();
ctx.replace_ir(
slot,
Thunk {
@@ -691,7 +1162,7 @@ where
} else {
return Err(Error::internal(format!(
"binding '{}' not found",
format_symbol(ctx.get_sym(sym))
ctx.get_sym(sym)
)));
}
}
@@ -733,10 +1204,7 @@ fn collect_binding_syms<Ctx: DowngradeContext, const ALLOW_DYN: bool>(
for (sym, (_, span)) in &pending.stcs {
if !binding_syms.insert(*sym) {
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(*sym))
),
format!("attribute '{}' already defined", ctx.get_sym(*sym)),
ctx.get_current_source(),
*span,
));

View File

@@ -18,7 +18,7 @@ pub use download::Downloader;
pub use metadata_cache::MetadataCache;
use crate::nar;
use crate::runtime::NixError;
use crate::runtime::NixRuntimeError;
#[derive(Serialize)]
pub struct FetchUrlResult {
@@ -62,14 +62,15 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
#[string] expected_hash: Option<String>,
#[string] name: Option<String>,
executable: bool,
) -> Result<FetchUrlResult, NixError> {
) -> Result<FetchUrlResult, NixRuntimeError> {
let _span = tracing::info_span!("op_fetch_url", url = %url).entered();
info!("fetchurl started");
let file_name =
name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string());
let metadata_cache = MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let metadata_cache =
MetadataCache::new(3600).map_err(|e| NixRuntimeError::from(e.to_string()))?;
let input = serde_json::json!({
"type": "file",
@@ -80,7 +81,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
if let Some(cached_entry) = metadata_cache
.lookup(&input)
.map_err(|e| NixError::from(e.to_string()))?
.map_err(|e| NixRuntimeError::from(e.to_string()))?
{
let cached_hash = cached_entry
.info
@@ -112,7 +113,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
let downloader = Downloader::new();
let data = downloader
.download(&url)
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!(bytes = data.len(), "Download complete");
@@ -121,7 +122,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected);
if hash != normalized_expected {
return Err(NixError::from(format!(
return Err(NixRuntimeError::from(format!(
"hash mismatch for '{}': expected {}, got {}",
url, normalized_expected, hash
)));
@@ -132,7 +133,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
let store = ctx.get_store();
let store_path = store
.add_to_store(&file_name, &data, false, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!(store_path = %store_path, "Added to store");
@@ -153,7 +154,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
metadata_cache
.add(&input, &info, &store_path, true)
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
Ok(FetchUrlResult { store_path, hash })
}
@@ -166,12 +167,13 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
#[string] expected_hash: Option<String>,
#[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> {
) -> Result<FetchTarballResult, NixRuntimeError> {
let _span = tracing::info_span!("op_fetch_tarball", url = %url).entered();
info!("fetchTarball started");
let dir_name = name.unwrap_or_else(|| "source".to_string());
let metadata_cache = MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let metadata_cache =
MetadataCache::new(3600).map_err(|e| NixRuntimeError::from(e.to_string()))?;
let input = serde_json::json!({
"type": "tarball",
@@ -181,7 +183,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
if let Some(cached_entry) = metadata_cache
.lookup(&input)
.map_err(|e| NixError::from(e.to_string()))?
.map_err(|e| NixRuntimeError::from(e.to_string()))?
{
let cached_nar_hash = cached_entry
.info
@@ -218,7 +220,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
let downloader = Downloader::new();
let data = downloader
.download(&url)
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!(bytes = data.len(), "Download complete");
@@ -227,7 +229,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected);
if tarball_hash != normalized_expected {
return Err(NixError::from(format!(
return Err(NixRuntimeError::from(format!(
"Tarball hash mismatch for '{}': expected {}, got {}",
url, normalized_expected, tarball_hash
)));
@@ -235,14 +237,14 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
}
info!("Extracting tarball");
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let cache = FetcherCache::new().map_err(|e| NixRuntimeError::from(e.to_string()))?;
let (extracted_path, _temp_dir) = cache
.extract_tarball_to_temp(&data)
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!("Computing NAR hash");
let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
nar::compute_nar_hash(&extracted_path).map_err(|e| NixRuntimeError::from(e.to_string()))?;
debug!(
tarball_hash = %tarball_hash,
@@ -253,7 +255,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
if let Some(ref expected) = expected_nar_hash {
let normalized_expected = normalize_hash(expected);
if nar_hash != normalized_expected {
return Err(NixError::from(format!(
return Err(NixRuntimeError::from(format!(
"NAR hash mismatch for '{}': expected {}, got {}",
url, normalized_expected, nar_hash
)));
@@ -265,7 +267,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
let store = ctx.get_store();
let store_path = store
.add_to_store_from_path(&dir_name, &extracted_path, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!(store_path = %store_path, "Added to store");
@@ -278,7 +280,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
let immutable = expected_nar_hash.is_some();
metadata_cache
.add(&input, &info, &store_path, immutable)
.map_err(|e| NixError::from(e.to_string()))?;
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
Ok(FetchTarballResult {
store_path,
@@ -298,10 +300,10 @@ pub fn op_fetch_git<Ctx: RuntimeContext>(
submodules: bool,
all_refs: bool,
#[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
) -> Result<FetchGitResult, NixRuntimeError> {
let _span = tracing::info_span!("op_fetch_git", url = %url).entered();
info!("fetchGit started");
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let cache = FetcherCache::new().map_err(|e| NixRuntimeError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());
let ctx: &Ctx = state.get_ctx();
@@ -318,7 +320,7 @@ pub fn op_fetch_git<Ctx: RuntimeContext>(
all_refs,
&dir_name,
)
.map_err(|e| NixError::from(e.to_string()))
.map_err(|e| NixRuntimeError::from(e.to_string()))
}
#[op2]
@@ -327,11 +329,12 @@ pub fn op_fetch_hg(
#[string] url: String,
#[string] rev: Option<String>,
#[string] name: Option<String>,
) -> Result<FetchHgResult, NixError> {
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
) -> Result<FetchHgResult, NixRuntimeError> {
let cache = FetcherCache::new().map_err(|e| NixRuntimeError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());
hg::fetch_hg(&cache, &url, rev.as_deref(), &dir_name).map_err(|e| NixError::from(e.to_string()))
hg::fetch_hg(&cache, &url, rev.as_deref(), &dir_name)
.map_err(|e| NixRuntimeError::from(e.to_string()))
}
fn normalize_hash(hash: &str) -> String {

View File

@@ -3,49 +3,8 @@ use hashbrown::HashMap;
use rnix::{TextRange, ast};
use string_interner::symbol::SymbolU32;
use crate::error::{Result, Source};
use nix_js_macros::ir;
mod downgrade;
mod span_utils;
mod utils;
use utils::*;
pub use downgrade::Downgrade;
pub(crate) use span_utils::*;
pub trait DowngradeContext {
fn downgrade(self, expr: rnix::ast::Expr) -> Result<ExprId>;
fn new_expr(&mut self, expr: Ir) -> ExprId;
fn new_arg(&mut self, span: TextRange) -> ExprId;
fn maybe_thunk(&mut self, id: ExprId) -> ExprId;
fn new_sym(&mut self, sym: String) -> SymId;
fn get_sym(&self, id: SymId) -> &str;
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>;
fn get_ir(&self, id: ExprId) -> &Ir;
fn replace_ir(&mut self, id: ExprId, expr: Ir);
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
fn get_current_source(&self) -> Source;
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R;
fn push_thunk_scope(&mut self);
fn pop_thunk_scope(&mut self) -> Vec<(ExprId, ExprId)>;
fn register_thunk(&mut self, slot: ExprId, inner: ExprId);
}
ir! {
Ir,
@@ -73,6 +32,8 @@ ir! {
Builtins,
Builtin(SymId),
CurPos,
ReplBinding(SymId),
ScopedImportBinding(SymId),
}
#[repr(transparent)]

View File

@@ -1,453 +0,0 @@
// Assume no parse error
#![allow(clippy::unwrap_used)]
use rnix::ast::{self, AstToken, Expr, HasEntry};
use rowan::ast::AstNode;
use super::*;
use crate::error::{Error, Result};
pub trait Downgrade<Ctx: DowngradeContext> {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
use Expr::*;
match self {
Apply(apply) => apply.downgrade(ctx),
Assert(assert) => assert.downgrade(ctx),
Error(error) => {
let span = error.syntax().text_range();
Err(self::Error::downgrade_error(
error.to_string(),
ctx.get_current_source(),
span,
))
}
IfElse(ifelse) => ifelse.downgrade(ctx),
Select(select) => select.downgrade(ctx),
Str(str) => str.downgrade(ctx),
Path(path) => path.downgrade(ctx),
Literal(lit) => lit.downgrade(ctx),
Lambda(lambda) => lambda.downgrade(ctx),
LegacyLet(let_) => let_.downgrade(ctx),
LetIn(letin) => letin.downgrade(ctx),
List(list) => list.downgrade(ctx),
BinOp(op) => op.downgrade(ctx),
AttrSet(attrs) => attrs.downgrade(ctx),
UnaryOp(op) => op.downgrade(ctx),
Ident(ident) => ident.downgrade(ctx),
With(with) => with.downgrade(ctx),
HasAttr(has) => has.downgrade(ctx),
Paren(paren) => paren.expr().unwrap().downgrade(ctx),
Root(root) => root.expr().unwrap().downgrade(ctx),
}
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let assertion = self.condition().unwrap();
let assertion_raw = assertion.to_string();
let assertion = assertion.downgrade(ctx)?;
let expr = self.body().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(
Assert {
assertion,
expr,
assertion_raw,
span,
}
.to_ir(),
))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let cond = self.condition().unwrap().downgrade(ctx)?;
let consq = self.body().unwrap().downgrade(ctx)?;
let alter = self.else_body().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(
If {
cond,
consq,
alter,
span,
}
.to_ir(),
))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let parts = self
.parts()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
Str {
val: lit.to_string(),
span: lit.syntax().text_range(),
}
.to_ir(),
)),
ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
}
})
.collect::<Result<Vec<_>>>()?;
let expr = if parts.len() == 1 {
let part = parts.into_iter().next().unwrap();
if let &Ir::Str(Str { ref val, span }) = ctx.get_ir(part)
&& let Some(path) = val.strip_prefix("<").map(|path| &path[..path.len() - 1])
{
ctx.replace_ir(
part,
Str {
val: path.to_string(),
span,
}
.to_ir(),
);
let sym = ctx.new_sym("findFile".into());
let find_file = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let sym = ctx.new_sym("nixPath".into());
let nix_path = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let call = ctx.new_expr(
Call {
func: find_file,
arg: nix_path,
span,
}
.to_ir(),
);
return Ok(ctx.new_expr(
Call {
func: call,
arg: part,
span,
}
.to_ir(),
));
} else {
part
}
} else {
ctx.new_expr(
ConcatStrings {
parts,
span,
force_string: false,
}
.to_ir(),
)
};
Ok(ctx.new_expr(Path { expr, span }.to_ir()))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let normalized = self.normalized_parts();
let is_single_literal = normalized.len() == 1
&& matches!(normalized.first(), Some(ast::InterpolPart::Literal(_)));
let parts = normalized
.into_iter()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
ast::InterpolPart::Interpolation(interpol) => {
let inner = interpol.expr().unwrap().downgrade(ctx)?;
Ok(ctx.maybe_thunk(inner))
}
})
.collect::<Result<Vec<_>>>()?;
Ok(if is_single_literal {
parts.into_iter().next().unwrap()
} else {
ctx.new_expr(
ConcatStrings {
parts,
span,
force_string: true,
}
.to_ir(),
)
})
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
Ok(ctx.new_expr(match self.kind() {
ast::LiteralKind::Integer(int) => Int {
inner: int.value().unwrap(),
span,
}
.to_ir(),
ast::LiteralKind::Float(float) => Float {
inner: float.value().unwrap(),
span,
}
.to_ir(),
ast::LiteralKind::Uri(uri) => Str {
val: uri.to_string(),
span,
}
.to_ir(),
}))
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let text = self.ident_token().unwrap().to_string();
let span = self.syntax().text_range();
if text == "__curPos" {
return Ok(ctx.new_expr(CurPos { span }.to_ir()));
}
let sym = ctx.new_sym(text);
ctx.lookup(sym, span)
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let rec = self.rec_token().is_some();
let span = self.syntax().text_range();
if !rec {
let attrs = downgrade_attrs(self, ctx)?;
return Ok(ctx.new_expr(attrs.to_ir()));
}
// rec { a = 1; b = a; } => let a = 1; b = a; in { inherit a b; }
let entries: Vec<_> = self.entries().collect();
downgrade_rec_bindings(entries, ctx, span)
}
}
/// Downgrades a list.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let items = self
.items()
.map(|item| {
let id = item.downgrade(ctx)?;
Ok(ctx.maybe_thunk(id))
})
.collect::<Result<_>>()?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(List { items, span }.to_ir()))
}
}
/// Downgrades a binary operation.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let lhs = self.lhs().unwrap().downgrade(ctx)?;
let rhs = self.rhs().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
let span = self.syntax().text_range();
Ok(ctx.new_expr(
BinOp {
lhs,
rhs,
kind,
span,
}
.to_ir(),
))
}
}
/// Downgrades a "has attribute" (`?`) expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let lhs = self.expr().unwrap().downgrade(ctx)?;
let rhs = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(HasAttr { lhs, rhs, span }.to_ir()))
}
}
/// Downgrades a unary operation.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let rhs = self.expr().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
let span = self.syntax().text_range();
Ok(ctx.new_expr(UnOp { rhs, kind, span }.to_ir()))
}
}
/// Downgrades an attribute selection (`.`).
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let expr = self.expr().unwrap().downgrade(ctx)?;
let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let default = if let Some(default) = self.default_expr() {
let default_expr = default.downgrade(ctx)?;
Some(ctx.maybe_thunk(default_expr))
} else {
None
};
let span = self.syntax().text_range();
Ok(ctx.new_expr(
Select {
expr,
attrpath,
default,
span,
}
.to_ir(),
))
}
}
/// Downgrades a `legacy let`, which is essentially a recursive attribute set.
/// The body of the `let` is accessed via `let.body`.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let entries: Vec<_> = self.entries().collect();
let attrset_expr = downgrade_let_bindings(entries, ctx, span, |ctx, binding_keys| {
// Create plain attrset as body with inherit
let mut attrs = AttrSet {
stcs: HashMap::new(),
dyns: Vec::new(),
span,
};
for sym in binding_keys {
let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, (expr, synthetic_span()));
}
Ok(ctx.new_expr(attrs.to_ir()))
})?;
let body_sym = ctx.new_sym("body".to_string());
let select = Select {
expr: attrset_expr,
attrpath: vec![Attr::Str(body_sym, synthetic_span())],
default: None,
span,
};
Ok(ctx.new_expr(select.to_ir()))
}
}
/// Downgrades a `let ... in ...` expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let entries: Vec<_> = self.entries().collect();
let body_expr = self.body().unwrap();
let span = self.syntax().text_range();
downgrade_let_bindings(entries, ctx, span, |ctx, _binding_keys| {
body_expr.downgrade(ctx)
})
}
}
/// Downgrades a `with` expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
// with namespace; expr
let namespace = self.namespace().unwrap().downgrade(ctx)?;
// Downgrade body in With scope
let expr = ctx.with_with_scope(namespace, |ctx| self.body().unwrap().downgrade(ctx))?;
Ok(expr)
}
}
/// Downgrades a lambda (function) expression.
/// This involves desugaring pattern-matching arguments into `let` bindings.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let raw_param = self.param().unwrap();
        // Allocate the function's argument slot, spanning the parameter's
        // source range.
        let arg = ctx.new_arg(raw_param.syntax().text_range());
        // Thunks created while downgrading the body belong to this function
        // and are collected again by pop_thunk_scope below.
        ctx.push_thunk_scope();
        let param;
        let body;
        match raw_param {
            ast::Param::IdentParam(id) => {
                // Simple case: `x: body`
                let param_sym = ctx.new_sym(id.to_string());
                // No destructuring pattern for a plain identifier parameter.
                param = None;
                // Downgrade body in Param scope
                body = ctx
                    .with_param_scope(param_sym, arg, |ctx| self.body().unwrap().downgrade(ctx))?;
            }
            ast::Param::Pattern(pattern) => {
                // Pattern case: `{ a, b ? default, ... } @ alias: body`.
                let alias = pattern
                    .pat_bind()
                    .map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
                let ellipsis = pattern.ellipsis_token().is_some();
                let pat_entries = pattern.pat_entries();
                // Desugar the pattern entries into bindings wrapped around
                // the body; required/optional record the declared attrs.
                let PatternBindings {
                    body: inner_body,
                    required,
                    optional,
                } = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
                    self.body().unwrap().downgrade(ctx)
                })?;
                param = Some(Param {
                    required,
                    optional,
                    ellipsis,
                });
                body = inner_body;
            }
        }
        // Reclaim the thunks allocated for this function's body.
        let thunks = ctx.pop_thunk_scope();
        let span = self.syntax().text_range();
        Ok(ctx.new_expr(
            Func {
                body,
                param,
                arg,
                thunks,
                span,
            }
            .to_ir(),
        ))
    }
}
/// Downgrades a function application.
/// Nix application is left-associative (`f a b` parses as `(f a) b`), and
/// each `Apply` node carries exactly one argument, so a single `Call`
/// suffices here.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
    fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
        let func = self.lambda().unwrap().downgrade(ctx)?;
        // Arguments are evaluated lazily: wrap the downgraded argument in a
        // thunk when the context deems it necessary.
        let arg = {
            let raw_arg = self.argument().unwrap().downgrade(ctx)?;
            ctx.maybe_thunk(raw_arg)
        };
        let span = self.syntax().text_range();
        Ok(ctx.new_expr(Call { func, arg, span }.to_ir()))
    }
}

View File

@@ -1,18 +0,0 @@
#![allow(unused)]
use rnix::TextRange;
/// Merges an iterator of spans into the smallest `TextRange` covering all of
/// them. An empty iterator yields the synthetic (zero-length) span.
pub fn merge_spans(spans: impl IntoIterator<Item = TextRange>) -> TextRange {
    spans
        .into_iter()
        .reduce(|a, b| TextRange::new(a.start().min(b.start()), a.end().max(b.end())))
        .unwrap_or_else(synthetic_span)
}
/// A zero-length placeholder span for compiler-generated nodes.
pub fn synthetic_span() -> TextRange {
    let zero = 0.into();
    TextRange::new(zero, zero)
}

View File

@@ -6,6 +6,7 @@ pub mod logging;
pub mod value;
mod codegen;
mod downgrade;
mod fetcher;
mod ir;
mod nar;

View File

@@ -1,15 +1,16 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::path::{Component, Path, PathBuf};
use std::sync::{Arc, Once};
use std::path::Path;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
use rust_embed::Embed;
use crate::error::{Error, Result, Source};
use crate::store::Store;
use crate::value::{AttrSet, List, Symbol, Value};
mod ops;
use ops::*;
type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>;
type LocalValue<'a> = v8::Local<'a, v8::Value>;
type LocalSymbol<'a> = v8::Local<'a, v8::Symbol>;
@@ -17,7 +18,8 @@ type LocalSymbol<'a> = v8::Local<'a, v8::Symbol>;
pub(crate) trait RuntimeContext: 'static {
fn get_current_dir(&self) -> &Path;
fn add_source(&mut self, path: Source);
fn compile_code(&mut self, source: Source) -> Result<String>;
fn compile(&mut self, source: Source) -> Result<String>;
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>;
fn get_source(&self, id: usize) -> Source;
fn get_store(&self) -> &dyn Store;
}
@@ -43,6 +45,7 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
&deno_core::include_js_files!(nix_runtime dir "runtime-ts/dist", "runtime.js");
let mut ops = vec![
op_import::<Ctx>(),
op_scoped_import::<Ctx>(),
op_read_file(),
op_read_file_type(),
op_read_dir(),
@@ -62,6 +65,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_get_env(),
op_walk_dir(),
op_add_filtered_path::<Ctx>(),
op_match(),
op_split(),
];
ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -87,633 +92,20 @@ mod private {
}
impl std::error::Error for SimpleErrorWrapper {}
js_error_wrapper!(SimpleErrorWrapper, NixError, "Error");
js_error_wrapper!(SimpleErrorWrapper, NixRuntimeError, "Error");
impl From<String> for NixError {
impl From<String> for NixRuntimeError {
fn from(value: String) -> Self {
NixError(SimpleErrorWrapper(value))
NixRuntimeError(SimpleErrorWrapper(value))
}
}
impl From<&str> for NixError {
impl From<&str> for NixRuntimeError {
fn from(value: &str) -> Self {
NixError(SimpleErrorWrapper(value.to_string()))
NixRuntimeError(SimpleErrorWrapper(value.to_string()))
}
}
}
pub(crate) use private::NixError;
#[derive(Embed)]
#[folder = "src/runtime/corepkgs"]
pub(crate) struct CorePkgs;
#[deno_core::op2]
#[string]
fn op_import<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
let _span = tracing::info_span!("op_import", path = %path).entered();
let ctx: &mut Ctx = state.get_ctx_mut();
// FIXME: special path type
if path.starts_with("<nix/") && path.ends_with(">") {
let corepkg_name = &path[5..path.len() - 1];
if let Some(file) = CorePkgs::get(corepkg_name) {
tracing::info!("Importing corepkg: {}", corepkg_name);
let source = Source {
ty: crate::error::SourceType::Eval(Arc::new(ctx.get_current_dir().to_path_buf())),
src: str::from_utf8(&file.data)
.expect("corrupted corepkgs file")
.into(),
};
ctx.add_source(source.clone());
return Ok(ctx.compile_code(source).map_err(|err| err.to_string())?);
} else {
return Err(format!("Corepkg not found: {}", corepkg_name).into());
}
}
let current_dir = ctx.get_current_dir();
let mut absolute_path = current_dir.join(&path);
// Do NOT resolve symlinks (eval-okay-symlink-resolution)
// TODO: is this correct?
// .canonicalize()
// .map_err(|e| format!("Failed to resolve path {}: {}", path, e))?;
if absolute_path.is_dir() {
absolute_path.push("default.nix")
}
tracing::info!("Importing file: {}", absolute_path.display());
let source = Source::new_file(absolute_path.clone())
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
tracing::debug!("Compiling file");
ctx.add_source(source.clone());
Ok(ctx.compile_code(source).map_err(|err| err.to_string())?)
}
#[deno_core::op2]
#[string]
fn op_read_file(#[string] path: String) -> std::result::Result<String, NixError> {
Ok(std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {}", path, e))?)
}
#[deno_core::op2(fast)]
fn op_path_exists(#[string] path: String) -> bool {
let must_be_dir = path.ends_with('/') || path.ends_with("/.");
let p = Path::new(&path);
if must_be_dir {
match std::fs::metadata(p) {
Ok(m) => m.is_dir(),
Err(_) => false,
}
} else {
std::fs::symlink_metadata(p).is_ok()
}
}
#[deno_core::op2]
#[string]
fn op_read_file_type(#[string] path: String) -> std::result::Result<String, NixError> {
let path = Path::new(&path);
let metadata = std::fs::symlink_metadata(path)
.map_err(|e| format!("Failed to read file type for {}: {}", path.display(), e))?;
let file_type = metadata.file_type();
let type_str = if file_type.is_dir() {
"directory"
} else if file_type.is_symlink() {
"symlink"
} else if file_type.is_file() {
"regular"
} else {
"unknown"
};
Ok(type_str.to_string())
}
#[deno_core::op2]
#[serde]
fn op_read_dir(
#[string] path: String,
) -> std::result::Result<std::collections::HashMap<String, String>, NixError> {
let path = Path::new(&path);
if !path.is_dir() {
return Err(format!("{} is not a directory", path.display()).into());
}
let entries = std::fs::read_dir(path)
.map_err(|e| format!("Failed to read directory {}: {}", path.display(), e))?;
let mut result = std::collections::HashMap::new();
for entry in entries {
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
let file_name = entry.file_name().to_string_lossy().to_string();
let file_type = entry.file_type().map_err(|e| {
format!(
"Failed to read file type for {}: {}",
entry.path().display(),
e
)
})?;
let type_str = if file_type.is_dir() {
"directory"
} else if file_type.is_symlink() {
"symlink"
} else if file_type.is_file() {
"regular"
} else {
"unknown"
};
result.insert(file_name, type_str.to_string());
}
Ok(result)
}
#[deno_core::op2]
#[string]
fn op_resolve_path(
#[string] current_dir: String,
#[string] path: String,
) -> std::result::Result<String, NixError> {
let _span = tracing::debug_span!("op_resolve_path").entered();
tracing::debug!(current_dir, path);
// If already absolute, return as-is
if path.starts_with('/') {
return Ok(path);
}
// Resolve relative path against current file directory (or CWD)
let current_dir = if !path.starts_with("~/") {
let mut dir = PathBuf::from(current_dir);
dir.push(path);
dir
} else {
let mut dir = std::env::home_dir().ok_or("home dir not defined")?;
dir.push(&path[2..]);
dir
};
let mut normalized = PathBuf::new();
for component in current_dir.components() {
match component {
Component::Prefix(p) => normalized.push(p.as_os_str()),
Component::RootDir => normalized.push("/"),
Component::CurDir => {}
Component::ParentDir => {
normalized.pop();
}
Component::Normal(c) => normalized.push(c),
}
}
tracing::debug!(normalized = normalized.display().to_string());
Ok(normalized.to_string_lossy().to_string())
}
#[deno_core::op2]
#[string]
fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_hash::sha256_hex(&data)
}
#[deno_core::op2]
#[string]
fn op_make_placeholder(#[string] output: String) -> String {
use sha2::{Digest, Sha256};
let input = format!("nix-output:{}", output);
let mut hasher = Sha256::new();
hasher.update(input.as_bytes());
let hash: [u8; 32] = hasher.finalize().into();
let encoded = crate::nix_hash::nix_base32_encode(&hash);
format!("/{}", encoded)
}
#[deno_core::op2]
#[serde]
fn op_decode_span<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] span_str: String,
) -> std::result::Result<serde_json::Value, NixError> {
let parts: Vec<&str> = span_str.split(':').collect();
if parts.len() != 3 {
return Ok(serde_json::json!({
"file": serde_json::Value::Null,
"line": serde_json::Value::Null,
"column": serde_json::Value::Null
}));
}
let source_id: usize = parts[0].parse().map_err(|_| "Invalid source ID")?;
let start: u32 = parts[1].parse().map_err(|_| "Invalid start offset")?;
let ctx: &Ctx = state.get_ctx();
let source = ctx.get_source(source_id);
let content = &source.src;
let (line, column) = byte_offset_to_line_col(content, start as usize);
Ok(serde_json::json!({
"file": source.get_name(),
"line": line,
"column": column
}))
}
/// Converts a byte offset in `content` into 1-based (line, column)
/// coordinates. Columns count characters (not bytes); offsets at or past the
/// end of `content` resolve to the position just after the last character.
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
    content
        .char_indices()
        .take_while(|&(idx, _)| idx < offset)
        .fold((1u32, 1u32), |(line, col), (_, ch)| {
            if ch == '\n' { (line + 1, 1) } else { (line, col + 1) }
        })
}
#[deno_core::op2]
#[string]
fn op_make_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] ty: String,
#[string] hash_hex: String,
#[string] name: String,
) -> String {
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
}
#[deno_core::op2]
#[string]
fn op_make_text_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] hash_hex: String,
#[string] name: String,
#[serde] references: Vec<String>,
) -> String {
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
crate::nix_hash::make_text_store_path(store_dir, &hash_hex, &name, &references)
}
#[deno_core::op2]
#[string]
fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String) -> String {
crate::nix_hash::output_path_name(&drv_name, &output_name)
}
#[deno_core::op2]
#[string]
fn op_make_fixed_output_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] hash_algo: String,
#[string] hash: String,
#[string] hash_mode: String,
#[string] name: String,
) -> String {
use sha2::{Digest, Sha256};
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
if hash_algo == "sha256" && hash_mode == "recursive" {
crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
} else {
let prefix = if hash_mode == "recursive" { "r:" } else { "" };
let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
let mut hasher = Sha256::new();
hasher.update(inner_input.as_bytes());
let inner_hash = hex::encode(hasher.finalize());
crate::nix_hash::make_store_path(store_dir, "output:out", &inner_hash, &name)
}
}
#[deno_core::op2]
#[string]
fn op_add_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
#[string] name: Option<String>,
recursive: bool,
#[string] sha256: Option<String>,
) -> std::result::Result<String, NixError> {
use sha2::{Digest, Sha256};
use std::fs;
use std::path::Path;
let path_obj = Path::new(&path);
if !path_obj.exists() {
return Err(NixError::from(format!("path '{}' does not exist", path)));
}
let computed_name = name.unwrap_or_else(|| {
path_obj
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("source")
.to_string()
});
let computed_hash = if recursive {
crate::nar::compute_nar_hash(path_obj)
.map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
} else {
if !path_obj.is_file() {
return Err(NixError::from(
"when 'recursive' is false, path must be a regular file",
));
}
let contents = fs::read(path_obj)
.map_err(|e| NixError::from(format!("failed to read '{}': {}", path, e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
hex::encode(hasher.finalize())
};
if let Some(expected_hash) = sha256 {
let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
.ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
if computed_hash != expected_hex {
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
path, expected_hex, computed_hash
)));
}
}
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = if recursive {
store
.add_to_store_from_path(&computed_name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?
} else {
let contents = fs::read(path_obj)
.map_err(|e| NixError::from(format!("failed to read '{}': {}", path, e)))?;
store
.add_to_store(&computed_name, &contents, false, vec![])
.map_err(|e| NixError::from(format!("failed to add to store: {}", e)))?
};
Ok(store_path)
}
#[deno_core::op2]
#[string]
fn op_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use crate::store::validate_store_path;
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
validate_store_path(store_dir, &path).map_err(|e| NixError::from(e.to_string()))?;
store
.ensure_path(&path)
.map_err(|e| NixError::from(e.to_string()))?;
Ok(path)
}
#[deno_core::op2]
#[string]
fn op_to_file<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] name: String,
#[string] contents: String,
#[serde] references: Vec<String>,
) -> std::result::Result<String, NixError> {
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.add_text_to_store(&name, &contents, references)
.map_err(|e| NixError::from(format!("builtins.toFile failed: {}", e)))?;
Ok(store_path)
}
#[deno_core::op2]
#[string]
fn op_copy_path_to_store<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use std::path::Path;
let path_obj = Path::new(&path);
if !path_obj.exists() {
return Err(NixError::from(format!("path '{}' does not exist", path)));
}
let name = path_obj
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("source")
.to_string();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.add_to_store_from_path(&name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to copy path to store: {}", e)))?;
Ok(store_path)
}
#[deno_core::op2]
#[string]
fn op_get_env(#[string] key: String) -> std::result::Result<String, NixError> {
match std::env::var(key) {
Ok(val) => Ok(val),
Err(std::env::VarError::NotPresent) => Ok("".into()),
Err(err) => Err(format!("Failed to read env var: {err}").into()),
}
}
#[deno_core::op2]
#[serde]
fn op_walk_dir(#[string] path: String) -> std::result::Result<Vec<(String, String)>, NixError> {
fn walk_recursive(
base: &Path,
current: &Path,
results: &mut Vec<(String, String)>,
) -> std::result::Result<(), NixError> {
let entries = std::fs::read_dir(current)
.map_err(|e| NixError::from(format!("failed to read directory: {}", e)))?;
for entry in entries {
let entry =
entry.map_err(|e| NixError::from(format!("failed to read entry: {}", e)))?;
let path = entry.path();
let rel_path = path
.strip_prefix(base)
.map_err(|e| NixError::from(format!("failed to get relative path: {}", e)))?
.to_string_lossy()
.to_string();
let file_type = entry
.file_type()
.map_err(|e| NixError::from(format!("failed to get file type: {}", e)))?;
let type_str = if file_type.is_dir() {
"directory"
} else if file_type.is_symlink() {
"symlink"
} else {
"regular"
};
results.push((rel_path.clone(), type_str.to_string()));
if file_type.is_dir() {
walk_recursive(base, &path, results)?;
}
}
Ok(())
}
let path = Path::new(&path);
if !path.is_dir() {
return Err(NixError::from(format!(
"{} is not a directory",
path.display()
)));
}
let mut results = Vec::new();
walk_recursive(path, path, &mut results)?;
Ok(results)
}
#[deno_core::op2]
#[string]
fn op_add_filtered_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] src_path: String,
#[string] name: Option<String>,
recursive: bool,
#[string] sha256: Option<String>,
#[serde] include_paths: Vec<String>,
) -> std::result::Result<String, NixError> {
use sha2::{Digest, Sha256};
use std::fs;
let src = Path::new(&src_path);
if !src.exists() {
return Err(NixError::from(format!(
"path '{}' does not exist",
src_path
)));
}
let computed_name = name.unwrap_or_else(|| {
src.file_name()
.and_then(|n| n.to_str())
.unwrap_or("source")
.to_string()
});
let temp_dir = tempfile::tempdir()
.map_err(|e| NixError::from(format!("failed to create temp dir: {}", e)))?;
let dest = temp_dir.path().join(&computed_name);
fs::create_dir_all(&dest)
.map_err(|e| NixError::from(format!("failed to create dest dir: {}", e)))?;
for rel_path in &include_paths {
let src_file = src.join(rel_path);
let dest_file = dest.join(rel_path);
if let Some(parent) = dest_file.parent() {
fs::create_dir_all(parent)
.map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
}
let metadata = fs::symlink_metadata(&src_file)
.map_err(|e| NixError::from(format!("failed to read metadata: {}", e)))?;
if metadata.is_symlink() {
let target = fs::read_link(&src_file)
.map_err(|e| NixError::from(format!("failed to read symlink: {}", e)))?;
#[cfg(unix)]
std::os::unix::fs::symlink(&target, &dest_file)
.map_err(|e| NixError::from(format!("failed to create symlink: {}", e)))?;
#[cfg(not(unix))]
return Err(NixError::from("symlinks not supported on this platform"));
} else if metadata.is_dir() {
fs::create_dir_all(&dest_file)
.map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
} else {
fs::copy(&src_file, &dest_file)
.map_err(|e| NixError::from(format!("failed to copy file: {}", e)))?;
}
}
let computed_hash = if recursive {
crate::nar::compute_nar_hash(&dest)
.map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
} else {
if !dest.is_file() {
return Err(NixError::from(
"when 'recursive' is false, path must be a regular file",
));
}
let contents =
fs::read(&dest).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
hex::encode(hasher.finalize())
};
if let Some(expected_hash) = sha256 {
let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
.ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
if computed_hash != expected_hex {
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
src_path, expected_hex, computed_hash
)));
}
}
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.add_to_store_from_path(&computed_name, &dest, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?;
Ok(store_path)
}
pub(crate) use private::NixRuntimeError;
pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime,
@@ -727,11 +119,16 @@ pub(crate) struct Runtime<Ctx: RuntimeContext> {
impl<Ctx: RuntimeContext> Runtime<Ctx> {
pub(crate) fn new() -> Result<Self> {
use std::sync::Once;
// Initialize V8 once
static INIT: Once = Once::new();
INIT.call_once(|| {
// First flag is always not recognized
assert_eq!(deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]), [""]);
assert_eq!(
deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]),
[""]
);
JsRuntime::init_platform(
Some(v8::new_default_platform(0, false).make_shared()),
false,
@@ -743,6 +140,8 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
..Default::default()
});
js_runtime.op_state().borrow_mut().put(RegexCache::new());
let (
is_thunk_symbol,
primop_metadata_symbol,
@@ -773,7 +172,6 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
.js_runtime
.execute_script("<eval>", script)
.map_err(|error| {
// Get current source from Context
let op_state = self.js_runtime.op_state();
let op_state_borrow = op_state.borrow();
let ctx: &Ctx = op_state_borrow.get_ctx();
@@ -934,7 +332,7 @@ fn to_value<'a>(
let val = val.get(scope, key).expect("infallible operation");
let key = key.to_rust_string_lossy(scope);
(
Symbol::new(key),
Symbol::from(key),
to_value(
val,
scope,
@@ -1050,29 +448,3 @@ fn to_primop<'a>(
Some(Value::PrimOpApp(name))
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod test {
use super::*;
use crate::context::Context;
#[test]
fn to_value_working() {
let mut ctx = Context::new().unwrap();
const EXPR: &str = "({ test: [1., 9223372036854775807n, true, false, 'hello world!'] })";
assert_eq!(
ctx.eval_js(EXPR.into()).unwrap(),
Value::AttrSet(AttrSet::new(std::collections::BTreeMap::from([(
Symbol::from("test"),
Value::List(List::new(vec![
Value::Float(1.),
Value::Int(9223372036854775807),
Value::Bool(true),
Value::Bool(false),
Value::String("hello world!".to_string())
]))
)])))
);
}
}

766
nix-js/src/runtime/ops.rs Normal file
View File

@@ -0,0 +1,766 @@
use std::path::{Component, Path, PathBuf};
use std::sync::Arc;
use hashbrown::hash_map::{Entry, HashMap};
use deno_core::OpState;
use regex::Regex;
use rust_embed::Embed;
use crate::error::Source;
use super::{NixRuntimeError, OpStateExt, RuntimeContext};
/// Per-runtime cache of compiled regular expressions, keyed by pattern text.
/// Backs `op_match`/`op_split` so each distinct pattern is compiled once.
#[derive(Debug, Default)]
pub(super) struct RegexCache {
    cache: HashMap<String, Regex>,
}
impl RegexCache {
    /// Creates an empty cache.
    pub(super) fn new() -> Self {
        Self::default()
    }
    /// Returns the compiled regex for `pattern`, compiling and caching it on
    /// first use. Invalid patterns return the compile error and leave the
    /// cache untouched.
    ///
    /// Unlike the entry-API formulation, this only allocates an owned key
    /// String on a cache miss; hits are a plain borrowed lookup. (Regex
    /// clones are cheap — the compiled program is internally shared.)
    fn get_regex(&mut self, pattern: &str) -> Result<Regex, regex::Error> {
        if let Some(re) = self.cache.get(pattern) {
            return Ok(re.clone());
        }
        let re = Regex::new(pattern)?;
        self.cache.insert(pattern.to_string(), re.clone());
        Ok(re)
    }
}
/// Core Nix expressions embedded at compile time from
/// `src/runtime/corepkgs`; served by `op_import` for `<nix/...>` paths.
#[derive(Embed)]
#[folder = "src/runtime/corepkgs"]
pub(crate) struct CorePkgs;
/// Resolves and compiles a Nix file for `import`, returning the compiled JS.
///
/// Two path forms are accepted:
/// * `<nix/...>` — looked up in the embedded [`CorePkgs`] bundle;
/// * anything else — joined onto the evaluator's current directory, with
///   `default.nix` appended when the target is a directory.
///
/// The source is registered with the context before compiling so spans can
/// later be mapped back to it for error reporting.
#[deno_core::op2]
#[string]
pub(super) fn op_import<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    let _span = tracing::info_span!("op_import", path = %path).entered();
    let ctx: &mut Ctx = state.get_ctx_mut();
    // FIXME: special path type
    if path.starts_with("<nix/") && path.ends_with(">") {
        // Strip "<nix/" and ">" to get the bundled file name.
        let corepkg_name = &path[5..path.len() - 1];
        if let Some(file) = CorePkgs::get(corepkg_name) {
            tracing::info!("Importing corepkg: {}", corepkg_name);
            let source = Source {
                ty: crate::error::SourceType::Eval(Arc::new(ctx.get_current_dir().to_path_buf())),
                src: str::from_utf8(&file.data)
                    .expect("corrupted corepkgs file")
                    .into(),
            };
            ctx.add_source(source.clone());
            return Ok(ctx.compile(source).map_err(|err| err.to_string())?);
        } else {
            return Err(format!("Corepkg not found: {}", corepkg_name).into());
        }
    }
    let current_dir = ctx.get_current_dir();
    let mut absolute_path = current_dir.join(&path);
    // Do NOT resolve symlinks (eval-okay-symlink-resolution)
    // TODO: is this correct?
    // .canonicalize()
    // .map_err(|e| format!("Failed to resolve path {}: {}", path, e))?;
    // Importing a directory means importing its default.nix.
    if absolute_path.is_dir() {
        absolute_path.push("default.nix")
    }
    tracing::info!("Importing file: {}", absolute_path.display());
    let source = Source::new_file(absolute_path.clone())
        .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
    tracing::debug!("Compiling file");
    ctx.add_source(source.clone());
    Ok(ctx.compile(source).map_err(|err| err.to_string())?)
}
/// Like `op_import`, but compiles the file with `scope` as a set of extra
/// identifiers visible to it (presumably backing `builtins.scopedImport` —
/// confirm at the JS call site). Note: unlike `op_import`, this does not
/// handle `<nix/...>` corepkg paths.
#[deno_core::op2]
#[string]
pub(super) fn op_scoped_import<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] path: String,
    #[serde] scope: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
    let _span = tracing::info_span!("op_scoped_import", path = %path).entered();
    let ctx: &mut Ctx = state.get_ctx_mut();
    let current_dir = ctx.get_current_dir();
    let mut absolute_path = current_dir.join(&path);
    // Importing a directory means importing its default.nix.
    if absolute_path.is_dir() {
        absolute_path.push("default.nix")
    }
    tracing::info!("Scoped importing file: {}", absolute_path.display());
    let source = Source::new_file(absolute_path.clone())
        .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
    tracing::debug!("Compiling file for scoped import");
    // Register the source first so spans can be resolved in error reports.
    ctx.add_source(source.clone());
    Ok(ctx
        .compile_scoped(source, scope)
        .map_err(|err| err.to_string())?)
}
/// Reads the file at `path` into a UTF-8 string, mapping any I/O failure to
/// a Nix runtime error.
#[deno_core::op2]
#[string]
pub(super) fn op_read_file(#[string] path: String) -> std::result::Result<String, NixRuntimeError> {
    match std::fs::read_to_string(&path) {
        Ok(contents) => Ok(contents),
        Err(e) => Err(format!("Failed to read {}: {}", path, e).into()),
    }
}
/// Checks whether `path` exists. A trailing `/` or `/.` requires the target
/// to be a directory (after following symlinks); otherwise any file-system
/// entry — including a dangling symlink — counts as existing.
#[deno_core::op2(fast)]
pub(super) fn op_path_exists(#[string] path: String) -> bool {
    let p = Path::new(&path);
    if path.ends_with('/') || path.ends_with("/.") {
        // Follow symlinks: the final target must be a directory.
        std::fs::metadata(p).map(|m| m.is_dir()).unwrap_or(false)
    } else {
        // Do not follow symlinks, so broken links still "exist".
        std::fs::symlink_metadata(p).is_ok()
    }
}
/// Returns the type of the entry at `path` without following symlinks:
/// "directory", "symlink", "regular", or "unknown".
#[deno_core::op2]
#[string]
pub(super) fn op_read_file_type(
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    let path = Path::new(&path);
    let metadata = std::fs::symlink_metadata(path)
        .map_err(|e| format!("Failed to read file type for {}: {}", path.display(), e))?;
    let ft = metadata.file_type();
    // Same precedence as elsewhere in this module: dir, symlink, regular.
    let type_str = match (ft.is_dir(), ft.is_symlink(), ft.is_file()) {
        (true, _, _) => "directory",
        (_, true, _) => "symlink",
        (_, _, true) => "regular",
        _ => "unknown",
    };
    Ok(type_str.to_string())
}
/// Lists a directory, mapping each entry name to its type string
/// ("directory" | "symlink" | "regular" | "unknown").
#[deno_core::op2]
#[serde]
pub(super) fn op_read_dir(
    #[string] path: String,
) -> std::result::Result<std::collections::HashMap<String, String>, NixRuntimeError> {
    let path = Path::new(&path);
    if !path.is_dir() {
        return Err(format!("{} is not a directory", path.display()).into());
    }
    let entries = std::fs::read_dir(path)
        .map_err(|e| format!("Failed to read directory {}: {}", path.display(), e))?;
    let mut result = std::collections::HashMap::new();
    for entry in entries {
        let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
        let file_type = entry.file_type().map_err(|e| {
            format!(
                "Failed to read file type for {}: {}",
                entry.path().display(),
                e
            )
        })?;
        // Note: entry.file_type() does not follow symlinks.
        let type_str = match (file_type.is_dir(), file_type.is_symlink(), file_type.is_file()) {
            (true, _, _) => "directory",
            (_, true, _) => "symlink",
            (_, _, true) => "regular",
            _ => "unknown",
        };
        result.insert(
            entry.file_name().to_string_lossy().into_owned(),
            type_str.to_string(),
        );
    }
    Ok(result)
}
/// Normalizes `path` relative to `current_dir`, purely lexically (no
/// file-system access): absolute paths pass through unchanged, `~/` resolves
/// against the user's home directory, and `.`/`..` components are folded
/// away.
#[deno_core::op2]
#[string]
pub(super) fn op_resolve_path(
    #[string] current_dir: String,
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    let _span = tracing::debug_span!("op_resolve_path").entered();
    tracing::debug!(current_dir, path);
    // If already absolute, return as-is
    if path.starts_with('/') {
        return Ok(path);
    }
    // Resolve relative path against current file directory (or CWD)
    let current_dir = if !path.starts_with("~/") {
        let mut dir = PathBuf::from(current_dir);
        dir.push(path);
        dir
    } else {
        // `~/...` is resolved against the home directory instead.
        let mut dir = std::env::home_dir().ok_or("home dir not defined")?;
        dir.push(&path[2..]);
        dir
    };
    // Lexical normalization: drop `.`, pop one component per `..`.
    // NOTE(review): `..` at the root pops nothing, and symlinks are never
    // consulted, so this can differ from kernel path resolution — confirm
    // lexical semantics are intended here.
    let mut normalized = PathBuf::new();
    for component in current_dir.components() {
        match component {
            Component::Prefix(p) => normalized.push(p.as_os_str()),
            Component::RootDir => normalized.push("/"),
            Component::CurDir => {}
            Component::ParentDir => {
                normalized.pop();
            }
            Component::Normal(c) => normalized.push(c),
        }
    }
    tracing::debug!(normalized = normalized.display().to_string());
    Ok(normalized.to_string_lossy().to_string())
}
/// Hex-encoded SHA-256 of the UTF-8 bytes of `data`; thin delegate to
/// `crate::nix_hash::sha256_hex`.
#[deno_core::op2]
#[string]
pub(super) fn op_sha256_hex(#[string] data: String) -> String {
    crate::nix_hash::sha256_hex(&data)
}
/// Builds a derivation-output placeholder: the Nix base-32 encoding of
/// sha256("nix-output:<output>"), prefixed with "/".
#[deno_core::op2]
#[string]
pub(super) fn op_make_placeholder(#[string] output: String) -> String {
    use sha2::{Digest, Sha256};
    let digest: [u8; 32] = Sha256::digest(format!("nix-output:{}", output).as_bytes()).into();
    format!("/{}", crate::nix_hash::nix_base32_encode(&digest))
}
/// Decodes a span string with three `:`-separated fields — source id, start
/// byte offset, and a third field that is ignored here (presumably the end
/// offset; confirm with the span encoder) — into a JSON object
/// `{ file, line, column }`. Malformed spans yield all-null fields rather
/// than an error.
#[deno_core::op2]
#[serde]
pub(super) fn op_decode_span<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] span_str: String,
) -> std::result::Result<serde_json::Value, NixRuntimeError> {
    let parts: Vec<&str> = span_str.split(':').collect();
    if parts.len() != 3 {
        // Synthetic/unknown spans: report "no location" instead of failing.
        return Ok(serde_json::json!({
            "file": serde_json::Value::Null,
            "line": serde_json::Value::Null,
            "column": serde_json::Value::Null
        }));
    }
    let source_id: usize = parts[0].parse().map_err(|_| "Invalid source ID")?;
    let start: u32 = parts[1].parse().map_err(|_| "Invalid start offset")?;
    let ctx: &Ctx = state.get_ctx();
    let source = ctx.get_source(source_id);
    let content = &source.src;
    // Only the start offset is needed to report a position.
    let (line, column) = byte_offset_to_line_col(content, start as usize);
    Ok(serde_json::json!({
        "file": source.get_name(),
        "line": line,
        "column": column
    }))
}
/// Maps a byte offset in `content` to 1-based (line, column) coordinates.
/// Columns count characters, not bytes; an offset at or past the end of
/// `content` yields the position just after the last character.
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
    let mut pos = (1u32, 1u32);
    for (idx, ch) in content.char_indices() {
        if idx >= offset {
            return pos;
        }
        match ch {
            '\n' => pos = (pos.0 + 1, 1),
            _ => pos.1 += 1,
        }
    }
    pos
}
/// Builds a store path of the given type `ty` (e.g. "source") from a hex
/// hash and a name, under the configured store's directory. Delegates to
/// `nix_hash::make_store_path`.
#[deno_core::op2]
#[string]
pub(super) fn op_make_store_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] ty: String,
    #[string] hash_hex: String,
    #[string] name: String,
) -> String {
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir();
    crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
}
/// Builds a text store path from a hex hash, a name, and the list of store
/// paths the text references, under the configured store's directory.
/// Delegates to `nix_hash::make_text_store_path`.
#[deno_core::op2]
#[string]
pub(super) fn op_make_text_store_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] hash_hex: String,
    #[string] name: String,
    #[serde] references: Vec<String>,
) -> String {
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir();
    crate::nix_hash::make_text_store_path(store_dir, &hash_hex, &name, &references)
}
/// Computes the store-path name for a derivation output; thin delegate to
/// `nix_hash::output_path_name`.
#[deno_core::op2]
#[string]
pub(super) fn op_output_path_name(
    #[string] drv_name: String,
    #[string] output_name: String,
) -> String {
    crate::nix_hash::output_path_name(&drv_name, &output_name)
}
/// Computes the store path for a fixed-output derivation.
///
/// The sha256 + recursive combination maps directly to a "source" store
/// path; every other algorithm/mode first hashes the
/// `fixed:out:<r:?><algo>:<hash>:` fingerprint and derives an "output:out"
/// path from that inner hash.
#[deno_core::op2]
#[string]
pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] hash_algo: String,
    #[string] hash: String,
    #[string] hash_mode: String,
    #[string] name: String,
) -> String {
    use sha2::{Digest, Sha256};
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir();
    if hash_algo == "sha256" && hash_mode == "recursive" {
        crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
    } else {
        // "r:" marks recursive (NAR) hashing in the fingerprint.
        let prefix = if hash_mode == "recursive" { "r:" } else { "" };
        let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
        let mut hasher = Sha256::new();
        hasher.update(inner_input.as_bytes());
        let inner_hash = hex::encode(hasher.finalize());
        crate::nix_hash::make_store_path(store_dir, "output:out", &inner_hash, &name)
    }
}
/// Adds a file-system path to the store (backing `builtins.path`-style ops).
///
/// * `name` — store name; defaults to the path's file name (or "source").
/// * `recursive` — true hashes/adds the path as a NAR (directories allowed);
///   false requires a regular file and hashes its raw contents.
/// * `sha256` — optional expected hash; a mismatch is an error.
///
/// Returns the resulting store path.
#[deno_core::op2]
#[string]
pub(super) fn op_add_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] path: String,
    #[string] name: Option<String>,
    recursive: bool,
    #[string] sha256: Option<String>,
) -> std::result::Result<String, NixRuntimeError> {
    use sha2::{Digest, Sha256};
    use std::fs;
    use std::path::Path;
    let path_obj = Path::new(&path);
    if !path_obj.exists() {
        return Err(NixRuntimeError::from(format!(
            "path '{}' does not exist",
            path
        )));
    }
    let computed_name = name.unwrap_or_else(|| {
        path_obj
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("source")
            .to_string()
    });
    let computed_hash = if recursive {
        crate::nar::compute_nar_hash(path_obj)
            .map_err(|e| NixRuntimeError::from(format!("failed to compute NAR hash: {}", e)))?
    } else {
        if !path_obj.is_file() {
            return Err(NixRuntimeError::from(
                "when 'recursive' is false, path must be a regular file",
            ));
        }
        // NOTE(review): the file is read again below when adding to the
        // store — a second full read of the same contents; consider reusing
        // this buffer.
        let contents = fs::read(path_obj)
            .map_err(|e| NixRuntimeError::from(format!("failed to read '{}': {}", path, e)))?;
        let mut hasher = Sha256::new();
        hasher.update(&contents);
        hex::encode(hasher.finalize())
    };
    // Verify against the caller-supplied hash (accepts multiple encodings
    // via decode_hash_to_hex) before touching the store.
    if let Some(expected_hash) = sha256 {
        let expected_hex =
            crate::nix_hash::decode_hash_to_hex(&expected_hash).ok_or_else(|| {
                NixRuntimeError::from(format!("invalid hash format: {}", expected_hash))
            })?;
        if computed_hash != expected_hex {
            return Err(NixRuntimeError::from(format!(
                "hash mismatch for path '{}': expected {}, got {}",
                path, expected_hex, computed_hash
            )));
        }
    }
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_path = if recursive {
        store
            .add_to_store_from_path(&computed_name, path_obj, vec![])
            .map_err(|e| NixRuntimeError::from(format!("failed to add path to store: {}", e)))?
    } else {
        let contents = fs::read(path_obj)
            .map_err(|e| NixRuntimeError::from(format!("failed to read '{}': {}", path, e)))?;
        store
            .add_to_store(&computed_name, &contents, false, vec![])
            .map_err(|e| NixRuntimeError::from(format!("failed to add to store: {}", e)))?
    };
    Ok(store_path)
}
#[deno_core::op2]
#[string]
pub(super) fn op_store_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    use crate::store::validate_store_path;
    // builtins.storePath: the argument must be a syntactically valid path
    // under this store's directory, and must be present in the store.
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    validate_store_path(store.get_store_dir(), &path)
        .map_err(|e| NixRuntimeError::from(e.to_string()))?;
    store
        .ensure_path(&path)
        .map_err(|e| NixRuntimeError::from(e.to_string()))?;
    // On success the path is returned unchanged.
    Ok(path)
}
#[deno_core::op2]
#[string]
pub(super) fn op_to_file<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] name: String,
    #[string] contents: String,
    #[serde] references: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
    // Back-end of builtins.toFile: store `contents` as a text object named
    // `name`, recording `references` as its store references.
    let ctx: &Ctx = state.get_ctx();
    ctx.get_store()
        .add_text_to_store(&name, &contents, references)
        .map_err(|e| NixRuntimeError::from(format!("builtins.toFile failed: {}", e)))
}
#[deno_core::op2]
#[string]
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    use std::path::Path;
    let src = Path::new(&path);
    if !src.exists() {
        return Err(NixRuntimeError::from(format!(
            "path '{}' does not exist",
            path
        )));
    }
    // Store-object name defaults to the last path component; "source" when
    // there is none (e.g. "/") or it is not valid UTF-8.
    let name = match src.file_name().and_then(|n| n.to_str()) {
        Some(component) => component.to_string(),
        None => "source".to_string(),
    };
    let ctx: &Ctx = state.get_ctx();
    ctx.get_store()
        .add_to_store_from_path(&name, src, vec![])
        .map_err(|e| NixRuntimeError::from(format!("failed to copy path to store: {}", e)))
}
#[deno_core::op2]
#[string]
pub(super) fn op_get_env(#[string] key: String) -> std::result::Result<String, NixRuntimeError> {
    use std::env::VarError;
    match std::env::var(key) {
        Ok(value) => Ok(value),
        // An unset variable is not an error: builtins.getEnv yields "".
        Err(VarError::NotPresent) => Ok(String::new()),
        // Any other failure (e.g. non-UTF-8 value) is surfaced to the caller.
        Err(err) => Err(format!("Failed to read env var: {err}").into()),
    }
}
#[deno_core::op2]
#[serde]
pub(super) fn op_walk_dir(
    #[string] path: String,
) -> std::result::Result<Vec<(String, String)>, NixRuntimeError> {
    // Depth-first traversal: pushes (path-relative-to-root, type) for every
    // entry. Types are "directory", "symlink" or "regular"; symlinks are
    // reported but never followed.
    fn visit(
        root: &Path,
        dir: &Path,
        out: &mut Vec<(String, String)>,
    ) -> std::result::Result<(), NixRuntimeError> {
        let entries = std::fs::read_dir(dir)
            .map_err(|e| NixRuntimeError::from(format!("failed to read directory: {}", e)))?;
        for entry in entries {
            let entry =
                entry.map_err(|e| NixRuntimeError::from(format!("failed to read entry: {}", e)))?;
            let entry_path = entry.path();
            let rel = entry_path
                .strip_prefix(root)
                .map_err(|e| NixRuntimeError::from(format!("failed to get relative path: {}", e)))?
                .to_string_lossy()
                .into_owned();
            let kind = entry
                .file_type()
                .map_err(|e| NixRuntimeError::from(format!("failed to get file type: {}", e)))?;
            let label = if kind.is_dir() {
                "directory"
            } else if kind.is_symlink() {
                "symlink"
            } else {
                "regular"
            };
            out.push((rel, label.to_string()));
            // Recurse into real directories only (is_dir() is false for a
            // symlink entry, so symlinked directories are not descended).
            if kind.is_dir() {
                visit(root, &entry_path, out)?;
            }
        }
        Ok(())
    }
    let root = Path::new(&path);
    if !root.is_dir() {
        return Err(NixRuntimeError::from(format!(
            "{} is not a directory",
            root.display()
        )));
    }
    let mut out = Vec::new();
    visit(root, root, &mut out)?;
    Ok(out)
}
/// Copies a filtered subset of `src_path` into a fresh temp directory,
/// hashes the result, optionally verifies it against `sha256`, and adds it
/// to the store. `include_paths` lists the relative paths that survived
/// filtering on the JS side (presumably builtins.filterSource / the
/// `filter` argument of builtins.path — TODO confirm against the caller).
#[deno_core::op2]
#[string]
pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] src_path: String,
    #[string] name: Option<String>,
    recursive: bool,
    #[string] sha256: Option<String>,
    #[serde] include_paths: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
    use sha2::{Digest, Sha256};
    use std::fs;
    let src = Path::new(&src_path);
    if !src.exists() {
        return Err(NixRuntimeError::from(format!(
            "path '{}' does not exist",
            src_path
        )));
    }
    // Store-object name defaults to the last path component.
    let computed_name = name.unwrap_or_else(|| {
        src.file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("source")
            .to_string()
    });
    // Materialize the filtered tree under <tempdir>/<computed_name>.
    // The temp dir is removed when `temp_dir` drops at the end of the fn.
    let temp_dir = tempfile::tempdir()
        .map_err(|e| NixRuntimeError::from(format!("failed to create temp dir: {}", e)))?;
    let dest = temp_dir.path().join(&computed_name);
    fs::create_dir_all(&dest)
        .map_err(|e| NixRuntimeError::from(format!("failed to create dest dir: {}", e)))?;
    for rel_path in &include_paths {
        let src_file = src.join(rel_path);
        let dest_file = dest.join(rel_path);
        // Create intermediate directories for nested include paths.
        if let Some(parent) = dest_file.parent() {
            fs::create_dir_all(parent)
                .map_err(|e| NixRuntimeError::from(format!("failed to create dir: {}", e)))?;
        }
        // symlink_metadata (not metadata) so symlinks are detected rather
        // than followed.
        let metadata = fs::symlink_metadata(&src_file)
            .map_err(|e| NixRuntimeError::from(format!("failed to read metadata: {}", e)))?;
        if metadata.is_symlink() {
            // Recreate the symlink with its original target (which may be
            // relative or dangling); unix-only.
            let target = fs::read_link(&src_file)
                .map_err(|e| NixRuntimeError::from(format!("failed to read symlink: {}", e)))?;
            #[cfg(unix)]
            std::os::unix::fs::symlink(&target, &dest_file)
                .map_err(|e| NixRuntimeError::from(format!("failed to create symlink: {}", e)))?;
            #[cfg(not(unix))]
            return Err(NixRuntimeError::from(
                "symlinks not supported on this platform",
            ));
        } else if metadata.is_dir() {
            // Directories listed explicitly are created empty; their files
            // arrive via their own include_paths entries.
            fs::create_dir_all(&dest_file)
                .map_err(|e| NixRuntimeError::from(format!("failed to create dir: {}", e)))?;
        } else {
            fs::copy(&src_file, &dest_file)
                .map_err(|e| NixRuntimeError::from(format!("failed to copy file: {}", e)))?;
        }
    }
    // Hash the materialized tree (NAR hash in recursive mode, flat sha256
    // of the single file otherwise).
    let computed_hash = if recursive {
        crate::nar::compute_nar_hash(&dest)
            .map_err(|e| NixRuntimeError::from(format!("failed to compute NAR hash: {}", e)))?
    } else {
        // NOTE(review): `dest` is unconditionally created as a directory
        // above, so this branch appears to always fail when
        // recursive == false — confirm whether flat mode is ever reachable.
        if !dest.is_file() {
            return Err(NixRuntimeError::from(
                "when 'recursive' is false, path must be a regular file",
            ));
        }
        let contents = fs::read(&dest)
            .map_err(|e| NixRuntimeError::from(format!("failed to read file: {}", e)))?;
        let mut hasher = Sha256::new();
        hasher.update(&contents);
        hex::encode(hasher.finalize())
    };
    // Optional verification against the caller-supplied expected hash.
    if let Some(expected_hash) = sha256 {
        let expected_hex =
            crate::nix_hash::decode_hash_to_hex(&expected_hash).ok_or_else(|| {
                NixRuntimeError::from(format!("invalid hash format: {}", expected_hash))
            })?;
        if computed_hash != expected_hex {
            return Err(NixRuntimeError::from(format!(
                "hash mismatch for path '{}': expected {}, got {}",
                src_path, expected_hex, computed_hash
            )));
        }
    }
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_path = store
        .add_to_store_from_path(&computed_name, &dest, vec![])
        .map_err(|e| NixRuntimeError::from(format!("failed to add path to store: {}", e)))?;
    Ok(store_path)
}
#[deno_core::op2]
#[serde]
pub(super) fn op_match(
    state: &mut OpState,
    #[string] regex: String,
    #[string] text: String,
) -> std::result::Result<Option<Vec<Option<String>>>, NixRuntimeError> {
    // builtins.match requires the pattern to match the WHOLE string, so the
    // pattern is anchored before compilation (cached via RegexCache).
    let anchored = format!("^{}$", regex);
    let cache = state.borrow_mut::<RegexCache>();
    let re = cache
        .get_regex(&anchored)
        .map_err(|_| NixRuntimeError::from(format!("invalid regular expression '{}'", regex)))?;
    // None => no match; Some(groups) => one entry per capture group
    // (group 0, the whole match, is skipped), None for unmatched groups.
    Ok(re.captures(&text).map(|caps| {
        caps.iter()
            .skip(1)
            .map(|grp| grp.map(|g| g.as_str().to_string()))
            .collect()
    }))
}
#[deno_core::op2]
#[serde]
/// Implements `builtins.split`: returns an alternating list of the text
/// between matches (`SplitResult::Text`) and the capture groups of each
/// match (`SplitResult::Captures`).
pub(super) fn op_split(
    state: &mut OpState,
    #[string] regex: String,
    #[string] text: String,
) -> std::result::Result<Vec<SplitResult>, NixRuntimeError> {
    let cache = state.borrow_mut::<RegexCache>();
    let re = cache
        .get_regex(&regex)
        .map_err(|_| NixRuntimeError::from(format!("invalid regular expression '{}'", regex)))?;
    let mut capture_locations = re.capture_locations();
    let num_captures = capture_locations.len();
    let mut ret: Vec<SplitResult> = Vec::new();
    // `seg_start`: start of the yet-unemitted text before the next match.
    // `search_at`: where the regex engine resumes. Kept separate so an
    // empty match can advance the search position without dropping text.
    let mut seg_start = 0;
    let mut search_at = 0;
    while search_at <= text.len() {
        let Some(thematch) = re.captures_read_at(&mut capture_locations, &text, search_at) else {
            break;
        };
        ret.push(SplitResult::Text(
            text[seg_start..thematch.start()].to_string(),
        ));
        // Capture groups of this match (group 0, the whole match, skipped);
        // None for groups that did not participate.
        let captures: Vec<Option<String>> = (1..num_captures)
            .map(|i| capture_locations.get(i))
            .map(|o| o.map(|(start, end)| text[start..end].to_string()))
            .collect();
        ret.push(SplitResult::Captures(captures));
        seg_start = thematch.end();
        if thematch.end() > thematch.start() {
            search_at = thematch.end();
        } else {
            // Empty match: step one character forward (on a char boundary)
            // so the loop terminates. Previously `pos = thematch.end()`
            // never advanced past an empty match (e.g. regex "a*" on "b"),
            // causing an infinite loop.
            match text[thematch.end()..].chars().next() {
                Some(c) => search_at = thematch.end() + c.len_utf8(),
                None => break,
            }
        }
    }
    // Trailing text after the last match (or the whole string if none).
    ret.push(SplitResult::Text(text[seg_start..].to_string()));
    Ok(ret)
}
/// One element of the list returned by `op_split`. Serialized untagged so
/// JS receives either a plain string or an array of group strings/nulls.
#[derive(serde::Serialize)]
#[serde(untagged)]
pub(super) enum SplitResult {
    /// Literal text between (or surrounding) regex matches.
    Text(String),
    /// Capture groups of a single match; `None` for non-participating groups.
    Captures(Vec<Option<String>>),
}

View File

@@ -5,32 +5,38 @@ use core::ops::Deref;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::ops::DerefMut;
use std::sync::LazyLock;
use derive_more::{Constructor, IsVariant, Unwrap};
use regex::Regex;
/// Represents a Nix symbol, which is used as a key in attribute sets.
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Constructor)]
pub struct Symbol(String);
pub struct Symbol<'a>(Cow<'a, str>);
impl<T: Into<String>> From<T> for Symbol {
fn from(value: T) -> Self {
Symbol(value.into())
pub type StaticSymbol = Symbol<'static>;
impl From<String> for Symbol<'_> {
fn from(value: String) -> Self {
Symbol(Cow::Owned(value))
}
}
impl<'a> From<&'a str> for Symbol<'a> {
fn from(value: &'a str) -> Self {
Symbol(Cow::Borrowed(value))
}
}
/// Formats a string slice as a Nix symbol, quoting it if necessary.
pub fn format_symbol<'a>(sym: impl Into<Cow<'a, str>>) -> Cow<'a, str> {
let sym = sym.into();
if REGEX.is_match(&sym) {
if Symbol::NORMAL_REGEX.test(&sym) {
sym
} else {
Cow::Owned(format!(r#""{sym}""#))
Cow::Owned(escape_quote_string(&sym))
}
}
impl Display for Symbol {
impl Display for Symbol<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
if self.normal() {
write!(f, "{}", self.0)
@@ -40,55 +46,41 @@ impl Display for Symbol {
}
}
static REGEX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_'-]*$").expect("hardcoded regex is always valid")
});
impl Symbol {
impl Symbol<'_> {
const NORMAL_REGEX: ere::Regex<1> = ere::compile_regex!("^[a-zA-Z_][a-zA-Z0-9_'-]*$");
/// Checks if the symbol is a "normal" identifier that doesn't require quotes.
fn normal(&self) -> bool {
REGEX.is_match(self)
Self::NORMAL_REGEX.test(self)
}
}
impl Deref for Symbol {
impl Deref for Symbol<'_> {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Symbol {
/// Consumes the `Symbol`, returning its inner `String`.
pub fn into_inner(self) -> String {
self.0
}
/// Returns a reference to the inner `String`.
pub fn as_inner(&self) -> &String {
&self.0
}
}
/// Represents a Nix attribute set, which is a map from symbols to values.
#[derive(Constructor, Default, Clone, PartialEq)]
pub struct AttrSet {
data: BTreeMap<Symbol, Value>,
data: BTreeMap<StaticSymbol, Value>,
}
impl AttrSet {
/// Gets a value by key (string or Symbol).
pub fn get(&self, key: impl Into<Symbol>) -> Option<&Value> {
pub fn get<'a, 'sym: 'a>(&'a self, key: impl Into<Symbol<'sym>>) -> Option<&'a Value> {
self.data.get(&key.into())
}
/// Checks if a key exists in the attribute set.
pub fn contains_key(&self, key: impl Into<Symbol>) -> bool {
pub fn contains_key<'a, 'sym: 'a>(&'a self, key: impl Into<Symbol<'sym>>) -> bool {
self.data.contains_key(&key.into())
}
}
impl Deref for AttrSet {
type Target = BTreeMap<Symbol, Value>;
type Target = BTreeMap<StaticSymbol, Value>;
fn deref(&self) -> &Self::Target {
&self.data
}
@@ -117,12 +109,31 @@ impl Debug for AttrSet {
impl Display for AttrSet {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
use Value::*;
if self.data.len() > 1 {
writeln!(f, "{{")?;
for (k, v) in self.data.iter() {
write!(f, " {k} = ")?;
match v {
List(_) => writeln!(f, "[ ... ];")?,
AttrSet(_) => writeln!(f, "{{ ... }};")?,
v => writeln!(f, "{v};")?,
}
}
write!(f, "}}")
} else {
write!(f, "{{")?;
for (k, v) in self.data.iter() {
write!(f, " {k} = {v};")?;
write!(f, " {k} = ")?;
match v {
List(_) => write!(f, "[ ... ];")?,
AttrSet(_) => write!(f, "{{ ... }};")?,
v => write!(f, "{v};")?,
}
}
write!(f, " }}")
}
}
}
impl AttrSet {
@@ -163,11 +174,28 @@ impl DerefMut for List {
impl Display for List {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
write!(f, "[ ")?;
use Value::*;
if self.data.len() > 1 {
writeln!(f, "[")?;
for v in self.data.iter() {
write!(f, "{v} ")?;
match v {
List(_) => writeln!(f, " [ ... ]")?,
AttrSet(_) => writeln!(f, " {{ ... }}")?,
v => writeln!(f, " {v}")?,
}
}
write!(f, "]")
} else {
write!(f, "[ ")?;
for v in self.data.iter() {
match v {
List(_) => write!(f, "[ ... ] ")?,
AttrSet(_) => write!(f, "{{ ... }} ")?,
v => write!(f, "{v} ")?,
}
}
write!(f, "]")
}
}
}

View File

@@ -3,12 +3,13 @@
mod utils;
use nix_js::value::Value;
use utils::{eval, eval_result};
use utils::{eval_deep, eval_deep_result};
#[test]
fn derivation_minimal() {
let result =
eval(r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#);
let result = eval_deep(
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -44,7 +45,7 @@ fn derivation_minimal() {
#[test]
fn derivation_with_args() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "test";
builder = "/bin/sh";
@@ -66,7 +67,7 @@ fn derivation_with_args() {
#[test]
fn derivation_to_string() {
let result = eval(
let result = eval_deep(
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
);
@@ -78,7 +79,8 @@ fn derivation_to_string() {
#[test]
fn derivation_missing_name() {
let result = eval_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
let result =
eval_deep_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -87,7 +89,7 @@ fn derivation_missing_name() {
#[test]
fn derivation_invalid_name_with_drv_suffix() {
let result = eval_result(
let result = eval_deep_result(
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
);
@@ -98,7 +100,7 @@ fn derivation_invalid_name_with_drv_suffix() {
#[test]
fn derivation_missing_builder() {
let result = eval_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
let result = eval_deep_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -107,7 +109,7 @@ fn derivation_missing_builder() {
#[test]
fn derivation_missing_system() {
let result = eval_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
let result = eval_deep_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -116,7 +118,7 @@ fn derivation_missing_system() {
#[test]
fn derivation_with_env_vars() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "test";
builder = "/bin/sh";
@@ -137,7 +139,7 @@ fn derivation_with_env_vars() {
#[test]
fn derivation_strict() {
let result = eval(
let result = eval_deep(
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
);
@@ -156,8 +158,8 @@ fn derivation_strict() {
fn derivation_deterministic_paths() {
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
let result1 = eval(expr);
let result2 = eval(expr);
let result1 = eval_deep(expr);
let result2 = eval_deep(expr);
match (result1, result2) {
(Value::AttrSet(attrs1), Value::AttrSet(attrs2)) => {
@@ -170,7 +172,7 @@ fn derivation_deterministic_paths() {
#[test]
fn derivation_escaping_in_aterm() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "test";
builder = "/bin/sh";
@@ -190,7 +192,7 @@ fn derivation_escaping_in_aterm() {
#[test]
fn multi_output_two_outputs() {
let drv = eval(
let drv = eval_deep(
r#"derivation {
name = "multi";
builder = "/bin/sh";
@@ -233,7 +235,7 @@ fn multi_output_two_outputs() {
#[test]
fn multi_output_three_outputs() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "three";
builder = "/bin/sh";
@@ -281,7 +283,7 @@ fn multi_output_three_outputs() {
#[test]
fn multi_output_backward_compat() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "compat";
builder = "/bin/sh";
@@ -307,7 +309,7 @@ fn multi_output_backward_compat() {
#[test]
fn multi_output_deterministic() {
let result1 = eval(
let result1 = eval_deep(
r#"derivation {
name = "determ";
builder = "/bin/sh";
@@ -316,7 +318,7 @@ fn multi_output_deterministic() {
}"#,
);
let result2 = eval(
let result2 = eval_deep(
r#"derivation {
name = "determ";
builder = "/bin/sh";
@@ -330,7 +332,7 @@ fn multi_output_deterministic() {
#[test]
fn fixed_output_sha256_flat() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "fixed";
builder = "/bin/sh";
@@ -367,7 +369,7 @@ fn fixed_output_sha256_flat() {
#[test]
fn fixed_output_default_algo() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "default";
builder = "/bin/sh";
@@ -390,7 +392,7 @@ fn fixed_output_default_algo() {
#[test]
fn fixed_output_recursive_mode() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "recursive";
builder = "/bin/sh";
@@ -420,7 +422,7 @@ fn fixed_output_recursive_mode() {
#[test]
fn fixed_output_rejects_multi_output() {
let result = eval_result(
let result = eval_deep_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -437,7 +439,7 @@ fn fixed_output_rejects_multi_output() {
#[test]
fn fixed_output_invalid_hash_mode() {
let result = eval_result(
let result = eval_deep_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -454,7 +456,7 @@ fn fixed_output_invalid_hash_mode() {
#[test]
fn structured_attrs_basic() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "struct";
builder = "/bin/sh";
@@ -479,7 +481,7 @@ fn structured_attrs_basic() {
#[test]
fn structured_attrs_nested() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "nested";
builder = "/bin/sh";
@@ -500,7 +502,7 @@ fn structured_attrs_nested() {
#[test]
fn structured_attrs_rejects_functions() {
let result = eval_result(
let result = eval_deep_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -517,7 +519,7 @@ fn structured_attrs_rejects_functions() {
#[test]
fn structured_attrs_false() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "normal";
builder = "/bin/sh";
@@ -540,7 +542,7 @@ fn structured_attrs_false() {
#[test]
fn ignore_nulls_true() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "ignore";
builder = "/bin/sh";
@@ -562,7 +564,7 @@ fn ignore_nulls_true() {
#[test]
fn ignore_nulls_false() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "keep";
builder = "/bin/sh";
@@ -585,7 +587,7 @@ fn ignore_nulls_false() {
#[test]
fn ignore_nulls_with_structured_attrs() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "combined";
builder = "/bin/sh";
@@ -609,7 +611,7 @@ fn ignore_nulls_with_structured_attrs() {
#[test]
fn all_features_combined() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "all";
builder = "/bin/sh";
@@ -636,7 +638,7 @@ fn all_features_combined() {
#[test]
fn fixed_output_with_structured_attrs() {
let result = eval(
let result = eval_deep(
r#"derivation {
name = "fixstruct";
builder = "/bin/sh";

View File

@@ -105,7 +105,7 @@ fn path_with_file() {
std::fs::write(&test_file, "Hello, World!").unwrap();
let expr = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result = ctx.eval_code(Source::new_eval(expr).unwrap()).unwrap();
let result = ctx.eval(Source::new_eval(expr).unwrap()).unwrap();
// Should return a store path string
if let Value::String(store_path) = result {
@@ -149,7 +149,7 @@ fn path_with_directory_recursive() {
r#"builtins.path {{ path = {}; recursive = true; }}"#,
test_dir.display()
);
let result = ctx.eval_code(Source::new_eval(expr).unwrap()).unwrap();
let result = ctx.eval(Source::new_eval(expr).unwrap()).unwrap();
if let Value::String(store_path) = result {
assert!(store_path.starts_with(ctx.get_store_dir()));
@@ -170,7 +170,7 @@ fn path_flat_with_file() {
r#"builtins.path {{ path = {}; recursive = false; }}"#,
test_file.display()
);
let result = ctx.eval_code(Source::new_eval(expr).unwrap()).unwrap();
let result = ctx.eval(Source::new_eval(expr).unwrap()).unwrap();
if let Value::String(store_path) = result {
assert!(store_path.starts_with(ctx.get_store_dir()));

View File

@@ -23,7 +23,7 @@ fn eval_file(name: &str) -> Result<(Value, Source), String> {
ty: nix_js::error::SourceType::File(nix_path.into()),
src: expr.into(),
};
ctx.eval_code(source.clone())
ctx.eval_deep(source.clone())
.map(|val| (val, source))
.map_err(|e| e.to_string())
}
@@ -181,10 +181,7 @@ eval_okay_test!(
);
eval_okay_test!(r#if);
eval_okay_test!(ind_string);
eval_okay_test!(
#[ignore = "not implemented: scopedImport"]
import
);
eval_okay_test!(import);
eval_okay_test!(inherit_attr_pos);
eval_okay_test!(
#[ignore = "__overrides is not supported"]

View File

@@ -156,7 +156,7 @@ fn string_add_merges_context() {
fn context_in_derivation_args() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
.eval(
r#"
let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
@@ -185,7 +185,7 @@ fn context_in_derivation_args() {
fn context_in_derivation_env() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
.eval(
r#"
let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
@@ -227,7 +227,7 @@ fn tostring_preserves_context() {
fn interpolation_derivation_returns_outpath() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
.eval(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };

View File

@@ -7,12 +7,25 @@ use nix_js::value::Value;
pub fn eval(expr: &str) -> Value {
Context::new()
.unwrap()
.eval_code(Source::new_eval(expr.into()).unwrap())
.eval(Source::new_eval(expr.into()).unwrap())
.unwrap()
}
pub fn eval_deep(expr: &str) -> Value {
Context::new()
.unwrap()
.eval_deep(Source::new_eval(expr.into()).unwrap())
.unwrap()
}
pub fn eval_deep_result(expr: &str) -> Result<Value> {
Context::new()
.unwrap()
.eval_deep(Source::new_eval(expr.into()).unwrap())
}
pub fn eval_result(expr: &str) -> Result<Value> {
Context::new()
.unwrap()
.eval_code(Source::new_eval(expr.into()).unwrap())
.eval(Source::new_eval(expr.into()).unwrap())
}