feat: initial nix-daemon implementation

This commit is contained in:
2026-01-17 19:27:59 +08:00
parent 52bf46407a
commit 2ad662c765
28 changed files with 1625 additions and 463 deletions

View File

@@ -1,7 +1,18 @@
vim.lsp.config("biome", { vim.lsp.config("biome", {
root_dir = function (bufnr, on_dir) root_dir = function (_bufnr, on_dir)
on_dir(vim.fn.getcwd()) on_dir(vim.fn.getcwd())
end end
}) })
vim.lsp.config("rust_analyzer", {
settings = {
["rust-analyzer"] = {
cargo = {
features = {
"daemon"
}
}
}
}
})
return {} return {}

331
Cargo.lock generated
View File

@@ -34,6 +34,15 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "anes" name = "anes"
version = "0.1.6" version = "0.1.6"
@@ -61,6 +70,28 @@ dependencies = [
"derive_arbitrary", "derive_arbitrary",
] ]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "atomic-waker" name = "atomic-waker"
version = "1.1.2" version = "1.1.2"
@@ -287,6 +318,19 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-link",
]
[[package]] [[package]]
name = "ciborium" name = "ciborium"
version = "0.2.2" version = "0.2.2"
@@ -399,6 +443,12 @@ version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f" checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f"
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]] [[package]]
name = "core_maths" name = "core_maths"
version = "0.1.1" version = "0.1.1"
@@ -1206,6 +1256,30 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"wasm-bindgen",
"windows-core",
]
[[package]]
name = "iana-time-zone-haiku"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
"cc",
]
[[package]] [[package]]
name = "icu_calendar" name = "icu_calendar"
version = "2.1.1" version = "2.1.1"
@@ -1465,6 +1539,12 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.179" version = "0.2.179"
@@ -1568,6 +1648,15 @@ dependencies = [
"pkg-config", "pkg-config",
] ]
[[package]]
name = "matchers"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
"regex-automata",
]
[[package]] [[package]]
name = "memchr" name = "memchr"
version = "2.7.5" version = "2.7.5"
@@ -1640,6 +1729,24 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "nix-daemon"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb28bc02b8ea18d59e15fc8e86ae35850326dc5e4e2dcf17bc659f2fd79f1a08"
dependencies = [
"async-stream",
"chrono",
"futures",
"num_enum",
"tap",
"thiserror 1.0.69",
"tokio",
"tokio-stream",
"tokio-test",
"tracing",
]
[[package]] [[package]]
name = "nix-js" name = "nix-js"
version = "0.1.0" version = "0.1.0"
@@ -1656,6 +1763,7 @@ dependencies = [
"hex", "hex",
"itertools 0.14.0", "itertools 0.14.0",
"mimalloc", "mimalloc",
"nix-daemon",
"nix-js-macros", "nix-js-macros",
"petgraph", "petgraph",
"regex", "regex",
@@ -1669,6 +1777,9 @@ dependencies = [
"tar", "tar",
"tempfile", "tempfile",
"thiserror 2.0.17", "thiserror 2.0.17",
"tokio",
"tracing",
"tracing-subscriber",
"xz2", "xz2",
"zip", "zip",
] ]
@@ -1693,6 +1804,15 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "num-bigint" name = "num-bigint"
version = "0.4.6" version = "0.4.6"
@@ -1728,6 +1848,28 @@ dependencies = [
"autocfg", "autocfg",
] ]
[[package]]
name = "num_enum"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c"
dependencies = [
"num_enum_derive",
"rustversion",
]
[[package]]
name = "num_enum_derive"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.21.3" version = "1.21.3"
@@ -1911,6 +2053,15 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "proc-macro-crate"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
dependencies = [
"toml_edit",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.95" version = "1.0.95"
@@ -2443,6 +2594,15 @@ dependencies = [
"digest", "digest",
] ]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]] [[package]]
name = "shlex" name = "shlex"
version = "1.3.0" version = "1.3.0"
@@ -2740,6 +2900,15 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "thread_local"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.44" version = "0.3.44"
@@ -2845,6 +3014,58 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-stream"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f6d24790a10a7af737693a3e8f1d03faef7e6ca0cc99aae5066f533766de545"
dependencies = [
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "toml_datetime"
version = "0.7.5+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347"
dependencies = [
"serde_core",
]
[[package]]
name = "toml_edit"
version = "0.23.10+spec-1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269"
dependencies = [
"indexmap",
"toml_datetime",
"toml_parser",
"winnow",
]
[[package]]
name = "toml_parser"
version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
dependencies = [
"winnow",
]
[[package]] [[package]]
name = "tower" name = "tower"
version = "0.5.2" version = "0.5.2"
@@ -2897,9 +3118,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
dependencies = [ dependencies = [
"pin-project-lite", "pin-project-lite",
"tracing-attributes",
"tracing-core", "tracing-core",
] ]
[[package]]
name = "tracing-attributes"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "tracing-core" name = "tracing-core"
version = "0.1.36" version = "0.1.36"
@@ -2907,6 +3140,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex-automata",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
] ]
[[package]] [[package]]
@@ -3008,6 +3271,12 @@ dependencies = [
"which", "which",
] ]
[[package]]
name = "valuable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.5" version = "0.9.5"
@@ -3194,12 +3463,65 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
dependencies = [
"windows-implement",
"windows-interface",
"windows-link",
"windows-result",
"windows-strings",
]
[[package]]
name = "windows-implement"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.59.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "windows-link" name = "windows-link"
version = "0.2.1" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-result"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
dependencies = [
"windows-link",
]
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.48.0" version = "0.48.0"
@@ -3431,6 +3753,15 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
name = "winnow"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "winsafe" name = "winsafe"
version = "0.0.19" version = "0.0.19"

View File

@@ -1,10 +1,10 @@
[no-exit-message] [no-exit-message]
@repl: @repl:
cargo run --bin repl RUST_LOG=none,nix_js=debug cargo run --bin repl
[no-exit-message] [no-exit-message]
@eval expr: @eval expr:
cargo run --bin eval -- '{{expr}}' RUST_LOG=none,nix_js=debug cargo run --bin eval -- '{{expr}}'
[no-exit-message] [no-exit-message]
@replr: @replr:

View File

@@ -4,13 +4,24 @@ version = "0.1.0"
edition = "2024" edition = "2024"
build = "build.rs" build = "build.rs"
[features]
default = ["daemon"]
daemon = ["dep:tokio", "dep:nix-daemon"]
[dependencies] [dependencies]
mimalloc = "0.1" mimalloc = "0.1"
tokio = { version = "1.41", features = ["rt-multi-thread", "sync"], optional = true }
nix-daemon = { version = "0.1", optional = true }
# REPL # REPL
anyhow = "1.0" anyhow = "1.0"
rustyline = "14.0" rustyline = "14.0"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
derive_more = { version = "2", features = ["full"] } derive_more = { version = "2", features = ["full"] }
thiserror = "2" thiserror = "2"

View File

@@ -1,4 +1,3 @@
use std::env;
use std::path::Path; use std::path::Path;
use std::process::Command; use std::process::Command;
@@ -68,9 +67,4 @@ fn main() {
} else { } else {
panic!("dist/runtime.js not found after build"); panic!("dist/runtime.js not found after build");
} }
// Print build info
if env::var("CARGO_CFG_DEBUG_ASSERTIONS").is_ok() {
println!("Built runtime.js in DEBUG mode");
}
} }

View File

@@ -7,9 +7,10 @@ import { forceAttrs, forceBool, forceString } from "../type-assert";
import type { NixValue, NixAttrs } from "../types"; import type { NixValue, NixAttrs } from "../types";
import { isNixPath } from "../types"; import { isNixPath } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
import { coerceToPath } from "./conversion"; import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { getPathValue } from "../path"; import { getPathValue } from "../path";
import type { NixStringContext } from "../string-context"; import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext } from "../string-context";
export const importFunc = (path: NixValue): NixValue => { export const importFunc = (path: NixValue): NixValue => {
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
@@ -38,8 +39,14 @@ export const scopedImport =
throw new Error("Not implemented: scopedImport"); throw new Error("Not implemented: scopedImport");
}; };
export const storePath = (args: NixValue): never => { export const storePath = (pathArg: NixValue): StringWithContext => {
throw new Error("Not implemented: storePath"); const context: NixStringContext = new Set();
const pathStr = coerceToPath(pathArg, context);
const validatedPath: string = Deno.core.ops.op_store_path(pathStr);
context.add(validatedPath);
return mkStringWithContext(validatedPath, context);
}; };
export const fetchClosure = (args: NixValue): never => { export const fetchClosure = (args: NixValue): never => {
@@ -346,9 +353,32 @@ export const path = (args: NixValue): string => {
return storePath; return storePath;
}; };
export const toFile = (name: NixValue, s: NixValue): never => { export const toFile =
throw new Error("Not implemented: toFile"); (nameArg: NixValue) =>
}; (contentsArg: NixValue): StringWithContext => {
const name = forceString(nameArg);
if (name.includes('/')) {
throw new Error("builtins.toFile: name cannot contain '/'");
}
if (name === '.' || name === '..') {
throw new Error("builtins.toFile: invalid name");
}
const context: NixStringContext = new Set();
const contents = coerceToString(
contentsArg,
StringCoercionMode.ToString,
false,
context
);
const references: string[] = Array.from(context);
const storePath: string = Deno.core.ops.op_to_file(name, contents, references);
return mkStringWithContext(storePath, new Set([storePath]));
};
export const toPath = (name: NixValue, s: NixValue): never => { export const toPath = (name: NixValue, s: NixValue): never => {
throw new Error("Not implemented: toPath"); throw new Error("Not implemented: toPath");

View File

@@ -76,6 +76,8 @@ declare global {
recursive: boolean, recursive: boolean,
sha256: string | null, sha256: string | null,
): string; ): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
} }
} }
} }

View File

@@ -1,8 +1,15 @@
use anyhow::Result; use anyhow::Result;
use nix_js::context::Context; use nix_js::context::Context;
use std::process::exit; use std::process::exit;
use tracing_subscriber::EnvFilter;
fn main() -> Result<()> { fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut args = std::env::args(); let mut args = std::env::args();
if args.len() != 2 { if args.len() != 2 {
eprintln!("Usage: {} expr", args.next().unwrap()); eprintln!("Usage: {} expr", args.next().unwrap());

View File

@@ -1,11 +1,17 @@
use anyhow::Result; use anyhow::Result;
use nix_js::context::Context;
use regex::Regex; use regex::Regex;
use rustyline::DefaultEditor; use rustyline::DefaultEditor;
use rustyline::error::ReadlineError; use rustyline::error::ReadlineError;
use tracing_subscriber::EnvFilter;
use nix_js::context::Context;
fn main() -> Result<()> { fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut rl = DefaultEditor::new()?; let mut rl = DefaultEditor::new()?;
let mut context = Context::new()?; let mut context = Context::new()?;
let re = Regex::new(r"^\s*([a-zA-Z_][a-zA-Z0-9_'-]*)\s*=(.*)$").unwrap(); let re = Regex::new(r"^\s*([a-zA-Z_][a-zA-Z0-9_'-]*)\s*=(.*)$").unwrap();

View File

@@ -10,7 +10,9 @@ use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result}; use crate::error::{Error, Result};
use crate::ir::{ArgId, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, SymId, ToIr as _}; use crate::ir::{ArgId, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, SymId, ToIr as _};
use crate::runtime::{Runtime, RuntimeContext}; use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{StoreBackend, StoreConfig};
use crate::value::Value; use crate::value::Value;
use std::sync::Arc;
mod private { mod private {
use super::*; use super::*;
@@ -54,6 +56,7 @@ pub(crate) struct SccInfo {
pub struct Context { pub struct Context {
ctx: Ctx, ctx: Ctx,
runtime: Runtime<CtxPtr>, runtime: Runtime<CtxPtr>,
store: Arc<StoreBackend>,
} }
impl Context { impl Context {
@@ -61,7 +64,10 @@ impl Context {
let ctx = Ctx::new(); let ctx = Ctx::new();
let runtime = Runtime::new()?; let runtime = Runtime::new()?;
Ok(Self { ctx, runtime }) let config = StoreConfig::from_env();
let store = Arc::new(StoreBackend::new(config)?);
Ok(Self { ctx, runtime, store })
} }
pub fn eval_code(&mut self, expr: &str) -> Result<Value> { pub fn eval_code(&mut self, expr: &str) -> Result<Value> {
@@ -73,6 +79,12 @@ impl Context {
.join("__eval__.nix"), .join("__eval__.nix"),
); );
let code = self.compile_code(expr)?; let code = self.compile_code(expr)?;
self.runtime
.op_state()
.borrow_mut()
.put(self.store.clone());
self.runtime self.runtime
.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx)) .eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
} }
@@ -85,6 +97,10 @@ impl Context {
pub(crate) fn eval_js(&mut self, code: String) -> Result<Value> { pub(crate) fn eval_js(&mut self, code: String) -> Result<Value> {
self.runtime.eval(code, CtxPtr::new(&mut self.ctx)) self.runtime.eval(code, CtxPtr::new(&mut self.ctx))
} }
pub fn get_store_dir(&self) -> &str {
self.store.as_store().get_store_dir()
}
} }
pub(crate) struct Ctx { pub(crate) struct Ctx {
@@ -187,8 +203,7 @@ impl Ctx {
.downgrade_ctx() .downgrade_ctx()
.downgrade(root.tree().expr().unwrap())?; .downgrade(root.tree().expr().unwrap())?;
let code = compile(self.get_ir(root), self); let code = compile(self.get_ir(root), self);
#[cfg(debug_assertions)] tracing::debug!("generated code: {}", &code);
eprintln!("[DEBUG] generated code: {}", &code);
Ok(code) Ok(code)
} }
} }

View File

@@ -1,5 +1,9 @@
use deno_core::op2;
use serde::Serialize;
use tracing::debug;
mod archive; mod archive;
mod cache; pub(crate) mod cache;
mod download; mod download;
mod git; mod git;
mod hg; mod hg;
@@ -8,9 +12,6 @@ mod nar;
pub use cache::FetcherCache; pub use cache::FetcherCache;
pub use download::Downloader; pub use download::Downloader;
use deno_core::op2;
use serde::Serialize;
use crate::runtime::NixError; use crate::runtime::NixError;
#[derive(Serialize)] #[derive(Serialize)]
@@ -55,8 +56,7 @@ pub fn op_fetch_url(
#[string] name: Option<String>, #[string] name: Option<String>,
executable: bool, executable: bool,
) -> Result<FetchUrlResult, NixError> { ) -> Result<FetchUrlResult, NixError> {
#[cfg(debug_assertions)] debug!("fetchurl: {}", url);
eprintln!("[DEBUG] fetchurl: {}", url);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new(); let downloader = Downloader::new();
@@ -106,9 +106,8 @@ pub fn op_fetch_tarball(
#[string] expected_nar_hash: Option<String>, #[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> { ) -> Result<FetchTarballResult, NixError> {
#[cfg(debug_assertions)] debug!(
eprintln!( "fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}",
"[DEBUG] fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}",
url, expected_hash, expected_nar_hash url, expected_hash, expected_nar_hash
); );
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
@@ -119,11 +118,9 @@ pub fn op_fetch_tarball(
// Try cache lookup with narHash if provided // Try cache lookup with narHash if provided
if let Some(ref nar_hash) = expected_nar_hash { if let Some(ref nar_hash) = expected_nar_hash {
let normalized = normalize_hash(nar_hash); let normalized = normalize_hash(nar_hash);
#[cfg(debug_assertions)] debug!("fetchTarball: normalized nar_hash={}", normalized);
eprintln!("[DEBUG] fetchTarball: normalized nar_hash={}", normalized);
if let Some(cached) = cache.get_extracted_tarball(&url, &normalized) { if let Some(cached) = cache.get_extracted_tarball(&url, &normalized) {
#[cfg(debug_assertions)] debug!("fetchTarball: cache HIT (with expected nar_hash)");
eprintln!("[DEBUG] fetchTarball: cache HIT (with expected nar_hash)");
// Need to compute tarball hash if not cached // Need to compute tarball hash if not cached
let tarball_hash = expected_hash let tarball_hash = expected_hash
.as_ref() .as_ref()
@@ -135,12 +132,10 @@ pub fn op_fetch_tarball(
nar_hash: normalized, nar_hash: normalized,
}); });
} }
#[cfg(debug_assertions)] debug!("fetchTarball: cache MISS, downloading...");
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
} else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) { } else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) {
#[cfg(debug_assertions)] debug!(
eprintln!( "fetchTarball: cache HIT (by URL, nar_hash={})",
"[DEBUG] fetchTarball: cache HIT (by URL, nar_hash={})",
cached_nar_hash cached_nar_hash
); );
let tarball_hash = expected_hash let tarball_hash = expected_hash
@@ -153,8 +148,7 @@ pub fn op_fetch_tarball(
nar_hash: cached_nar_hash, nar_hash: cached_nar_hash,
}); });
} }
#[cfg(debug_assertions)] debug!("fetchTarball: cache MISS, downloading...");
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
let data = downloader let data = downloader
.download(&url) .download(&url)
@@ -182,9 +176,8 @@ pub fn op_fetch_tarball(
let nar_hash = let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?; nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
#[cfg(debug_assertions)] debug!(
eprintln!( "fetchTarball: computed tarball_hash={}, nar_hash={}",
"[DEBUG] fetchTarball: computed tarball_hash={}, nar_hash={}",
tarball_hash, nar_hash tarball_hash, nar_hash
); );
@@ -222,11 +215,7 @@ pub fn op_fetch_git(
all_refs: bool, all_refs: bool,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> { ) -> Result<FetchGitResult, NixError> {
#[cfg(debug_assertions)] debug!("fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
eprintln!(
"[DEBUG] fetchGit: {} (ref: {:?}, rev: {:?})",
url, git_ref, rev
);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());

View File

@@ -3,6 +3,7 @@ use std::io::Write;
use std::path::PathBuf; use std::path::PathBuf;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::debug;
use super::archive::ArchiveError; use super::archive::ArchiveError;
@@ -168,47 +169,34 @@ impl FetcherCache {
let meta_path = cache_dir.join(&key).join(".meta"); let meta_path = cache_dir.join(&key).join(".meta");
let data_dir = cache_dir.join(&key); let data_dir = cache_dir.join(&key);
#[cfg(debug_assertions)] debug!("get_tarball: url={}, expected_hash={}", url, expected_hash);
eprintln!(
"[CACHE] get_tarball: url={}, expected_hash={}",
url, expected_hash
);
if !meta_path.exists() || !data_dir.exists() { if !meta_path.exists() || !data_dir.exists() {
#[cfg(debug_assertions)] debug!("get_tarball: cache miss - meta or data dir not found");
eprintln!("[CACHE] get_tarball: cache miss - meta or data dir not found");
return None; return None;
} }
let meta: CacheMetadata = let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?; serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)] debug!("get_tarball: cached hash={}, name={}", meta.hash, meta.name);
eprintln!(
"[CACHE] get_tarball: cached hash={}, name={}",
meta.hash, meta.name
);
if meta.hash == expected_hash { if meta.hash == expected_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name); let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)] debug!(
eprintln!( "get_tarball: hash match, checking store_path={}",
"[CACHE] get_tarball: hash match, checking store_path={}",
store_path.display() store_path.display()
); );
if store_path.exists() { if store_path.exists() {
#[cfg(debug_assertions)] debug!("get_tarball: HIT - returning store path");
eprintln!("[CACHE] get_tarball: HIT - returning store path");
Some(store_path) Some(store_path)
} else { } else {
#[cfg(debug_assertions)] debug!("get_tarball: store path doesn't exist");
eprintln!("[CACHE] get_tarball: store path doesn't exist");
None None
} }
} else { } else {
#[cfg(debug_assertions)] debug!(
eprintln!( "get_tarball: hash mismatch (cached={}, expected={})",
"[CACHE] get_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_hash meta.hash, expected_hash
); );
None None
@@ -253,47 +241,40 @@ impl FetcherCache {
let meta_path = cache_entry_dir.join(".meta"); let meta_path = cache_entry_dir.join(".meta");
let cached_content = cache_entry_dir.join("content"); let cached_content = cache_entry_dir.join("content");
#[cfg(debug_assertions)] debug!(
eprintln!( "get_extracted_tarball: url={}, expected_nar_hash={}",
"[CACHE] get_extracted_tarball: url={}, expected_nar_hash={}",
url, expected_nar_hash url, expected_nar_hash
); );
if !meta_path.exists() || !cached_content.exists() { if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)] debug!("get_extracted_tarball: cache miss - meta or content dir not found");
eprintln!("[CACHE] get_extracted_tarball: cache miss - meta or content dir not found");
return None; return None;
} }
let meta: CacheMetadata = let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?; serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)] debug!(
eprintln!( "get_extracted_tarball: cached hash={}, name={}",
"[CACHE] get_extracted_tarball: cached hash={}, name={}",
meta.hash, meta.name meta.hash, meta.name
); );
if meta.hash == expected_nar_hash { if meta.hash == expected_nar_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name); let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)] debug!(
eprintln!( "get_extracted_tarball: hash match, checking store_path={}",
"[CACHE] get_extracted_tarball: hash match, checking store_path={}",
store_path.display() store_path.display()
); );
if store_path.exists() { if store_path.exists() {
#[cfg(debug_assertions)] debug!("get_extracted_tarball: HIT - returning store path");
eprintln!("[CACHE] get_extracted_tarball: HIT - returning store path");
Some(store_path) Some(store_path)
} else { } else {
#[cfg(debug_assertions)] debug!("get_extracted_tarball: store path doesn't exist");
eprintln!("[CACHE] get_extracted_tarball: store path doesn't exist");
None None
} }
} else { } else {
#[cfg(debug_assertions)] debug!(
eprintln!( "get_extracted_tarball: hash mismatch (cached={}, expected={})",
"[CACHE] get_extracted_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_nar_hash meta.hash, expected_nar_hash
); );
None None
@@ -307,34 +288,27 @@ impl FetcherCache {
let meta_path = cache_entry_dir.join(".meta"); let meta_path = cache_entry_dir.join(".meta");
let cached_content = cache_entry_dir.join("content"); let cached_content = cache_entry_dir.join("content");
#[cfg(debug_assertions)] debug!("get_extracted_tarball_by_url: url={}", url);
eprintln!("[CACHE] get_extracted_tarball_by_url: url={}", url);
if !meta_path.exists() || !cached_content.exists() { if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)] debug!("get_extracted_tarball_by_url: cache miss - meta or content dir not found");
eprintln!(
"[CACHE] get_extracted_tarball_by_url: cache miss - meta or content dir not found"
);
return None; return None;
} }
let meta: CacheMetadata = let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?; serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)] debug!(
eprintln!( "get_extracted_tarball_by_url: cached hash={}, name={}",
"[CACHE] get_extracted_tarball_by_url: cached hash={}, name={}",
meta.hash, meta.name meta.hash, meta.name
); );
let store_path = self.make_store_path(&meta.hash, &meta.name); let store_path = self.make_store_path(&meta.hash, &meta.name);
if store_path.exists() { if store_path.exists() {
#[cfg(debug_assertions)] debug!("get_extracted_tarball_by_url: HIT - returning store path and hash");
eprintln!("[CACHE] get_extracted_tarball_by_url: HIT - returning store path and hash");
Some((store_path, meta.hash)) Some((store_path, meta.hash))
} else { } else {
#[cfg(debug_assertions)] debug!("get_extracted_tarball_by_url: store path doesn't exist");
eprintln!("[CACHE] get_extracted_tarball_by_url: store path doesn't exist");
None None
} }
} }
@@ -350,9 +324,8 @@ impl FetcherCache {
let key = Self::hash_key(url); let key = Self::hash_key(url);
let cache_entry_dir = cache_dir.join(&key); let cache_entry_dir = cache_dir.join(&key);
#[cfg(debug_assertions)] debug!(
eprintln!( "put_tarball_from_extracted: url={}, hash={}, name={}",
"[CACHE] put_tarball_from_extracted: url={}, hash={}, name={}",
url, hash, name url, hash, name
); );
@@ -371,19 +344,16 @@ impl FetcherCache {
fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?; fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;
let store_path = self.make_store_path(hash, name); let store_path = self.make_store_path(hash, name);
#[cfg(debug_assertions)] debug!(
eprintln!( "put_tarball_from_extracted: store_path={}",
"[CACHE] put_tarball_from_extracted: store_path={}",
store_path.display() store_path.display()
); );
if !store_path.exists() { if !store_path.exists() {
fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?; fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
copy_dir_recursive(extracted_path, &store_path)?; copy_dir_recursive(extracted_path, &store_path)?;
#[cfg(debug_assertions)] debug!("put_tarball_from_extracted: copied to store");
eprintln!("[CACHE] put_tarball_from_extracted: copied to store");
} else { } else {
#[cfg(debug_assertions)] debug!("put_tarball_from_extracted: store path already exists");
eprintln!("[CACHE] put_tarball_from_extracted: store path already exists");
} }
Ok(store_path) Ok(store_path)

View File

@@ -167,8 +167,8 @@ fn checkout_rev(
.output()?; .output()?;
if !output.status.success() { if !output.status.success() {
eprintln!( tracing::warn!(
"Warning: failed to initialize submodules: {}", "failed to initialize submodules: {}",
String::from_utf8_lossy(&output.stderr) String::from_utf8_lossy(&output.stderr)
); );
} }

View File

@@ -9,6 +9,7 @@ mod fetcher;
mod ir; mod ir;
mod nix_hash; mod nix_hash;
mod runtime; mod runtime;
mod store;
#[global_allocator] #[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

View File

@@ -1,7 +1,6 @@
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
const NIX_BASE32_CHARS: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz"; const NIX_BASE32_CHARS: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz";
const STORE_DIR: &str = "/nix/store";
pub fn sha256_hex(data: &str) -> String { pub fn sha256_hex(data: &str) -> String {
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
@@ -42,8 +41,8 @@ pub fn nix_base32_encode(bytes: &[u8]) -> String {
result result
} }
pub fn make_store_path(ty: &str, hash_hex: &str, name: &str) -> String { pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, STORE_DIR, name); let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(s.as_bytes()); hasher.update(s.as_bytes());
@@ -52,7 +51,7 @@ pub fn make_store_path(ty: &str, hash_hex: &str, name: &str) -> String {
let compressed = compress_hash(&hash, 20); let compressed = compress_hash(&hash, 20);
let encoded = nix_base32_encode(&compressed); let encoded = nix_base32_encode(&compressed);
format!("{}/{}-{}", STORE_DIR, encoded, name) format!("{}/{}-{}", store_dir, encoded, name)
} }
pub fn output_path_name(drv_name: &str, output_name: &str) -> String { pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
@@ -111,7 +110,7 @@ mod tests {
#[test] #[test]
fn test_make_store_path() { fn test_make_store_path() {
let path = make_store_path("output:out", "abc123", "hello"); let path = make_store_path("/nix/store", "output:out", "abc123", "hello");
assert!(path.starts_with("/nix/store/")); assert!(path.starts_with("/nix/store/"));
assert!(path.ends_with("-hello")); assert!(path.ends_with("-hello"));

View File

@@ -32,6 +32,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_output_path_name(), op_output_path_name(),
op_make_fixed_output_path(), op_make_fixed_output_path(),
op_add_path(), op_add_path(),
op_store_path(),
op_to_file(),
]; ];
ops.extend(crate::fetcher::register_ops()); ops.extend(crate::fetcher::register_ops());
@@ -92,8 +94,7 @@ fn op_import<Ctx: RuntimeContext>(
let content = std::fs::read_to_string(&absolute_path) let content = std::fs::read_to_string(&absolute_path)
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?; .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
#[cfg(debug_assertions)] tracing::debug!("compiling file: {}", absolute_path.display());
eprintln!("[DEBUG] compiling file: {}", absolute_path.display());
ctx.set_current_file(absolute_path); ctx.set_current_file(absolute_path);
Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?) Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?)
@@ -147,11 +148,17 @@ fn op_sha256_hex(#[string] data: String) -> String {
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_make_store_path( fn op_make_store_path(
state: &mut OpState,
#[string] ty: String, #[string] ty: String,
#[string] hash_hex: String, #[string] hash_hex: String,
#[string] name: String, #[string] name: String,
) -> String { ) -> String {
crate::nix_hash::make_store_path(&ty, &hash_hex, &name) use crate::store::StoreBackend;
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
} }
#[deno_core::op2] #[deno_core::op2]
@@ -163,15 +170,21 @@ fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_make_fixed_output_path( fn op_make_fixed_output_path(
state: &mut OpState,
#[string] hash_algo: String, #[string] hash_algo: String,
#[string] hash: String, #[string] hash: String,
#[string] hash_mode: String, #[string] hash_mode: String,
#[string] name: String, #[string] name: String,
) -> String { ) -> String {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
if hash_algo == "sha256" && hash_mode == "recursive" { if hash_algo == "sha256" && hash_mode == "recursive" {
crate::nix_hash::make_store_path("source", &hash, &name) crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
} else { } else {
let prefix = if hash_mode == "recursive" { "r:" } else { "" }; let prefix = if hash_mode == "recursive" { "r:" } else { "" };
let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash); let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
@@ -179,21 +192,24 @@ fn op_make_fixed_output_path(
hasher.update(inner_input.as_bytes()); hasher.update(inner_input.as_bytes());
let inner_hash = hex::encode(hasher.finalize()); let inner_hash = hex::encode(hasher.finalize());
crate::nix_hash::make_store_path("output:out", &inner_hash, &name) crate::nix_hash::make_store_path(store_dir, "output:out", &inner_hash, &name)
} }
} }
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_add_path( fn op_add_path(
state: &mut OpState,
#[string] path: String, #[string] path: String,
#[string] name: Option<String>, #[string] name: Option<String>,
recursive: bool, recursive: bool,
#[string] sha256: Option<String>, #[string] sha256: Option<String>,
) -> std::result::Result<String, NixError> { ) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::sync::Arc;
let path_obj = Path::new(&path); let path_obj = Path::new(&path);
@@ -234,7 +250,9 @@ fn op_add_path(
))); )));
} }
let store_path = crate::nix_hash::make_store_path("source", &computed_hash, &computed_name); let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let store_path = crate::nix_hash::make_store_path(store_dir, "source", &computed_hash, &computed_name);
Ok(store_path) Ok(store_path)
} }
@@ -274,6 +292,48 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
} }
} }
/// Implements `builtins.storePath`: validates that `path` is syntactically a
/// store path under the active backend's store dir, then asks the backend to
/// ensure it actually exists (see `Store::ensure_path`). Returns the path
/// unchanged on success.
#[deno_core::op2]
#[string]
fn op_store_path(
    state: &mut OpState,
    #[string] path: String,
) -> std::result::Result<String, NixError> {
    use crate::store::{validate_store_path, StoreBackend};
    use std::sync::Arc;
    // The active store backend is stashed in the op state as Arc<StoreBackend>.
    let store = state.borrow::<Arc<StoreBackend>>();
    let store_dir = store.as_store().get_store_dir();
    // Syntactic check first (hash length/alphabet, name charset) ...
    validate_store_path(store_dir, &path).map_err(|e| NixError::from(e.to_string()))?;
    // ... then a semantic check: the path must be present/realizable in the store.
    store
        .as_store()
        .ensure_path(&path)
        .map_err(|e| NixError::from(e.to_string()))?;
    Ok(path)
}
/// Implements `builtins.toFile`: stores `contents` as a text object named
/// `name` in the active store backend and returns the resulting store path.
/// NOTE(review): `name` is forwarded unvalidated here — the "name cannot
/// contain '/'" errors exercised by the integration tests must come from the
/// backend or the JS side; confirm.
#[deno_core::op2]
#[string]
fn op_to_file(
    state: &mut OpState,
    #[string] name: String,
    #[string] contents: String,
    #[serde] references: Vec<String>,
) -> std::result::Result<String, NixError> {
    use crate::store::StoreBackend;
    use std::sync::Arc;
    let store = state.borrow::<Arc<StoreBackend>>();
    let store_path = store
        .as_store()
        .add_text_to_store(&name, &contents, references)
        .map_err(|e| NixError::from(format!("builtins.toFile failed: {}", e)))?;
    Ok(store_path)
}
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime, js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>, is_thunk_symbol: v8::Global<v8::Symbol>,
@@ -314,6 +374,10 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
}) })
} }
    /// Exposes the underlying deno_core `OpState` so callers can inject
    /// shared resources (e.g. the `Arc<StoreBackend>` read by the store ops).
    pub(crate) fn op_state(&mut self) -> std::rc::Rc<std::cell::RefCell<OpState>> {
        self.js_runtime.op_state()
    }
pub(crate) fn eval(&mut self, script: String, ctx: Ctx) -> Result<Value> { pub(crate) fn eval(&mut self, script: String, ctx: Ctx) -> Result<Value> {
self.js_runtime.op_state().borrow_mut().put(ctx); self.js_runtime.op_state().borrow_mut().put(ctx);

106
nix-js/src/store.rs Normal file
View File

@@ -0,0 +1,106 @@
mod config;
mod error;
mod validation;
pub use config::{StoreConfig, StoreMode};
pub use validation::validate_store_path;
use crate::error::Result;
/// Abstraction over a Nix store implementation.
///
/// Implemented by `SimulatedStore` (local, cache-directory backed) and, behind
/// the `daemon` feature, `DaemonStore` (talks to a real nix-daemon socket).
pub trait Store: Send + Sync {
    /// The store directory prefix all paths live under (e.g. "/nix/store").
    fn get_store_dir(&self) -> &str;
    /// Whether `path` is a valid path in this store.
    fn is_valid_path(&self, path: &str) -> Result<bool>;
    /// Makes sure `path` exists in the store, erroring otherwise.
    fn ensure_path(&self, path: &str) -> Result<()>;
    /// Adds raw bytes to the store under `name`; returns the store path.
    /// `recursive` selects NAR-style (recursive) vs flat hashing.
    fn add_to_store(
        &self,
        name: &str,
        content: &[u8],
        recursive: bool,
        references: Vec<String>,
    ) -> Result<String>;
    /// Adds a text file (`builtins.toFile` semantics); returns the store path.
    fn add_text_to_store(
        &self,
        name: &str,
        content: &str,
        references: Vec<String>,
    ) -> Result<String>;
    /// Computes the store path a fixed-output object with the given hash
    /// would occupy, without adding anything.
    fn make_fixed_output_path(
        &self,
        hash_algo: &str,
        hash: &str,
        hash_mode: &str,
        name: &str,
    ) -> Result<String>;
}
/// The concrete store implementation in use.
///
/// The `Daemon` variant only exists when compiled with the `daemon` feature;
/// all call sites go through [`StoreBackend::as_store`] so they stay
/// feature-agnostic.
pub enum StoreBackend {
    Simulated(SimulatedStore),
    #[cfg(feature = "daemon")]
    Daemon(DaemonStore),
}
impl StoreBackend {
    /// Selects and constructs a backend from `config.mode`.
    ///
    /// - `Daemon`: connect to the daemon socket; without the `daemon` feature
    ///   this warns and falls back to the simulated store.
    /// - `Simulated`: always use the simulated store.
    /// - `Auto`: try the daemon first, fall back to simulated on any
    ///   connection error (or immediately without the feature).
    pub fn new(config: StoreConfig) -> Result<Self> {
        match config.mode {
            #[cfg(feature = "daemon")]
            StoreMode::Daemon => {
                let daemon = DaemonStore::connect(&config.daemon_socket)?;
                Ok(StoreBackend::Daemon(daemon))
            }
            // Explicit daemon request without the feature: degrade loudly.
            #[cfg(not(feature = "daemon"))]
            StoreMode::Daemon => {
                tracing::warn!("Daemon mode not available (nix-js not compiled with 'daemon' feature), falling back to simulated store");
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
            StoreMode::Simulated => {
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
            // Auto-probe: a failed connect is not fatal, just logged.
            #[cfg(feature = "daemon")]
            StoreMode::Auto => match DaemonStore::connect(&config.daemon_socket) {
                Ok(daemon) => {
                    tracing::debug!(
                        "Using nix-daemon at {}",
                        config.daemon_socket.display()
                    );
                    Ok(StoreBackend::Daemon(daemon))
                }
                Err(e) => {
                    tracing::warn!(
                        "Daemon unavailable ({}), using simulated store",
                        e
                    );
                    let simulated = SimulatedStore::new()?;
                    Ok(StoreBackend::Simulated(simulated))
                }
            },
            #[cfg(not(feature = "daemon"))]
            StoreMode::Auto => {
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
        }
    }

    /// Borrows the active backend as a trait object so callers don't need to
    /// match on the (feature-dependent) variants themselves.
    pub fn as_store(&self) -> &dyn Store {
        match self {
            StoreBackend::Simulated(s) => s,
            #[cfg(feature = "daemon")]
            StoreBackend::Daemon(d) => d,
        }
    }
}
mod simulated;
pub use simulated::SimulatedStore;
#[cfg(feature = "daemon")]
mod daemon;
#[cfg(feature = "daemon")]
pub use daemon::DaemonStore;

View File

@@ -0,0 +1,50 @@
use std::path::PathBuf;
/// Which store backend the evaluator should use.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StoreMode {
    /// Always talk to a real nix-daemon.
    Daemon,
    /// Always use the local simulated store.
    Simulated,
    /// Prefer the daemon, fall back to simulated if it is unreachable.
    Auto,
}
/// Store selection settings, normally built from the environment
/// (`NIX_JS_STORE_MODE`, `NIX_DAEMON_SOCKET`) via [`StoreConfig::from_env`].
#[derive(Debug, Clone)]
pub struct StoreConfig {
    // Backend selection strategy.
    pub mode: StoreMode,
    // Unix socket the daemon listens on.
    pub daemon_socket: PathBuf,
}
impl StoreConfig {
pub fn from_env() -> Self {
let mode = match std::env::var("NIX_JS_STORE_MODE")
.as_deref()
.map(|s| s.to_lowercase())
.as_deref()
{
Ok("daemon") => StoreMode::Daemon,
Ok("simulated") => StoreMode::Simulated,
Ok("auto") | Err(_) => StoreMode::Auto,
Ok(other) => {
tracing::warn!(
"Invalid NIX_JS_STORE_MODE '{}', using 'auto'",
other
);
StoreMode::Auto
}
};
let daemon_socket = std::env::var("NIX_DAEMON_SOCKET")
.map(PathBuf::from)
.unwrap_or_else(|_| PathBuf::from("/nix/var/nix/daemon-socket/socket"));
Self {
mode,
daemon_socket,
}
}
}
impl Default for StoreConfig {
    /// NOTE: this default is side-effectful — it reads process environment
    /// variables (see `from_env`), so two calls may disagree if the
    /// environment changes in between.
    fn default() -> Self {
        Self::from_env()
    }
}

150
nix-js/src/store/daemon.rs Normal file
View File

@@ -0,0 +1,150 @@
use std::path::Path;
use std::sync::Arc;
use nix_daemon::{Progress as _, Store as _, nix};
use tokio::net::UnixStream;
use tokio::sync::Mutex;
use crate::error::{Error, Result};
use super::Store;
/// Store backend backed by a real nix-daemon over a unix socket.
///
/// Owns a dedicated tokio runtime so the synchronous `Store` trait methods
/// can drive the async `nix_daemon` client via `block_on`.
pub struct DaemonStore {
    // Private runtime used only for this store's own futures.
    runtime: tokio::runtime::Runtime,
    // The daemon protocol is stateful, so access is serialized via a Mutex.
    store: Arc<Mutex<nix::DaemonStore<UnixStream>>>,
}
impl DaemonStore {
    /// Connects to the nix-daemon listening on `socket_path`.
    ///
    /// Builds a private tokio runtime first and performs the handshake on it;
    /// fails if the runtime cannot start, the path is not UTF-8, or the
    /// connection is refused.
    pub fn connect(socket_path: &Path) -> Result<Self> {
        let runtime = tokio::runtime::Runtime::new()
            .map_err(|e| Error::internal(format!("Failed to create tokio runtime: {}", e)))?;
        let socket_str = socket_path
            .to_str()
            .ok_or_else(|| Error::internal("Invalid socket path: not UTF-8".to_string()))?;
        let store = runtime.block_on(async {
            nix_daemon::nix::DaemonStore::builder()
                .connect_unix(socket_str)
                .await
                .map_err(|e| {
                    Error::internal(format!(
                        "Failed to connect to nix-daemon at {}: {}",
                        socket_str, e
                    ))
                })
        })?;
        Ok(Self {
            runtime,
            store: Arc::new(Mutex::new(store)),
        })
    }
    /// Runs `future` to completion on this store's runtime.
    ///
    /// NOTE(review): tokio's `Runtime::block_on` panics when called from
    /// within another async context — these methods must only be invoked
    /// from synchronous code; confirm call sites.
    fn block_on<F>(&self, future: F) -> F::Output
    where
        F: std::future::Future,
    {
        self.runtime.block_on(future)
    }
}
impl Store for DaemonStore {
    /// The daemon always serves the canonical system store location.
    fn get_store_dir(&self) -> &str {
        "/nix/store"
    }

    /// Asks the daemon whether `path` is a registered, valid store path.
    fn is_valid_path(&self, path: &str) -> Result<bool> {
        self.block_on(async {
            let mut store = self.store.lock().await;
            store
                .is_valid_path(path)
                .result()
                .await
                .map_err(|e| Error::internal(format!("Daemon error in is_valid_path: {}", e)))
        })
    }

    /// Asks the daemon to ensure `path` is present (substituting if needed);
    /// surfaces failures as a `builtins.storePath` evaluation error.
    fn ensure_path(&self, path: &str) -> Result<()> {
        self.block_on(async {
            let mut store = self.store.lock().await;
            store.ensure_path(path).result().await.map_err(|e| {
                Error::eval_error(
                    format!(
                        "builtins.storePath: path '{}' is not valid in nix store: {}",
                        path, e
                    ),
                    None,
                )
            })
        })
    }

    /// Uploads `content` to the daemon as a fixed-output sha256 object.
    ///
    /// `recursive` picks the content-addressing method string the daemon
    /// protocol expects ("fixed:r:sha256" vs "fixed:sha256").
    fn add_to_store(
        &self,
        name: &str,
        content: &[u8],
        recursive: bool,
        references: Vec<String>,
    ) -> Result<String> {
        let cam_str = if recursive {
            "fixed:r:sha256"
        } else {
            "fixed:sha256"
        };
        self.block_on(async {
            let mut store = self.store.lock().await;
            // Pass the content bytes themselves as the upload source (&[u8]
            // serves as the async reader). The previous version wrote the
            // content to a temp file and then sent the temp-file *path* bytes
            // (`content_path.as_os_str().as_encoded_bytes()`), which would
            // have stored the path string rather than the data — mirroring
            // add_text_to_store below, the raw bytes are what must be sent.
            let (store_path, _path_info) = store
                .add_to_store(name, cam_str, references, false, content)
                .result()
                .await
                .map_err(|e| Error::internal(format!("Daemon error in add_to_store: {}", e)))?;
            Ok(store_path)
        })
    }

    /// Adds a text object (`builtins.toFile` semantics) via the daemon.
    fn add_text_to_store(
        &self,
        name: &str,
        content: &str,
        references: Vec<String>,
    ) -> Result<String> {
        self.block_on(async {
            let mut store = self.store.lock().await;
            let (store_path, _) = store
                .add_to_store(name, "text:sha256", references, false, content.as_bytes())
                .result()
                .await
                .map_err(|e| {
                    Error::internal(format!("Daemon error in add_text_to_store: {}", e))
                })?;
            Ok(store_path)
        })
    }

    /// Cheap local approximation of a fixed-output path.
    ///
    /// NOTE(review): this is NOT the real Nix store-path derivation — it just
    /// truncates the hash to 32 chars and formats "/nix/store/<hash>-<name>".
    /// `hash_algo`/`hash_mode` are ignored. Acceptable only as a placeholder;
    /// confirm no caller relies on these paths matching the daemon's.
    fn make_fixed_output_path(
        &self,
        _hash_algo: &str,
        hash: &str,
        _hash_mode: &str,
        name: &str,
    ) -> Result<String> {
        let short_hash = &hash[..32.min(hash.len())];
        Ok(format!("/nix/store/{}-{}", short_hash, name))
    }
}

32
nix-js/src/store/error.rs Normal file
View File

@@ -0,0 +1,32 @@
use std::fmt;
/// Errors produced by store backends.
#[derive(Debug)]
pub enum StoreError {
    DaemonConnectionFailed(String),
    OperationFailed(String),
    InvalidPath(String),
    PathNotFound(String),
    Io(std::io::Error),
}

impl fmt::Display for StoreError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // One message per variant; connection failures and I/O errors wrap
        // their underlying cause in the text.
        match self {
            Self::DaemonConnectionFailed(msg) => {
                write!(f, "Failed to connect to nix-daemon: {msg}")
            }
            Self::OperationFailed(msg) => write!(f, "Store operation failed: {msg}"),
            Self::InvalidPath(msg) => write!(f, "Invalid store path: {msg}"),
            Self::PathNotFound(path) => write!(f, "Path not found in store: {path}"),
            Self::Io(err) => write!(f, "I/O error: {err}"),
        }
    }
}

impl std::error::Error for StoreError {}

impl From<std::io::Error> for StoreError {
    fn from(err: std::io::Error) -> Self {
        Self::Io(err)
    }
}

View File

@@ -0,0 +1,96 @@
use super::Store;
use crate::error::{Error, Result};
use crate::fetcher::cache::FetcherCache;
use std::fs;
use std::path::Path;
/// Local, daemon-free store that reuses the fetcher cache directory layout.
pub struct SimulatedStore {
    // Backing cache; store paths are produced by `FetcherCache::make_store_path`.
    cache: FetcherCache,
    // Directory prefix reported by `get_store_dir`.
    store_dir: String,
}
impl SimulatedStore {
    /// Builds the simulated store on top of a fresh `FetcherCache`.
    ///
    /// NOTE(review): the `<cache_dir>/nix-js/fetchers/store` path constructed
    /// here assumes it matches the layout `FetcherCache::make_store_path`
    /// writes into — the two are maintained independently; confirm they stay
    /// in sync.
    pub fn new() -> Result<Self> {
        let cache = FetcherCache::new()
            .map_err(|e| Error::internal(format!("Failed to create simulated store: {}", e)))?;
        let store_dir = dirs::cache_dir()
            .unwrap_or_else(|| std::path::PathBuf::from("/tmp"))
            .join("nix-js")
            .join("fetchers")
            .join("store")
            .to_string_lossy()
            .to_string();
        Ok(Self { cache, store_dir })
    }
    /// Read access to the underlying fetcher cache.
    pub fn cache(&self) -> &FetcherCache {
        &self.cache
    }
}
impl Store for SimulatedStore {
    fn get_store_dir(&self) -> &str {
        &self.store_dir
    }
    // "Valid" simply means the path exists on disk.
    fn is_valid_path(&self, path: &str) -> Result<bool> {
        Ok(Path::new(path).exists())
    }
    // The simulated store cannot substitute; the path must already exist.
    fn ensure_path(&self, path: &str) -> Result<()> {
        if !Path::new(path).exists() {
            return Err(Error::eval_error(
                format!(
                    "builtins.storePath: path '{}' does not exist in the simulated store",
                    path
                ),
                None,
            ));
        }
        Ok(())
    }
    // Writes `content` to a content-addressed file; idempotent if the path
    // already exists. `_recursive` and `_references` are ignored here.
    //
    // NOTE(review): the hash is computed over `from_utf8_lossy(content)`, so
    // distinct non-UTF-8 byte sequences can map to the same replacement-char
    // string and collide — consider hashing raw bytes; confirm callers only
    // pass text.
    fn add_to_store(
        &self,
        name: &str,
        content: &[u8],
        _recursive: bool,
        _references: Vec<String>,
    ) -> Result<String> {
        let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(content));
        let store_path = self.cache.make_store_path(&hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))
                .map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
            fs::write(&store_path, content)
                .map_err(|e| Error::internal(format!("Failed to write to store: {}", e)))?;
        }
        Ok(store_path.to_string_lossy().to_string())
    }
    // Text objects are just flat files here.
    fn add_text_to_store(
        &self,
        name: &str,
        content: &str,
        references: Vec<String>,
    ) -> Result<String> {
        self.add_to_store(name, content.as_bytes(), false, references)
    }
    // Computes the would-be path only; nothing is written.
    // `_hash_algo`/`_hash_mode` are ignored by the simulated store.
    fn make_fixed_output_path(
        &self,
        _hash_algo: &str,
        hash: &str,
        _hash_mode: &str,
        name: &str,
    ) -> Result<String> {
        let store_path = self.cache.make_store_path(hash, name);
        Ok(store_path.to_string_lossy().to_string())
    }
}

View File

@@ -0,0 +1,132 @@
use crate::error::{Error, Result};
/// Checks that `path` is a syntactically well-formed store path directly
/// under `store_dir`, i.e. `<store_dir>/<32-char nix-base32 hash>-<name>`.
///
/// The hash must use the nix base32 alphabet (0-9 and a-z without the
/// letters e, o, t, u); the name must be non-empty, must not start with '.',
/// and may only contain alphanumerics and `+ - . _ ? =`. Returns the first
/// violation found as an eval error; existence on disk is NOT checked.
pub fn validate_store_path(store_dir: &str, path: &str) -> Result<()> {
    if !path.starts_with(store_dir) {
        return Err(Error::eval_error(
            format!("path '{}' is not in the Nix store", path),
            None,
        ));
    }
    // Strip "<store_dir>/" to get "<hash>-<name>".
    let rest = path
        .strip_prefix(store_dir)
        .and_then(|s| s.strip_prefix('/'))
        .ok_or_else(|| Error::eval_error(format!("invalid store path format: {}", path), None))?;
    if rest.is_empty() {
        return Err(Error::eval_error(
            format!("store path cannot be store directory itself: {}", path),
            None,
        ));
    }
    // Split at the first '-' (the base32 alphabet never contains one).
    let Some((hash, name)) = rest.split_once('-') else {
        return Err(Error::eval_error(
            format!("invalid store path format (missing name): {}", path),
            None,
        ));
    };
    if hash.len() != 32 {
        return Err(Error::eval_error(
            format!(
                "invalid store path hash length (expected 32, got {}): {}",
                hash.len(),
                hash
            ),
            None,
        ));
    }
    // Reject the first character outside the nix base32 alphabet, if any.
    if let Some(ch) = hash
        .chars()
        .find(|c| !matches!(c, '0'..='9' | 'a'..='d' | 'f'..='n' | 'p'..='s' | 'v'..='z'))
    {
        return Err(Error::eval_error(
            format!("invalid character '{}' in store path hash: {}", ch, hash),
            None,
        ));
    }
    if name.is_empty() {
        return Err(Error::eval_error(
            format!("store path has empty name: {}", path),
            None,
        ));
    }
    if name.starts_with('.') {
        return Err(Error::eval_error(
            format!("store path name cannot start with '.': {}", name),
            None,
        ));
    }
    if let Some(ch) = name.chars().find(|c| {
        !matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '+' | '-' | '.' | '_' | '?' | '=')
    }) {
        return Err(Error::eval_error(
            format!("invalid character '{}' in store path name: {}", ch, name),
            None,
        ));
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Paths covering the full valid surface: each base32 letter, names with
    // digits/dashes, and every allowed punctuation character.
    #[test]
    fn test_valid_store_paths() {
        let store_dir = "/nix/store";
        let valid_paths = vec![
            "/nix/store/0123456789abcdfghijklmnpqrsvwxyz-hello",
            "/nix/store/abcdfghijklmnpqrsvwxyz0123456789-hello-1.0",
            "/nix/store/00000000000000000000000000000000-test_+-.?="
        ];
        for path in valid_paths {
            assert!(
                validate_store_path(store_dir, path).is_ok(),
                "Expected {} to be valid, got {:?}",
                path,
                validate_store_path(store_dir, path)
            );
        }
    }
    // One rejection case per rule: wrong prefix, degenerate path, hash length,
    // each excluded base32 letter (e/o/u/t), bad names, missing name.
    #[test]
    fn test_invalid_store_paths() {
        let store_dir = "/nix/store";
        let invalid_paths = vec![
            ("/tmp/foo", "not in store"),
            ("/nix/store", "empty relative"),
            ("/nix/store/tooshort-name", "hash too short"),
            (
                "/nix/store/abc123defghijklmnopqrstuvwxyz123-name",
                "hash too long"
            ),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123e-name", "e in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123o-name", "o in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123u-name", "u in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123t-name", "t in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd1234-.name", "name starts with dot"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd1234-na/me", "slash in name"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd1234", "missing name"),
        ];
        for (path, reason) in invalid_paths {
            assert!(
                validate_store_path(store_dir, path).is_err(),
                "Expected {} to be invalid ({})",
                path,
                reason
            );
        }
    }
}

View File

@@ -0,0 +1,225 @@
mod utils;
use std::sync::Once;
use nix_js::value::Value;
use utils::eval_result;
// Forces the simulated store backend for every test in this file.
// set_var is process-global and not thread-safe (hence the `unsafe` block);
// the Once guard makes sure it runs exactly once.
fn init() {
    static INIT: Once = Once::new();
    INIT.call_once(|| {
        unsafe { std::env::set_var("NIX_JS_STORE_MODE", "simulated") };
    });
}
/// `builtins.toFile` returns a path ending in "-<name>" whose on-disk
/// contents are exactly the given string.
#[test]
fn to_file_simple() {
    init();
    let result =
        eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-hello.txt"));
            assert!(std::path::Path::new(&path).exists());
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "Hello, World!");
        }
        _ => panic!("Expected string, got {:?}", result),
    }
}
/// Interpolating one toFile path into another embeds the dependency's
/// store path in the outer file's contents.
#[test]
fn to_file_with_references() {
    init();
    let result = eval_result(
        r#"
        let
            dep = builtins.toFile "dep.txt" "dependency";
        in
        builtins.toFile "main.txt" "Reference: ${dep}"
        "#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-main.txt"));
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert!(contents.contains("Reference: "));
            assert!(contents.contains("-dep.txt"));
        }
        _ => panic!("Expected string"),
    }
}
/// Names containing '/' are rejected with a specific error message.
#[test]
fn to_file_invalid_name_with_slash() {
    init();
    let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);
    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("name cannot contain '/'")
    );
}
/// "." is not a valid file name.
#[test]
fn to_file_invalid_name_dot() {
    init();
    let result = eval_result(r#"builtins.toFile "." "content""#);
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("invalid name"));
}
/// ".." is not a valid file name either (no path traversal).
#[test]
fn to_file_invalid_name_dotdot() {
    init();
    let result = eval_result(r#"builtins.toFile ".." "content""#);
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("invalid name"));
}
/// `builtins.storePath` rejects paths outside the store directory.
#[test]
fn store_path_validation_not_in_store() {
    init();
    let result = eval_result(r#"builtins.storePath "/tmp/foo""#);
    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("not in the Nix store")
    );
}
/// `builtins.storePath` rejects a path whose hash segment is malformed.
/// A dummy toFile call is made first purely to discover the simulated
/// store's directory, so the constructed path passes the prefix check and
/// fails on the hash instead.
#[test]
fn store_path_validation_malformed_hash() {
    init();
    let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
        .expect("Failed to create dummy file");
    let dummy_path = match dummy_file_result {
        Value::String(ref p) => p.clone(),
        _ => panic!("Expected string"),
    };
    let store_dir = std::path::Path::new(&dummy_path)
        .parent()
        .expect("Failed to get parent dir")
        .to_str()
        .expect("Failed to convert to string");
    // "invalid" is 7 chars, far short of the required 32-char hash.
    let test_path = format!("{}/invalid-hash-hello", store_dir);
    let result = eval_result(&format!(r#"builtins.storePath "{}""#, test_path));
    assert!(result.is_err());
    let err_str = result.unwrap_err().to_string();
    assert!(
        err_str.contains("invalid") || err_str.contains("hash"),
        "Expected hash validation error, got: {}",
        err_str
    );
}
/// `builtins.storePath` rejects a path that has a well-formed hash but no
/// "-<name>" suffix. Uses the same dummy-toFile trick to find the store dir.
#[test]
fn store_path_validation_missing_name() {
    init();
    let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
        .expect("Failed to create dummy file");
    let dummy_path = match dummy_file_result {
        Value::String(ref p) => p.clone(),
        _ => panic!("Expected string"),
    };
    let store_dir = std::path::Path::new(&dummy_path)
        .parent()
        .expect("Failed to get parent dir")
        .to_str()
        .expect("Failed to convert to string");
    let test_path = format!("{}/abcd1234abcd1234abcd1234abcd1234", store_dir);
    let result = eval_result(&format!(r#"builtins.storePath "{}""#, test_path));
    assert!(result.is_err());
    let err_str = result.unwrap_err().to_string();
    assert!(
        err_str.contains("missing name") || err_str.contains("format"),
        "Expected missing name error, got: {}",
        err_str
    );
}
/// `builtins.toFile` can be partially applied (curried) like any Nix function.
#[test]
fn to_file_curried_application() {
    init();
    let result = eval_result(
        r#"
        let
            makeFile = builtins.toFile "test.txt";
        in
        makeFile "test content"
        "#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-test.txt"));
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "test content");
        }
        _ => panic!("Expected string"),
    }
}
/// A number stringified with builtins.toString round-trips through toFile.
#[test]
fn to_file_number_conversion() {
    init();
    let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
        .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "42");
        }
        _ => panic!("Expected string"),
    }
}
/// A list joined with concatStringsSep is written verbatim (newline-separated).
#[test]
fn to_file_list_conversion() {
    init();
    let result = eval_result(
        r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "line1\nline2\nline3");
        }
        _ => panic!("Expected string"),
    }
}

View File

@@ -1,14 +1,14 @@
use nix_js::context::Context; #![cfg(feature = "daemon")]
mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::{eval, eval_result};
#[test] #[test]
fn derivation_minimal() { fn derivation_minimal() {
let mut ctx = Context::new().unwrap(); let result =
let result = ctx eval(r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#);
.eval_code(
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -44,17 +44,14 @@ fn derivation_minimal() {
#[test] #[test]
fn derivation_with_args() { fn derivation_with_args() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "test";
r#"derivation { builder = "/bin/sh";
name = "test"; system = "x86_64-linux";
builder = "/bin/sh"; args = ["-c" "echo hello"];
system = "x86_64-linux"; }"#,
args = ["-c" "echo hello"]; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -69,12 +66,9 @@ fn derivation_with_args() {
#[test] #[test]
fn derivation_to_string() { fn derivation_to_string() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
.eval_code( );
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
)
.unwrap();
match result { match result {
Value::String(s) => assert_eq!(s, "/nix/store/xpcvxsx5sw4rbq666blz6sxqlmsqphmr-foo"), Value::String(s) => assert_eq!(s, "/nix/store/xpcvxsx5sw4rbq666blz6sxqlmsqphmr-foo"),
@@ -84,8 +78,7 @@ fn derivation_to_string() {
#[test] #[test]
fn derivation_missing_name() { fn derivation_missing_name() {
let mut ctx = Context::new().unwrap(); let result = eval_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
let result = ctx.eval_code(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
assert!(result.is_err()); assert!(result.is_err());
let err_msg = result.unwrap_err().to_string(); let err_msg = result.unwrap_err().to_string();
@@ -94,8 +87,7 @@ fn derivation_missing_name() {
#[test] #[test]
fn derivation_invalid_name_with_drv_suffix() { fn derivation_invalid_name_with_drv_suffix() {
let mut ctx = Context::new().unwrap(); let result = eval_result(
let result = ctx.eval_code(
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#, r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
); );
@@ -106,8 +98,7 @@ fn derivation_invalid_name_with_drv_suffix() {
#[test] #[test]
fn derivation_missing_builder() { fn derivation_missing_builder() {
let mut ctx = Context::new().unwrap(); let result = eval_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
let result = ctx.eval_code(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
assert!(result.is_err()); assert!(result.is_err());
let err_msg = result.unwrap_err().to_string(); let err_msg = result.unwrap_err().to_string();
@@ -116,8 +107,7 @@ fn derivation_missing_builder() {
#[test] #[test]
fn derivation_missing_system() { fn derivation_missing_system() {
let mut ctx = Context::new().unwrap(); let result = eval_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
let result = ctx.eval_code(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
assert!(result.is_err()); assert!(result.is_err());
let err_msg = result.unwrap_err().to_string(); let err_msg = result.unwrap_err().to_string();
@@ -126,18 +116,15 @@ fn derivation_missing_system() {
#[test] #[test]
fn derivation_with_env_vars() { fn derivation_with_env_vars() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "test";
r#"derivation { builder = "/bin/sh";
name = "test"; system = "x86_64-linux";
builder = "/bin/sh"; MY_VAR = "hello";
system = "x86_64-linux"; ANOTHER = "world";
MY_VAR = "hello"; }"#,
ANOTHER = "world"; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -150,12 +137,9 @@ fn derivation_with_env_vars() {
#[test] #[test]
fn derivation_strict() { fn derivation_strict() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
.eval_code( );
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -169,12 +153,10 @@ fn derivation_strict() {
#[test] #[test]
fn derivation_deterministic_paths() { fn derivation_deterministic_paths() {
let mut ctx = Context::new().unwrap();
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#; let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
let result1 = ctx.eval_code(expr).unwrap(); let result1 = eval(expr);
let result2 = ctx.eval_code(expr).unwrap(); let result2 = eval(expr);
match (result1, result2) { match (result1, result2) {
(Value::AttrSet(attrs1), Value::AttrSet(attrs2)) => { (Value::AttrSet(attrs1), Value::AttrSet(attrs2)) => {
@@ -187,17 +169,14 @@ fn derivation_deterministic_paths() {
#[test] #[test]
fn derivation_escaping_in_aterm() { fn derivation_escaping_in_aterm() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "test";
r#"derivation { builder = "/bin/sh";
name = "test"; system = "x86_64-linux";
builder = "/bin/sh"; args = ["-c" "echo \"hello\nworld\""];
system = "x86_64-linux"; }"#,
args = ["-c" "echo \"hello\nworld\""]; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -210,17 +189,14 @@ fn derivation_escaping_in_aterm() {
#[test] #[test]
fn multi_output_two_outputs() { fn multi_output_two_outputs() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "multi";
r#"derivation { builder = "/bin/sh";
name = "multi"; system = "x86_64-linux";
builder = "/bin/sh"; outputs = ["out" "dev"];
system = "x86_64-linux"; }"#,
outputs = ["out" "dev"]; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -271,17 +247,14 @@ fn multi_output_two_outputs() {
#[test] #[test]
fn multi_output_three_outputs() { fn multi_output_three_outputs() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "three";
r#"derivation { builder = "/bin/sh";
name = "three"; system = "x86_64-linux";
builder = "/bin/sh"; outputs = ["out" "dev" "doc"];
system = "x86_64-linux"; }"#,
outputs = ["out" "dev" "doc"]; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -322,17 +295,14 @@ fn multi_output_three_outputs() {
#[test] #[test]
fn multi_output_backward_compat() { fn multi_output_backward_compat() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "compat";
r#"derivation { builder = "/bin/sh";
name = "compat"; system = "x86_64-linux";
builder = "/bin/sh"; outputs = ["out"];
system = "x86_64-linux"; }"#,
outputs = ["out"]; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -351,47 +321,39 @@ fn multi_output_backward_compat() {
#[test] #[test]
fn multi_output_deterministic() { fn multi_output_deterministic() {
let mut ctx = Context::new().unwrap(); let result1 = eval(
let result1 = ctx r#"derivation {
.eval_code( name = "determ";
r#"derivation { builder = "/bin/sh";
name = "determ"; system = "x86_64-linux";
builder = "/bin/sh"; outputs = ["out" "dev"];
system = "x86_64-linux"; }"#,
outputs = ["out" "dev"]; );
}"#,
)
.unwrap();
let result2 = ctx let result2 = eval(
.eval_code( r#"derivation {
r#"derivation { name = "determ";
name = "determ"; builder = "/bin/sh";
builder = "/bin/sh"; system = "x86_64-linux";
system = "x86_64-linux"; outputs = ["out" "dev"];
outputs = ["out" "dev"]; }"#,
}"#, );
)
.unwrap();
assert_eq!(result1, result2); assert_eq!(result1, result2);
} }
#[test] #[test]
fn fixed_output_sha256_flat() { fn fixed_output_sha256_flat() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "fixed";
r#"derivation { builder = "/bin/sh";
name = "fixed"; system = "x86_64-linux";
builder = "/bin/sh"; outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
system = "x86_64-linux"; outputHashAlgo = "sha256";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000"; outputHashMode = "flat";
outputHashAlgo = "sha256"; }"#,
outputHashMode = "flat"; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -419,17 +381,14 @@ fn fixed_output_sha256_flat() {
#[test] #[test]
fn fixed_output_default_algo() { fn fixed_output_default_algo() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "default";
r#"derivation { builder = "/bin/sh";
name = "default"; system = "x86_64-linux";
builder = "/bin/sh"; outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
system = "x86_64-linux"; }"#,
outputHash = "0000000000000000000000000000000000000000000000000000000000000000"; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -445,19 +404,16 @@ fn fixed_output_default_algo() {
#[test] #[test]
fn fixed_output_recursive_mode() { fn fixed_output_recursive_mode() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "recursive";
r#"derivation { builder = "/bin/sh";
name = "recursive"; system = "x86_64-linux";
builder = "/bin/sh"; outputHash = "1111111111111111111111111111111111111111111111111111111111111111";
system = "x86_64-linux"; outputHashAlgo = "sha256";
outputHash = "1111111111111111111111111111111111111111111111111111111111111111"; outputHashMode = "recursive";
outputHashAlgo = "sha256"; }"#,
outputHashMode = "recursive"; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -478,8 +434,7 @@ fn fixed_output_recursive_mode() {
#[test] #[test]
fn fixed_output_rejects_multi_output() { fn fixed_output_rejects_multi_output() {
let mut ctx = Context::new().unwrap(); let result = eval_result(
let result = ctx.eval_code(
r#"derivation { r#"derivation {
name = "invalid"; name = "invalid";
builder = "/bin/sh"; builder = "/bin/sh";
@@ -496,8 +451,7 @@ fn fixed_output_rejects_multi_output() {
#[test] #[test]
fn fixed_output_invalid_hash_mode() { fn fixed_output_invalid_hash_mode() {
let mut ctx = Context::new().unwrap(); let result = eval_result(
let result = ctx.eval_code(
r#"derivation { r#"derivation {
name = "invalid"; name = "invalid";
builder = "/bin/sh"; builder = "/bin/sh";
@@ -514,20 +468,17 @@ fn fixed_output_invalid_hash_mode() {
#[test] #[test]
fn structured_attrs_basic() { fn structured_attrs_basic() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "struct";
r#"derivation { builder = "/bin/sh";
name = "struct"; system = "x86_64-linux";
builder = "/bin/sh"; __structuredAttrs = true;
system = "x86_64-linux"; foo = "bar";
__structuredAttrs = true; count = 42;
foo = "bar"; enabled = true;
count = 42; }"#,
enabled = true; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -542,18 +493,15 @@ fn structured_attrs_basic() {
#[test] #[test]
fn structured_attrs_nested() { fn structured_attrs_nested() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "nested";
r#"derivation { builder = "/bin/sh";
name = "nested"; system = "x86_64-linux";
builder = "/bin/sh"; __structuredAttrs = true;
system = "x86_64-linux"; data = { x = 1; y = [2 3]; };
__structuredAttrs = true; }"#,
data = { x = 1; y = [2 3]; }; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -566,8 +514,7 @@ fn structured_attrs_nested() {
#[test] #[test]
fn structured_attrs_rejects_functions() { fn structured_attrs_rejects_functions() {
let mut ctx = Context::new().unwrap(); let result = eval_result(
let result = ctx.eval_code(
r#"derivation { r#"derivation {
name = "invalid"; name = "invalid";
builder = "/bin/sh"; builder = "/bin/sh";
@@ -584,18 +531,15 @@ fn structured_attrs_rejects_functions() {
#[test] #[test]
fn structured_attrs_false() { fn structured_attrs_false() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "normal";
r#"derivation { builder = "/bin/sh";
name = "normal"; system = "x86_64-linux";
builder = "/bin/sh"; __structuredAttrs = false;
system = "x86_64-linux"; foo = "bar";
__structuredAttrs = false; }"#,
foo = "bar"; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -610,19 +554,16 @@ fn structured_attrs_false() {
#[test] #[test]
fn ignore_nulls_true() { fn ignore_nulls_true() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "ignore";
r#"derivation { builder = "/bin/sh";
name = "ignore"; system = "x86_64-linux";
builder = "/bin/sh"; __ignoreNulls = true;
system = "x86_64-linux"; foo = "bar";
__ignoreNulls = true; nullValue = null;
foo = "bar"; }"#,
nullValue = null; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -635,18 +576,15 @@ fn ignore_nulls_true() {
#[test] #[test]
fn ignore_nulls_false() { fn ignore_nulls_false() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "keep";
r#"derivation { builder = "/bin/sh";
name = "keep"; system = "x86_64-linux";
builder = "/bin/sh"; __ignoreNulls = false;
system = "x86_64-linux"; nullValue = null;
__ignoreNulls = false; }"#,
nullValue = null; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -661,20 +599,17 @@ fn ignore_nulls_false() {
#[test] #[test]
fn ignore_nulls_with_structured_attrs() { fn ignore_nulls_with_structured_attrs() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "combined";
r#"derivation { builder = "/bin/sh";
name = "combined"; system = "x86_64-linux";
builder = "/bin/sh"; __structuredAttrs = true;
system = "x86_64-linux"; __ignoreNulls = true;
__structuredAttrs = true; foo = "bar";
__ignoreNulls = true; nullValue = null;
foo = "bar"; }"#,
nullValue = null; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -688,21 +623,18 @@ fn ignore_nulls_with_structured_attrs() {
#[test] #[test]
fn all_features_combined() { fn all_features_combined() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "all";
r#"derivation { builder = "/bin/sh";
name = "all"; system = "x86_64-linux";
builder = "/bin/sh"; outputs = ["out" "dev"];
system = "x86_64-linux"; __structuredAttrs = true;
outputs = ["out" "dev"]; __ignoreNulls = true;
__structuredAttrs = true; data = { x = 1; };
__ignoreNulls = true; nullValue = null;
data = { x = 1; }; }"#,
nullValue = null; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {
@@ -718,19 +650,16 @@ fn all_features_combined() {
#[test] #[test]
fn fixed_output_with_structured_attrs() { fn fixed_output_with_structured_attrs() {
let mut ctx = Context::new().unwrap(); let result = eval(
let result = ctx r#"derivation {
.eval_code( name = "fixstruct";
r#"derivation { builder = "/bin/sh";
name = "fixstruct"; system = "x86_64-linux";
builder = "/bin/sh"; outputHash = "abc123";
system = "x86_64-linux"; __structuredAttrs = true;
outputHash = "abc123"; data = { key = "value"; };
__structuredAttrs = true; }"#,
data = { key = "value"; }; );
}"#,
)
.unwrap();
match result { match result {
Value::AttrSet(attrs) => { Value::AttrSet(attrs) => {

View File

@@ -104,7 +104,7 @@ fn import_with_complex_dependency_graph() {
// Tests for builtins.path // Tests for builtins.path
#[test] #[test]
fn test_path_with_file() { fn path_with_file() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("test.txt"); let test_file = temp_dir.path().join("test.txt");
@@ -115,7 +115,7 @@ fn test_path_with_file() {
// Should return a store path string // Should return a store path string
if let Value::String(store_path) = result { if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/")); assert!(store_path.starts_with(ctx.get_store_dir()));
assert!(store_path.contains("test.txt")); assert!(store_path.contains("test.txt"));
} else { } else {
panic!("Expected string, got {:?}", result); panic!("Expected string, got {:?}", result);
@@ -123,7 +123,7 @@ fn test_path_with_file() {
} }
#[test] #[test]
fn test_path_with_custom_name() { fn path_with_custom_name() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("original.txt"); let test_file = temp_dir.path().join("original.txt");
@@ -144,7 +144,7 @@ fn test_path_with_custom_name() {
} }
#[test] #[test]
fn test_path_with_directory_recursive() { fn path_with_directory_recursive() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("mydir"); let test_dir = temp_dir.path().join("mydir");
@@ -159,7 +159,7 @@ fn test_path_with_directory_recursive() {
let result = ctx.eval_code(&expr).unwrap(); let result = ctx.eval_code(&expr).unwrap();
if let Value::String(store_path) = result { if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/")); assert!(store_path.starts_with(ctx.get_store_dir()));
assert!(store_path.contains("mydir")); assert!(store_path.contains("mydir"));
} else { } else {
panic!("Expected string, got {:?}", result); panic!("Expected string, got {:?}", result);
@@ -167,7 +167,7 @@ fn test_path_with_directory_recursive() {
} }
#[test] #[test]
fn test_path_flat_with_file() { fn path_flat_with_file() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("flat.txt"); let test_file = temp_dir.path().join("flat.txt");
@@ -180,14 +180,14 @@ fn test_path_flat_with_file() {
let result = ctx.eval_code(&expr).unwrap(); let result = ctx.eval_code(&expr).unwrap();
if let Value::String(store_path) = result { if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/")); assert!(store_path.starts_with(ctx.get_store_dir()));
} else { } else {
panic!("Expected string, got {:?}", result); panic!("Expected string, got {:?}", result);
} }
} }
#[test] #[test]
fn test_path_flat_with_directory_fails() { fn path_flat_with_directory_fails() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("mydir"); let test_dir = temp_dir.path().join("mydir");
@@ -205,7 +205,7 @@ fn test_path_flat_with_directory_fails() {
} }
#[test] #[test]
fn test_path_nonexistent_fails() { fn path_nonexistent_fails() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#; let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#;
@@ -217,7 +217,7 @@ fn test_path_nonexistent_fails() {
} }
#[test] #[test]
fn test_path_missing_path_param() { fn path_missing_path_param() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let expr = r#"builtins.path { name = "test"; }"#; let expr = r#"builtins.path { name = "test"; }"#;
@@ -229,7 +229,7 @@ fn test_path_missing_path_param() {
} }
#[test] #[test]
fn test_path_with_sha256() { fn path_with_sha256() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("hash_test.txt"); let test_file = temp_dir.path().join("hash_test.txt");
@@ -257,7 +257,7 @@ fn test_path_with_sha256() {
} }
#[test] #[test]
fn test_path_deterministic() { fn path_deterministic() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("deterministic.txt"); let test_file = temp_dir.path().join("deterministic.txt");

View File

@@ -4,113 +4,113 @@ use nix_js::value::Value;
use utils::{eval, eval_result}; use utils::{eval, eval_result};
#[test] #[test]
fn test_path_type_of() { fn path_type_of() {
let result = eval("builtins.typeOf ./foo"); let result = eval("builtins.typeOf ./foo");
assert_eq!(result, Value::String("path".to_string())); assert_eq!(result, Value::String("path".to_string()));
} }
#[test] #[test]
fn test_is_path_true() { fn is_path_true() {
let result = eval("builtins.isPath ./foo"); let result = eval("builtins.isPath ./foo");
assert_eq!(result, Value::Bool(true)); assert_eq!(result, Value::Bool(true));
} }
#[test] #[test]
fn test_is_path_false_string() { fn is_path_false_string() {
let result = eval(r#"builtins.isPath "./foo""#); let result = eval(r#"builtins.isPath "./foo""#);
assert_eq!(result, Value::Bool(false)); assert_eq!(result, Value::Bool(false));
} }
#[test] #[test]
fn test_is_path_false_number() { fn is_path_false_number() {
let result = eval("builtins.isPath 42"); let result = eval("builtins.isPath 42");
assert_eq!(result, Value::Bool(false)); assert_eq!(result, Value::Bool(false));
} }
#[test] #[test]
fn test_path_concat_type() { fn path_concat_type() {
// path + string = path // path + string = path
let result = eval(r#"builtins.typeOf (./foo + "/bar")"#); let result = eval(r#"builtins.typeOf (./foo + "/bar")"#);
assert_eq!(result, Value::String("path".to_string())); assert_eq!(result, Value::String("path".to_string()));
} }
#[test] #[test]
fn test_string_path_concat_type() { fn string_path_concat_type() {
// string + path = string // string + path = string
let result = eval(r#"builtins.typeOf ("prefix-" + ./foo)"#); let result = eval(r#"builtins.typeOf ("prefix-" + ./foo)"#);
assert_eq!(result, Value::String("string".to_string())); assert_eq!(result, Value::String("string".to_string()));
} }
#[test] #[test]
fn test_basename_of_path() { fn basename_of_path() {
let result = eval("builtins.baseNameOf ./path/to/file.nix"); let result = eval("builtins.baseNameOf ./path/to/file.nix");
assert!(matches!(result, Value::String(s) if s == "file.nix")); assert!(matches!(result, Value::String(s) if s == "file.nix"));
} }
#[test] #[test]
fn test_basename_of_string() { fn basename_of_string() {
let result = eval(r#"builtins.baseNameOf "/path/to/file.nix""#); let result = eval(r#"builtins.baseNameOf "/path/to/file.nix""#);
assert_eq!(result, Value::String("file.nix".to_string())); assert_eq!(result, Value::String("file.nix".to_string()));
} }
#[test] #[test]
fn test_dir_of_path_type() { fn dir_of_path_type() {
// dirOf preserves path type // dirOf preserves path type
let result = eval("builtins.typeOf (builtins.dirOf ./path/to/file.nix)"); let result = eval("builtins.typeOf (builtins.dirOf ./path/to/file.nix)");
assert_eq!(result, Value::String("path".to_string())); assert_eq!(result, Value::String("path".to_string()));
} }
#[test] #[test]
fn test_dir_of_string_type() { fn dir_of_string_type() {
// dirOf preserves string type // dirOf preserves string type
let result = eval(r#"builtins.typeOf (builtins.dirOf "/path/to/file.nix")"#); let result = eval(r#"builtins.typeOf (builtins.dirOf "/path/to/file.nix")"#);
assert_eq!(result, Value::String("string".to_string())); assert_eq!(result, Value::String("string".to_string()));
} }
#[test] #[test]
fn test_path_equality() { fn path_equality() {
// Same path should be equal // Same path should be equal
let result = eval("./foo == ./foo"); let result = eval("./foo == ./foo");
assert_eq!(result, Value::Bool(true)); assert_eq!(result, Value::Bool(true));
} }
#[test] #[test]
fn test_path_not_equal_string() { fn path_not_equal_string() {
// Paths and strings are different types - should not be equal // Paths and strings are different types - should not be equal
let result = eval(r#"./foo == "./foo""#); let result = eval(r#"./foo == "./foo""#);
assert_eq!(result, Value::Bool(false)); assert_eq!(result, Value::Bool(false));
} }
#[test] #[test]
fn test_to_path_absolute() { fn to_path_absolute() {
// toPath with absolute path returns string // toPath with absolute path returns string
let result = eval(r#"builtins.toPath "/foo/bar""#); let result = eval(r#"builtins.toPath "/foo/bar""#);
assert_eq!(result, Value::String("/foo/bar".to_string())); assert_eq!(result, Value::String("/foo/bar".to_string()));
} }
#[test] #[test]
fn test_to_path_type_is_string() { fn to_path_type_is_string() {
// toPath returns a string, not a path // toPath returns a string, not a path
let result = eval(r#"builtins.typeOf (builtins.toPath "/foo")"#); let result = eval(r#"builtins.typeOf (builtins.toPath "/foo")"#);
assert_eq!(result, Value::String("string".to_string())); assert_eq!(result, Value::String("string".to_string()));
} }
#[test] #[test]
fn test_to_path_relative_fails() { fn to_path_relative_fails() {
// toPath with relative path should fail // toPath with relative path should fail
let result = eval_result(r#"builtins.toPath "foo/bar""#); let result = eval_result(r#"builtins.toPath "foo/bar""#);
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test]
fn test_to_path_empty_fails() { fn to_path_empty_fails() {
// toPath with empty string should fail // toPath with empty string should fail
let result = eval_result(r#"builtins.toPath """#); let result = eval_result(r#"builtins.toPath """#);
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test]
fn test_to_path_from_path_value() { fn to_path_from_path_value() {
// toPath can accept a path value too (coerces to string first) // toPath can accept a path value too (coerces to string first)
let result = eval("builtins.toPath ./foo"); let result = eval("builtins.toPath ./foo");
// Should succeed and return the absolute path as a string // Should succeed and return the absolute path as a string

View File

@@ -4,7 +4,7 @@ use nix_js::value::{List, Value};
use utils::eval; use utils::eval;
#[test] #[test]
fn test_match_exact_full_string() { fn match_exact_full_string() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "foobar" "foobar""#), eval(r#"builtins.match "foobar" "foobar""#),
Value::List(List::new(vec![])) Value::List(List::new(vec![]))
@@ -12,12 +12,12 @@ fn test_match_exact_full_string() {
} }
#[test] #[test]
fn test_match_partial_returns_null() { fn match_partial_returns_null() {
assert_eq!(eval(r#"builtins.match "foo" "foobar""#), Value::Null); assert_eq!(eval(r#"builtins.match "foo" "foobar""#), Value::Null);
} }
#[test] #[test]
fn test_match_with_capture_groups() { fn match_with_capture_groups() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "(.*)\\.nix" "foobar.nix""#), eval(r#"builtins.match "(.*)\\.nix" "foobar.nix""#),
Value::List(List::new(vec![Value::String("foobar".into())])) Value::List(List::new(vec![Value::String("foobar".into())]))
@@ -25,7 +25,7 @@ fn test_match_with_capture_groups() {
} }
#[test] #[test]
fn test_match_multiple_capture_groups() { fn match_multiple_capture_groups() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "foobar.nix""#), eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "foobar.nix""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -37,7 +37,7 @@ fn test_match_multiple_capture_groups() {
} }
#[test] #[test]
fn test_match_with_path() { fn match_with_path() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "/path/to/foobar.nix""#), eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "/path/to/foobar.nix""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -49,7 +49,7 @@ fn test_match_with_path() {
} }
#[test] #[test]
fn test_match_posix_space_class() { fn match_posix_space_class() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo ""#), eval(r#"builtins.match "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo ""#),
Value::List(List::new(vec![Value::String("foo".into())])) Value::List(List::new(vec![Value::String("foo".into())]))
@@ -57,7 +57,7 @@ fn test_match_posix_space_class() {
} }
#[test] #[test]
fn test_match_posix_upper_class() { fn match_posix_upper_class() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#), eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#),
Value::Null Value::Null
@@ -70,7 +70,7 @@ fn test_match_posix_upper_class() {
} }
#[test] #[test]
fn test_match_quantifiers() { fn match_quantifiers() {
assert_eq!( assert_eq!(
eval(r#"builtins.match "fo*" "f""#), eval(r#"builtins.match "fo*" "f""#),
Value::List(List::new(vec![])) Value::List(List::new(vec![]))
@@ -87,7 +87,7 @@ fn test_match_quantifiers() {
} }
#[test] #[test]
fn test_split_non_capturing() { fn split_non_capturing() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "foobar" "foobar""#), eval(r#"builtins.split "foobar" "foobar""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -99,7 +99,7 @@ fn test_split_non_capturing() {
} }
#[test] #[test]
fn test_split_no_match() { fn split_no_match() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "fo+" "f""#), eval(r#"builtins.split "fo+" "f""#),
Value::List(List::new(vec![Value::String("f".into())])) Value::List(List::new(vec![Value::String("f".into())]))
@@ -107,7 +107,7 @@ fn test_split_no_match() {
} }
#[test] #[test]
fn test_split_with_capture_group() { fn split_with_capture_group() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "(fo*)" "foobar""#), eval(r#"builtins.split "(fo*)" "foobar""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -119,7 +119,7 @@ fn test_split_with_capture_group() {
} }
#[test] #[test]
fn test_split_multiple_matches() { fn split_multiple_matches() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "(b)" "foobarbaz""#), eval(r#"builtins.split "(b)" "foobarbaz""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -133,7 +133,7 @@ fn test_split_multiple_matches() {
} }
#[test] #[test]
fn test_split_with_multiple_groups() { fn split_with_multiple_groups() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "(f)(o*)" "foo""#), eval(r#"builtins.split "(f)(o*)" "foo""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -148,7 +148,7 @@ fn test_split_with_multiple_groups() {
} }
#[test] #[test]
fn test_split_with_optional_groups() { fn split_with_optional_groups() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "(a)|(c)" "abc""#), eval(r#"builtins.split "(a)|(c)" "abc""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -162,7 +162,7 @@ fn test_split_with_optional_groups() {
} }
#[test] #[test]
fn test_split_greedy_matching() { fn split_greedy_matching() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "(o+)" "oooofoooo""#), eval(r#"builtins.split "(o+)" "oooofoooo""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -176,7 +176,7 @@ fn test_split_greedy_matching() {
} }
#[test] #[test]
fn test_split_posix_classes() { fn split_posix_classes() {
assert_eq!( assert_eq!(
eval(r#"builtins.split "([[:upper:]]+)" " FOO ""#), eval(r#"builtins.split "([[:upper:]]+)" " FOO ""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -188,7 +188,7 @@ fn test_split_posix_classes() {
} }
#[test] #[test]
fn test_replace_basic() { fn replace_basic() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["o"] ["a"] "foobar""#), eval(r#"builtins.replaceStrings ["o"] ["a"] "foobar""#),
Value::String("faabar".into()) Value::String("faabar".into())
@@ -196,7 +196,7 @@ fn test_replace_basic() {
} }
#[test] #[test]
fn test_replace_with_empty() { fn replace_with_empty() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["o"] [""] "foobar""#), eval(r#"builtins.replaceStrings ["o"] [""] "foobar""#),
Value::String("fbar".into()) Value::String("fbar".into())
@@ -204,7 +204,7 @@ fn test_replace_with_empty() {
} }
#[test] #[test]
fn test_replace_multiple_patterns() { fn replace_multiple_patterns() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["oo" "a"] ["a" "oo"] "foobar""#), eval(r#"builtins.replaceStrings ["oo" "a"] ["a" "oo"] "foobar""#),
Value::String("faboor".into()) Value::String("faboor".into())
@@ -212,7 +212,7 @@ fn test_replace_multiple_patterns() {
} }
#[test] #[test]
fn test_replace_first_match_wins() { fn replace_first_match_wins() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["oo" "oo"] ["u" "i"] "foobar""#), eval(r#"builtins.replaceStrings ["oo" "oo"] ["u" "i"] "foobar""#),
Value::String("fubar".into()) Value::String("fubar".into())
@@ -220,7 +220,7 @@ fn test_replace_first_match_wins() {
} }
#[test] #[test]
fn test_replace_empty_pattern() { fn replace_empty_pattern() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings [""] ["X"] "abc""#), eval(r#"builtins.replaceStrings [""] ["X"] "abc""#),
Value::String("XaXbXcX".into()) Value::String("XaXbXcX".into())
@@ -228,7 +228,7 @@ fn test_replace_empty_pattern() {
} }
#[test] #[test]
fn test_replace_empty_pattern_empty_string() { fn replace_empty_pattern_empty_string() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings [""] ["X"] """#), eval(r#"builtins.replaceStrings [""] ["X"] """#),
Value::String("X".into()) Value::String("X".into())
@@ -236,7 +236,7 @@ fn test_replace_empty_pattern_empty_string() {
} }
#[test] #[test]
fn test_replace_simple_char() { fn replace_simple_char() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["-"] ["_"] "a-b""#), eval(r#"builtins.replaceStrings ["-"] ["_"] "a-b""#),
Value::String("a_b".into()) Value::String("a_b".into())
@@ -244,7 +244,7 @@ fn test_replace_simple_char() {
} }
#[test] #[test]
fn test_replace_longer_pattern() { fn replace_longer_pattern() {
assert_eq!( assert_eq!(
eval(r#"builtins.replaceStrings ["oo"] ["u"] "foobar""#), eval(r#"builtins.replaceStrings ["oo"] ["u"] "foobar""#),
Value::String("fubar".into()) Value::String("fubar".into())
@@ -252,14 +252,14 @@ fn test_replace_longer_pattern() {
} }
#[test] #[test]
fn test_replace_different_lengths() { fn replace_different_lengths() {
let result = let result =
std::panic::catch_unwind(|| eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#)); std::panic::catch_unwind(|| eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#));
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test]
fn test_split_version_simple() { fn split_version_simple() {
assert_eq!( assert_eq!(
eval(r#"builtins.splitVersion "1.2.3""#), eval(r#"builtins.splitVersion "1.2.3""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -271,7 +271,7 @@ fn test_split_version_simple() {
} }
#[test] #[test]
fn test_split_version_with_pre() { fn split_version_with_pre() {
assert_eq!( assert_eq!(
eval(r#"builtins.splitVersion "2.3.0pre1234""#), eval(r#"builtins.splitVersion "2.3.0pre1234""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -285,7 +285,7 @@ fn test_split_version_with_pre() {
} }
#[test] #[test]
fn test_split_version_with_letters() { fn split_version_with_letters() {
assert_eq!( assert_eq!(
eval(r#"builtins.splitVersion "2.3a""#), eval(r#"builtins.splitVersion "2.3a""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -297,7 +297,7 @@ fn test_split_version_with_letters() {
} }
#[test] #[test]
fn test_split_version_with_dashes() { fn split_version_with_dashes() {
assert_eq!( assert_eq!(
eval(r#"builtins.splitVersion "2.3-beta1""#), eval(r#"builtins.splitVersion "2.3-beta1""#),
Value::List(List::new(vec![ Value::List(List::new(vec![
@@ -310,7 +310,7 @@ fn test_split_version_with_dashes() {
} }
#[test] #[test]
fn test_split_version_empty() { fn split_version_empty() {
assert_eq!( assert_eq!(
eval(r#"builtins.splitVersion """#), eval(r#"builtins.splitVersion """#),
Value::List(List::new(vec![])) Value::List(List::new(vec![]))

View File

@@ -152,8 +152,10 @@ fn string_add_merges_context() {
#[test] #[test]
fn context_in_derivation_args() { fn context_in_derivation_args() {
let result = eval( let mut ctx = Context::new().unwrap();
r#" let result = ctx
.eval_code(
r#"
let let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; }; dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv = derivation { drv = derivation {
@@ -164,10 +166,11 @@ fn context_in_derivation_args() {
}; };
in drv.drvPath in drv.drvPath
"#, "#,
); )
.unwrap();
match result { match result {
Value::String(s) => { Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path"); assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with(".drv"), "Should be a .drv file"); assert!(s.ends_with(".drv"), "Should be a .drv file");
} }
_ => panic!("Expected String, got {:?}", result), _ => panic!("Expected String, got {:?}", result),
@@ -176,8 +179,10 @@ fn context_in_derivation_args() {
#[test] #[test]
fn context_in_derivation_env() { fn context_in_derivation_env() {
let result = eval( let mut ctx = Context::new().unwrap();
r#" let result = ctx
.eval_code(
r#"
let let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; }; dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv = derivation { drv = derivation {
@@ -188,10 +193,11 @@ fn context_in_derivation_env() {
}; };
in drv.drvPath in drv.drvPath
"#, "#,
); )
.unwrap();
match result { match result {
Value::String(s) => { Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path"); assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with(".drv"), "Should be a .drv file"); assert!(s.ends_with(".drv"), "Should be a .drv file");
} }
_ => panic!("Expected String, got {:?}", result), _ => panic!("Expected String, got {:?}", result),
@@ -213,16 +219,19 @@ fn tostring_preserves_context() {
#[test] #[test]
fn interpolation_derivation_returns_outpath() { fn interpolation_derivation_returns_outpath() {
let result = eval( let mut ctx = Context::new().unwrap();
r#" let result = ctx
.eval_code(
r#"
let let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }; drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
in "${drv}" in "${drv}"
"#, "#,
); )
.unwrap();
match result { match result {
Value::String(s) => { Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path"); assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with("-test"), "Should end with derivation name"); assert!(s.ends_with("-test"), "Should end with derivation name");
} }
_ => panic!("Expected String, got {:?}", result), _ => panic!("Expected String, got {:?}", result),
@@ -332,6 +341,7 @@ fn substring_zero_length_empty_value() {
} }
#[test] #[test]
#[allow(non_snake_case)]
fn concatStringsSep_preserves_context() { fn concatStringsSep_preserves_context() {
let result = eval( let result = eval(
r#" r#"
@@ -348,6 +358,7 @@ fn concatStringsSep_preserves_context() {
} }
#[test] #[test]
#[allow(non_snake_case)]
fn concatStringsSep_merges_contexts() { fn concatStringsSep_merges_contexts() {
let result = eval( let result = eval(
r#" r#"
@@ -365,6 +376,7 @@ fn concatStringsSep_merges_contexts() {
} }
#[test] #[test]
#[allow(non_snake_case)]
fn concatStringsSep_separator_has_context() { fn concatStringsSep_separator_has_context() {
let result = eval( let result = eval(
r#" r#"