Compare commits

..

4 Commits

Author SHA1 Message Date
791c20660c feat: inspector 2026-02-16 13:14:50 +08:00
5c48e5cfdd feat: implement hash related primops 2026-02-15 19:55:29 +08:00
7836f8c869 refactor: handle derivation generation on Rust side 2026-02-15 19:38:11 +08:00
e357678d70 feat: implement realisePath 2026-02-15 18:26:24 +08:00
30 changed files with 1807 additions and 750 deletions

View File

@@ -3,5 +3,16 @@ vim.lsp.config("biome", {
on_dir(vim.fn.getcwd()) on_dir(vim.fn.getcwd())
end end
}) })
vim.lsp.config("rust_analyzer", {
settings = {
["rust-analyzer"] = {
cargo = {
features = {
"inspector"
}
}
}
}
})
return {} return {}

175
Cargo.lock generated
View File

@@ -47,12 +47,56 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
version = "0.6.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.13" version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "anstyle-parse"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
"anstyle",
"once_cell_polyfill",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.101" version = "1.0.101"
@@ -123,6 +167,12 @@ dependencies = [
"backtrace", "backtrace",
] ]
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.22.1" version = "0.22.1"
@@ -384,6 +434,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806" checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive",
] ]
[[package]] [[package]]
@@ -392,8 +443,22 @@ version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2" checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2"
dependencies = [ dependencies = [
"anstream",
"anstyle", "anstyle",
"clap_lex", "clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
] ]
[[package]] [[package]]
@@ -420,6 +485,12 @@ dependencies = [
"cc", "cc",
] ]
[[package]]
name = "colorchoice"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]] [[package]]
name = "combine" name = "combine"
version = "4.6.7" version = "4.6.7"
@@ -962,6 +1033,26 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fastwebsockets"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "305d3ba574508e27190906d11707dad683e0494e6b85eae9b044cb2734a5e422"
dependencies = [
"base64 0.21.7",
"bytes",
"http-body-util",
"hyper",
"hyper-util",
"pin-project",
"rand 0.8.5",
"sha1",
"simdutf8",
"thiserror 1.0.69",
"tokio",
"utf-8",
]
[[package]] [[package]]
name = "fd-lock" name = "fd-lock"
version = "4.0.4" version = "4.0.4"
@@ -1315,6 +1406,12 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "1.8.1" version = "1.8.1"
@@ -1328,6 +1425,7 @@ dependencies = [
"http", "http",
"http-body", "http-body",
"httparse", "httparse",
"httpdate",
"itoa", "itoa",
"pin-project-lite", "pin-project-lite",
"pin-utils", "pin-utils",
@@ -1358,7 +1456,7 @@ version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@@ -1578,6 +1676,12 @@ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.60.2",
] ]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.13.0" version = "0.13.0"
@@ -1781,6 +1885,12 @@ dependencies = [
"regex-automata", "regex-automata",
] ]
[[package]]
name = "md5"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0"
[[package]] [[package]]
name = "memchr" name = "memchr"
version = "2.8.0" version = "2.8.0"
@@ -1912,18 +2022,25 @@ name = "nix-js"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base64", "base64 0.22.1",
"bzip2", "bzip2",
"clap",
"criterion", "criterion",
"deno_core", "deno_core",
"deno_error", "deno_error",
"derive_more", "derive_more",
"dirs", "dirs",
"ere", "ere",
"fastwebsockets",
"flate2", "flate2",
"hashbrown 0.16.1", "hashbrown 0.16.1",
"hex", "hex",
"http",
"http-body-util",
"hyper",
"hyper-util",
"itertools 0.14.0", "itertools 0.14.0",
"md5",
"miette", "miette",
"mimalloc", "mimalloc",
"nix-compat", "nix-compat",
@@ -1939,6 +2056,7 @@ dependencies = [
"rustyline", "rustyline",
"serde", "serde",
"serde_json", "serde_json",
"sha1",
"sha2", "sha2",
"string-interner", "string-interner",
"tap", "tap",
@@ -1949,6 +2067,7 @@ dependencies = [
"toml", "toml",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
"uuid",
"xz2", "xz2",
] ]
@@ -2067,6 +2186,12 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "once_cell_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]] [[package]]
name = "oorandom" name = "oorandom"
version = "11.1.5" version = "11.1.5"
@@ -2359,6 +2484,8 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4", "rand_core 0.6.4",
] ]
@@ -2368,10 +2495,20 @@ version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [ dependencies = [
"rand_chacha", "rand_chacha 0.9.0",
"rand_core 0.9.5", "rand_core 0.9.5",
] ]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.4",
]
[[package]] [[package]]
name = "rand_chacha" name = "rand_chacha"
version = "0.9.0" version = "0.9.0"
@@ -2484,7 +2621,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@@ -2895,6 +3032,17 @@ dependencies = [
"v8", "v8",
] ]
[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]] [[package]]
name = "sha2" name = "sha2"
version = "0.10.9" version = "0.10.9"
@@ -2946,6 +3094,12 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
[[package]]
name = "simdutf8"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.4.12" version = "0.4.12"
@@ -3045,6 +3199,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72abeda133c49d7bddece6c154728f83eec8172380c80ab7096da9487e20d27c" checksum = "72abeda133c49d7bddece6c154728f83eec8172380c80ab7096da9487e20d27c"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]] [[package]]
name = "strum" name = "strum"
version = "0.27.2" version = "0.27.2"
@@ -3615,6 +3775,12 @@ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]] [[package]]
name = "utf8-ranges" name = "utf8-ranges"
version = "1.0.5" version = "1.0.5"
@@ -3639,6 +3805,7 @@ version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb"
dependencies = [ dependencies = [
"getrandom 0.4.1",
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]

View File

@@ -1,15 +1,23 @@
[no-exit-message] [no-exit-message]
@repl: @repl:
cargo run --bin repl cargo run -- repl
[no-exit-message] [no-exit-message]
@eval expr: @eval expr:
cargo run --bin eval -- '{{expr}}' cargo run -- eval '{{expr}}'
[no-exit-message] [no-exit-message]
@replr: @replr:
cargo run --bin repl --release cargo run --release -- repl
[no-exit-message] [no-exit-message]
@evalr expr: @evalr expr:
cargo run --bin eval --release -- '{{expr}}' cargo run --release -- eval '{{expr}}'
[no-exit-message]
@repli:
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 repl
[no-exit-message]
@evali expr:
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 eval '{{expr}}'

View File

@@ -34,6 +34,7 @@
just just
samply samply
jq jq
tokei
nodejs nodejs
nodePackages.npm nodePackages.npm

View File

@@ -14,6 +14,9 @@ nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", fe
anyhow = "1.0" anyhow = "1.0"
rustyline = "17.0" rustyline = "17.0"
# CLI
clap = { version = "4", features = ["derive"] }
# Logging # Logging
tracing = "0.1" tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-subscriber = { version = "0.3", features = ["env-filter"] }
@@ -36,6 +39,8 @@ deno_error = "0.7"
nix-nar = "0.3" nix-nar = "0.3"
sha2 = "0.10" sha2 = "0.10"
sha1 = "0.10"
md5 = "0.8"
hex = "0.4" hex = "0.4"
base64 = "0.22" base64 = "0.22"
@@ -61,6 +66,17 @@ ere = "0.2.4"
num_enum = "0.7.5" num_enum = "0.7.5"
tap = "1.0.1" tap = "1.0.1"
# Inspector (optional)
fastwebsockets = { version = "0.10", features = ["upgrade"], optional = true }
hyper = { version = "1", features = ["http1", "server"], optional = true }
hyper-util = { version = "0.1", features = ["tokio"], optional = true }
http-body-util = { version = "0.1", optional = true }
http = { version = "1", optional = true }
uuid = { version = "1", features = ["v4"], optional = true }
[features]
inspector = ["dep:fastwebsockets", "dep:hyper", "dep:hyper-util", "dep:http-body-util", "dep:http", "dep:uuid"]
[dev-dependencies] [dev-dependencies]
criterion = { version = "0.8", features = ["html_reports"] } criterion = { version = "0.8", features = ["html_reports"] }

View File

@@ -1,6 +1,7 @@
import { mkPos } from "../helpers";
import { createThunk } from "../thunk"; import { createThunk } from "../thunk";
import { forceAttrs, forceFunction, forceList, forceStringValue } from "../type-assert"; import { forceAttrs, forceFunction, forceList, forceStringValue } from "../type-assert";
import type { NixAttrs, NixList, NixValue } from "../types"; import { ATTR_POSITIONS, type NixAttrs, type NixList, type NixValue } from "../types";
export const attrNames = (set: NixValue): string[] => Object.keys(forceAttrs(set)).sort(); export const attrNames = (set: NixValue): string[] => Object.keys(forceAttrs(set)).sort();
@@ -139,7 +140,7 @@ export const unsafeGetAttrPos =
return null; return null;
} }
const positions = (attrs as NixAttrs & Record<symbol, unknown>)[Nix.ATTR_POSITIONS] as const positions = (attrs as NixAttrs & Record<symbol, unknown>)[ATTR_POSITIONS] as
| Record<string, string> | Record<string, string>
| undefined; | undefined;
if (!positions || !(name in positions)) { if (!positions || !(name in positions)) {
@@ -147,5 +148,5 @@ export const unsafeGetAttrPos =
} }
const span = positions[name]; const span = positions[name];
return Nix.mkPos(span); return mkPos(span);
}; };

View File

@@ -86,7 +86,7 @@ export const coerceToString = (
value: NixValue, value: NixValue,
mode: StringCoercionMode, mode: StringCoercionMode,
copyToStore: boolean = false, copyToStore: boolean = false,
outContext?: NixStringContext, outContext: NixStringContext,
): string => { ): string => {
const v = force(value); const v = force(value);
@@ -96,11 +96,9 @@ export const coerceToString = (
} }
if (isStringWithContext(v)) { if (isStringWithContext(v)) {
if (outContext) {
for (const elem of v.context) { for (const elem of v.context) {
outContext.add(elem); outContext.add(elem);
} }
}
return v.value; return v.value;
} }
@@ -109,9 +107,7 @@ export const coerceToString = (
if (copyToStore) { if (copyToStore) {
const pathStr = v.value; const pathStr = v.value;
const storePath = Deno.core.ops.op_copy_path_to_store(pathStr); const storePath = Deno.core.ops.op_copy_path_to_store(pathStr);
if (outContext) {
outContext.add(storePath); outContext.add(storePath);
}
return storePath; return storePath;
} }
return v.value; return v.value;
@@ -253,7 +249,7 @@ export const coerceToStringWithContext = (
* - Returns the path string (not a NixPath object) * - Returns the path string (not a NixPath object)
* - Preserves string context if present * - Preserves string context if present
*/ */
export const coerceToPath = (value: NixValue, outContext?: NixStringContext): string => { export const coerceToPath = (value: NixValue, outContext: NixStringContext): string => {
const forced = force(value); const forced = force(value);
if (isPath(forced)) { if (isPath(forced)) {
@@ -347,11 +343,9 @@ export const nixValueToJson = (
return result; return result;
} }
if (isStringWithContext(result)) { if (isStringWithContext(result)) {
if (outContext) {
for (const elem of result.context) { for (const elem of result.context) {
outContext.add(elem); outContext.add(elem);
} }
}
return result.value; return result.value;
} }
return nixValueToJson(result, strict, outContext, copyToStore, seen); return nixValueToJson(result, strict, outContext, copyToStore, seen);

View File

@@ -1,7 +1,6 @@
import { import {
addBuiltContext, addBuiltContext,
addDrvDeepContext, addDrvDeepContext,
extractInputDrvsAndSrcs,
mkStringWithContext, mkStringWithContext,
type NixStringContext, type NixStringContext,
} from "../string-context"; } from "../string-context";
@@ -10,8 +9,6 @@ import { forceAttrs, forceList, forceStringNoCtx, forceStringValue } from "../ty
import type { NixAttrs, NixValue } from "../types"; import type { NixAttrs, NixValue } from "../types";
import { coerceToString, nixValueToJson, StringCoercionMode } from "./conversion"; import { coerceToString, nixValueToJson, StringCoercionMode } from "./conversion";
const drvHashCache = new Map<string, string>();
export interface OutputInfo { export interface OutputInfo {
path: string; path: string;
hashAlgo: string; hashAlgo: string;
@@ -190,13 +187,6 @@ const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] =>
return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext)); return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext));
}; };
const outputPathName = (drvName: string, output: string) => {
if (output === "out") {
return drvName;
}
return `${drvName}-${output}`;
};
const structuredAttrsExcludedKeys = new Set([ const structuredAttrsExcludedKeys = new Set([
"__structuredAttrs", "__structuredAttrs",
"__ignoreNulls", "__ignoreNulls",
@@ -369,134 +359,33 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const drvArgs = extractArgs(attrs, collectedContext); const drvArgs = extractArgs(attrs, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName); const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);
const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext); const envEntries: [string, string][] = Array.from(env.entries());
const contextArray: string[] = Array.from(collectedContext);
const collectDrvReferences = (): string[] => { const rustResult: {
const refs = new Set<string>(); drvPath: string;
for (const src of inputSrcs) { outputs: [string, string][];
refs.add(src); } = Deno.core.ops.op_finalize_derivation({
}
for (const drvPath of inputDrvs.keys()) {
refs.add(drvPath);
}
return Array.from(refs).sort();
};
let outputInfos: Map<string, OutputInfo>;
let drvPath: string;
if (fixedOutputInfo) {
const pathName = outputPathName(drvName, "out");
const outPath = Deno.core.ops.op_make_fixed_output_path(
fixedOutputInfo.hashAlgo,
fixedOutputInfo.hash,
fixedOutputInfo.hashMode,
pathName,
);
const hashAlgoPrefix = fixedOutputInfo.hashMode === "recursive" ? "r:" : "";
outputInfos = new Map([
[
"out",
{
path: outPath,
hashAlgo: hashAlgoPrefix + fixedOutputInfo.hashAlgo,
hash: fixedOutputInfo.hash,
},
],
]);
env.set("out", outPath);
const finalDrv: DerivationData = {
name: drvName, name: drvName,
outputs: outputInfos,
inputDrvs,
inputSrcs,
platform,
builder, builder,
args: drvArgs,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const fixedHashFingerprint = `fixed:out:${hashAlgoPrefix}${fixedOutputInfo.hashAlgo}:${fixedOutputInfo.hash}:${outPath}`;
const fixedModuloHash = Deno.core.ops.op_sha256_hex(fixedHashFingerprint);
drvHashCache.set(drvPath, fixedModuloHash);
} else {
const maskedOutputs = new Map<string, OutputInfo>(
outputs.map((o) => [
o,
{
path: "",
hashAlgo: "",
hash: "",
},
]),
);
const maskedEnv = new Map(env);
for (const output of outputs) {
maskedEnv.set(output, "");
}
const maskedDrv: DerivationData = {
name: drvName,
outputs: maskedOutputs,
inputDrvs,
inputSrcs,
platform, platform,
builder, outputs,
args: drvArgs, args: drvArgs,
env: maskedEnv, env: envEntries,
}; context: contextArray,
fixedOutput: fixedOutputInfo,
const inputDrvHashes = new Map<string, string>();
for (const [drvPath, outputNames] of inputDrvs) {
const cachedHash = drvHashCache.get(drvPath);
if (!cachedHash) {
throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
}
inputDrvHashes.set(cachedHash, Array.from(outputNames).join(","));
}
const maskedAterm = generateAtermModulo(maskedDrv, inputDrvHashes);
const drvModuloHash = Deno.core.ops.op_sha256_hex(maskedAterm);
outputInfos = new Map<string, OutputInfo>();
for (const outputName of outputs) {
const pathName = outputPathName(drvName, outputName);
const outPath = Deno.core.ops.op_make_store_path(`output:${outputName}`, drvModuloHash, pathName);
outputInfos.set(outputName, {
path: outPath,
hashAlgo: "",
hash: "",
}); });
env.set(outputName, outPath);
}
const finalDrv: DerivationData = {
...maskedDrv,
outputs: outputInfos,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const finalAtermModulo = generateAtermModulo(finalDrv, inputDrvHashes);
const cachedModuloHash = Deno.core.ops.op_sha256_hex(finalAtermModulo);
drvHashCache.set(drvPath, cachedModuloHash);
}
const result: NixAttrs = {}; const result: NixAttrs = {};
const drvPathContext = new Set<string>(); const drvPathContext = new Set<string>();
addDrvDeepContext(drvPathContext, drvPath); addDrvDeepContext(drvPathContext, rustResult.drvPath);
result.drvPath = mkStringWithContext(drvPath, drvPathContext); result.drvPath = mkStringWithContext(rustResult.drvPath, drvPathContext);
for (const [outputName, outputInfo] of outputInfos.entries()) { for (const [outputName, outputPath] of rustResult.outputs) {
const outputContext = new Set<string>(); const outputContext = new Set<string>();
addBuiltContext(outputContext, drvPath, outputName); addBuiltContext(outputContext, rustResult.drvPath, outputName);
result[outputName] = mkStringWithContext(outputInfo.path, outputContext); result[outputName] = mkStringWithContext(outputPath, outputContext);
} }
return result; return result;

View File

@@ -41,7 +41,7 @@ export const abort = (s: NixValue): never => {
}; };
export const throwFunc = (s: NixValue): never => { export const throwFunc = (s: NixValue): never => {
throw new CatchableError(coerceToString(s, StringCoercionMode.Base)); throw new CatchableError(coerceToString(s, StringCoercionMode.Base, false, new Set()));
}; };
export const trace = export const trace =

View File

@@ -1,19 +1,37 @@
import { forceStringNoCtx } from "../type-assert"; import { forceAttrs, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixValue } from "../types"; import type { NixValue } from "../types";
import { realisePath } from "./io";
export const hashFile = export const hashFile =
(type: NixValue) => (type: NixValue) =>
(_p: NixValue): never => { (p: NixValue): string => {
const _ty = forceStringNoCtx(type); const algo = forceStringNoCtx(type);
throw new Error("Not implemented: hashFile"); const pathStr = realisePath(p);
return Deno.core.ops.op_hash_file(algo, pathStr);
}; };
export const hashString = export const hashString =
(_type: NixValue) => (type: NixValue) =>
(_p: NixValue): never => { (s: NixValue): string => {
throw new Error("Not implemented: hashString"); const algo = forceStringNoCtx(type);
const data = forceStringValue(s);
return Deno.core.ops.op_hash_string(algo, data);
}; };
export const convertHash = (_args: NixValue): never => { export const convertHash = (args: NixValue): string => {
throw new Error("Not implemented: convertHash"); const attrs = forceAttrs(args);
const hash = forceStringNoCtx(attrs.hash);
let hashAlgo: string | null = null;
if ("hashAlgo" in attrs) {
hashAlgo = forceStringNoCtx(attrs.hashAlgo);
}
const toHashFormat = forceStringNoCtx(attrs.toHashFormat);
return Deno.core.ops.op_convert_hash({
hash,
hashAlgo,
toHashFormat,
});
}; };

View File

@@ -1,6 +1,6 @@
import { getPathValue } from "../path"; import { getPathValue } from "../path";
import type { NixStringContext, StringWithContext } from "../string-context"; import type { NixStringContext, StringWithContext } from "../string-context";
import { addOpaqueContext, mkStringWithContext } from "../string-context"; import { addOpaqueContext, decodeContextElem, mkStringWithContext } from "../string-context";
import { force } from "../thunk"; import { force } from "../thunk";
import { import {
forceAttrs, forceAttrs,
@@ -14,32 +14,40 @@ import type { NixAttrs, NixPath, NixString, NixValue } from "../types";
import { CatchableError, IS_PATH, isNixPath } from "../types"; import { CatchableError, IS_PATH, isNixPath } from "../types";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion"; import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { baseNameOf } from "./path"; import { baseNameOf } from "./path";
import { isAttrs, isPath } from "./type-check"; import { isAttrs, isPath, isString } from "./type-check";
const importCache = new Map<string, NixValue>(); const importCache = new Map<string, NixValue>();
export const importFunc = (path: NixValue): NixValue => { const realiseContext = (context: NixStringContext): void => {
const context: NixStringContext = new Set(); for (const encoded of context) {
const pathStr = coerceToPath(path, context); const elem = decodeContextElem(encoded);
if (elem.type === "built") {
// FIXME: Context collected but not yet propagated to build system throw new Error(
// This means derivation dependencies from imported paths are not `cannot build derivation '${elem.drvPath}' during evaluation because import-from-derivation is not supported`,
// currently tracked. This will cause issues when:
// 1. Importing from derivation outputs: import "${drv}/file.nix"
// 2. Building packages that depend on imported configurations
if (context.size > 0) {
console.warn(
`[WARN] import: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
); );
} }
}
};
export const realisePath = (value: NixValue): string => {
const context: NixStringContext = new Set();
const pathStr = coerceToPath(value, context);
if (context.size > 0) {
realiseContext(context);
}
return pathStr;
};
export const importFunc = (path: NixValue): NixValue => {
const pathStr = realisePath(path);
const cached = importCache.get(pathStr); const cached = importCache.get(pathStr);
if (cached !== undefined) { if (cached !== undefined) {
return cached; return cached;
} }
// Call Rust op - returns JS code string
const code = Deno.core.ops.op_import(pathStr); const code = Deno.core.ops.op_import(pathStr);
const result = Function(`return (${code})`)(); const result = Function(`return (${code})`)();
@@ -53,15 +61,7 @@ export const scopedImport =
const scopeAttrs = forceAttrs(scope); const scopeAttrs = forceAttrs(scope);
const scopeKeys = Object.keys(scopeAttrs); const scopeKeys = Object.keys(scopeAttrs);
const context: NixStringContext = new Set(); const pathStr = realisePath(path);
const pathStr = coerceToPath(path, context);
if (context.size > 0) {
console.warn(
`[WARN] scopedImport: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
);
}
const code = Deno.core.ops.op_scoped_import(pathStr, scopeKeys); const code = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
@@ -169,9 +169,10 @@ export const fetchTarball = (args: NixValue): NixString => {
export const fetchGit = (args: NixValue): NixAttrs => { export const fetchGit = (args: NixValue): NixAttrs => {
const forced = force(args); const forced = force(args);
if (typeof forced === "string" || isPath(forced)) { const disposedContext: NixStringContext = new Set();
const path = coerceToPath(forced); if (isString(forced) || isPath(forced)) {
const result: FetchGitResult = Deno.core.ops.op_fetch_git(path, null, null, false, false, false, null); const url = coerceToString(forced, StringCoercionMode.Base, false, disposedContext);
const result = Deno.core.ops.op_fetch_git(url, null, null, false, false, false, null);
const outContext: NixStringContext = new Set(); const outContext: NixStringContext = new Set();
addOpaqueContext(outContext, result.out_path); addOpaqueContext(outContext, result.out_path);
return { return {
@@ -303,7 +304,7 @@ const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
}; };
export const readDir = (path: NixValue): NixAttrs => { export const readDir = (path: NixValue): NixAttrs => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
const entries: Record<string, string> = Deno.core.ops.op_read_dir(pathStr); const entries: Record<string, string> = Deno.core.ops.op_read_dir(pathStr);
const result: NixAttrs = {}; const result: NixAttrs = {};
for (const [name, type] of Object.entries(entries)) { for (const [name, type] of Object.entries(entries)) {
@@ -313,18 +314,22 @@ export const readDir = (path: NixValue): NixAttrs => {
}; };
export const readFile = (path: NixValue): string => { export const readFile = (path: NixValue): string => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
return Deno.core.ops.op_read_file(pathStr); return Deno.core.ops.op_read_file(pathStr);
}; };
export const readFileType = (path: NixValue): string => { export const readFileType = (path: NixValue): string => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
return Deno.core.ops.op_read_file_type(pathStr); return Deno.core.ops.op_read_file_type(pathStr);
}; };
export const pathExists = (path: NixValue): boolean => { export const pathExists = (path: NixValue): boolean => {
const pathStr = coerceToPath(path); try {
const pathStr = realisePath(path);
return Deno.core.ops.op_path_exists(pathStr); return Deno.core.ops.op_path_exists(pathStr);
} catch {
return false;
}
}; };
/** /**

View File

@@ -209,7 +209,7 @@ export const genericClosure = (args: NixValue): NixValue => {
export const outputOf = export const outputOf =
(_drv: NixValue) => (_drv: NixValue) =>
(_out: NixValue): never => { (_out: NixValue): never => {
throw new Error("Not implemented: outputOf"); throw new Error("Not implemented: outputOf (part of dynamic-derivation)");
}; };
export const parseDrvName = (s: NixValue): NixAttrs => { export const parseDrvName = (s: NixValue): NixAttrs => {
@@ -320,8 +320,9 @@ export const splitVersion = (s: NixValue): NixValue => {
return components; return components;
}; };
export const traceVerbose = (_e1: NixValue, _e2: NixValue): never => { export const traceVerbose = (_e1: NixValue, e2: NixValue): NixStrictValue => {
throw new Error("Not implemented: traceVerbose"); // TODO: implement traceVerbose
return force(e2)
}; };
export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue } => { export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue } => {

View File

@@ -2,7 +2,7 @@ import { mkPath } from "../path";
import { mkStringWithContext, type NixStringContext } from "../string-context"; import { mkStringWithContext, type NixStringContext } from "../string-context";
import { force } from "../thunk"; import { force } from "../thunk";
import type { NixPath, NixString, NixValue } from "../types"; import type { NixPath, NixString, NixValue } from "../types";
import { isNixPath, isStringWithContext } from "../types"; import { isNixPath } from "../types";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion"; import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
/** /**
@@ -86,21 +86,8 @@ export const dirOf = (s: NixValue): NixPath | NixString => {
} }
// String input → string output // String input → string output
const strValue: NixString = coerceToString(s, StringCoercionMode.Base, false) as NixString; const outContext: NixStringContext = new Set();
const pathStr = coerceToString(s, StringCoercionMode.Base, false, outContext);
let pathStr: string;
let hasContext = false;
let originalContext: Set<string> | undefined;
if (typeof strValue === "string") {
pathStr = strValue;
} else if (isStringWithContext(strValue)) {
pathStr = strValue.value;
hasContext = strValue.context.size > 0;
originalContext = strValue.context;
} else {
pathStr = strValue as string;
}
const lastSlash = pathStr.lastIndexOf("/"); const lastSlash = pathStr.lastIndexOf("/");
@@ -113,9 +100,8 @@ export const dirOf = (s: NixValue): NixPath | NixString => {
const result = pathStr.slice(0, lastSlash); const result = pathStr.slice(0, lastSlash);
// Preserve string context if present if (outContext.size > 0) {
if (hasContext && originalContext) { return mkStringWithContext(result, outContext);
return mkStringWithContext(result, originalContext);
} }
return result; return result;

View File

@@ -146,52 +146,3 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
return result; return result;
}; };
export const extractInputDrvsAndSrcs = (
context: NixStringContext,
): { inputDrvs: Map<string, Set<string>>; inputSrcs: Set<string> } => {
const inputDrvs = new Map<string, Set<string>>();
const inputSrcs = new Set<string>();
for (const encoded of context) {
const elem = decodeContextElem(encoded);
switch (elem.type) {
case "opaque":
inputSrcs.add(elem.path);
break;
case "drvDeep": {
const closure: {
input_drvs: [string, string[]][];
input_srcs: string[];
} = Deno.core.ops.op_compute_fs_closure(elem.drvPath);
for (const src of closure.input_srcs) {
inputSrcs.add(src);
}
for (const [drvPath, outputs] of closure.input_drvs) {
let existingOutputs = inputDrvs.get(drvPath);
if (!existingOutputs) {
existingOutputs = new Set<string>();
inputDrvs.set(drvPath, existingOutputs);
}
for (const output of outputs) {
existingOutputs.add(output);
}
}
break;
}
case "built": {
let outputs = inputDrvs.get(elem.drvPath);
if (!outputs) {
outputs = new Set<string>();
inputDrvs.set(elem.drvPath, outputs);
}
outputs.add(elem.output);
break;
}
}
}
return { inputDrvs, inputSrcs };
};

View File

@@ -1,33 +1,77 @@
import type { NixRuntime } from ".."; import type { NixRuntime } from "..";
import type { FetchTarballResult, FetchUrlResult, FetchGitResult } from "../builtins/io"; import type { FetchGitResult, FetchTarballResult, FetchUrlResult } from "../builtins/io";
declare global { declare global {
var Nix: NixRuntime; var Nix: NixRuntime;
namespace Deno { namespace Deno {
namespace core { namespace core {
namespace ops { namespace ops {
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string; function op_import(path: string): string;
function op_scoped_import(path: string, scopeKeys: string[]): string; function op_scoped_import(path: string, scopeKeys: string[]): string;
function op_resolve_path(currentDir: string, path: string): string;
function op_read_file(path: string): string; function op_read_file(path: string): string;
function op_read_file_type(path: string): string; function op_read_file_type(path: string): string;
function op_read_dir(path: string): Record<string, string>; function op_read_dir(path: string): Record<string, string>;
function op_path_exists(path: string): boolean; function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string; function op_walk_dir(path: string): [string, string][];
function op_make_placeholder(output: string): string; function op_make_placeholder(output: string): string;
function op_store_path(path: string): string;
function op_convert_hash(input: {
hash: string;
hashAlgo: string | null;
toHashFormat: string;
}): string;
function op_hash_string(algo: string, data: string): string;
function op_hash_file(algo: string, path: string): string;
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
function op_add_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
): string;
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
includePaths: string[],
): string;
function op_decode_span(span: string): { function op_decode_span(span: string): {
file: string | null; file: string | null;
line: number | null; line: number | null;
column: number | null; column: number | null;
}; };
function op_make_store_path(ty: string, hashHex: string, name: string): string;
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string }; function op_to_file(name: string, contents: string, references: string[]): string;
function op_make_fixed_output_path(
hashAlgo: string, function op_copy_path_to_store(path: string): string;
hash: string,
hashMode: string, function op_get_env(key: string): string;
name: string,
): string; function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): unknown;
function op_from_toml(toml: string): unknown;
function op_finalize_derivation(input: {
name: string;
builder: string;
platform: string;
outputs: string[];
args: string[];
env: [string, string][];
context: string[];
fixedOutput: { hashAlgo: string; hash: string; hashMode: string } | null;
}): { drvPath: string; outputs: [string, string][] };
function op_fetch_url( function op_fetch_url(
url: string, url: string,
expectedHash: string | null, expectedHash: string | null,
@@ -48,34 +92,6 @@ declare global {
allRefs: boolean, allRefs: boolean,
name: string | null, name: string | null,
): FetchGitResult; ): FetchGitResult;
function op_add_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_write_derivation(drvName: string, aterm: string, references: string[]): string;
function op_read_derivation_outputs(drvPath: string): string[];
function op_compute_fs_closure(drvPath: string): {
input_drvs: [string, string[]][];
input_srcs: string[];
};
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
includePaths: string[],
): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): unknown;
function op_from_toml(toml: string): unknown;
} }
} }
} }

View File

@@ -1,26 +0,0 @@
use anyhow::Result;
use nix_js::{context::Context, error::Source};
use std::process::exit;
fn main() -> Result<()> {
nix_js::logging::init_logging();
let mut args = std::env::args();
if args.len() != 2 {
eprintln!("Usage: {} expr", args.next().unwrap());
exit(1);
}
args.next();
let expr = args.next().unwrap();
let src = Source::new_eval(expr)?;
match Context::new()?.eval(src) {
Ok(value) => {
println!("{value}");
Ok(())
}
Err(err) => {
eprintln!("{:?}", miette::Report::new(*err));
exit(1);
}
}
}

View File

@@ -36,9 +36,7 @@ pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
} }
code!(&mut buf, ctx; code!(&mut buf, ctx;
"Nix.builtins.storeDir=" "const __currentDir="
quoted(ctx.get_store_dir())
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string()) quoted(&ctx.get_current_dir().display().to_string())
";const __with=null;return " ";const __with=null;return "
expr expr
@@ -57,9 +55,7 @@ pub(crate) fn compile_scoped(expr: &Ir, ctx: &impl CodegenContext) -> String {
} }
code!(&mut buf, ctx; code!(&mut buf, ctx;
"Nix.builtins.storeDir=" "const __currentDir="
quoted(ctx.get_store_dir())
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string()) quoted(&ctx.get_current_dir().display().to_string())
";return " ";return "
expr expr

View File

@@ -47,6 +47,8 @@ fn handle_parse_error<'a>(
pub struct Context { pub struct Context {
ctx: Ctx, ctx: Ctx,
runtime: Runtime<Ctx>, runtime: Runtime<Ctx>,
#[cfg(feature = "inspector")]
_inspector_server: Option<crate::runtime::inspector::InspectorServer>,
} }
macro_rules! eval { macro_rules! eval {
@@ -66,15 +68,51 @@ macro_rules! eval {
impl Context { impl Context {
pub fn new() -> Result<Self> { pub fn new() -> Result<Self> {
let ctx = Ctx::new()?; let ctx = Ctx::new()?;
#[cfg(feature = "inspector")]
let runtime = Runtime::new(Default::default())?;
#[cfg(not(feature = "inspector"))]
let runtime = Runtime::new()?; let runtime = Runtime::new()?;
let mut context = Self { ctx, runtime }; let mut context = Self {
context.init_derivation()?; ctx,
runtime,
#[cfg(feature = "inspector")]
_inspector_server: None,
};
context.init()?;
Ok(context) Ok(context)
} }
fn init_derivation(&mut self) -> Result<()> { #[cfg(feature = "inspector")]
pub fn new_with_inspector(addr: std::net::SocketAddr, wait_for_session: bool) -> Result<Self> {
use crate::runtime::InspectorOptions;
let ctx = Ctx::new()?;
let runtime = Runtime::new(InspectorOptions {
enable: true,
wait: wait_for_session,
})?;
let server = crate::runtime::inspector::InspectorServer::new(addr, "nix-js")
.map_err(|e| Error::internal(e.to_string()))?;
server.register_inspector("nix-js".to_string(), runtime.inspector(), wait_for_session);
let mut context = Self {
ctx,
runtime,
_inspector_server: Some(server),
};
context.init()?;
Ok(context)
}
#[cfg(feature = "inspector")]
pub fn wait_for_inspector_disconnect(&mut self) {
self.runtime.wait_for_inspector_disconnect();
}
fn init(&mut self) -> Result<()> {
const DERIVATION_NIX: &str = include_str!("runtime/corepkgs/derivation.nix"); const DERIVATION_NIX: &str = include_str!("runtime/corepkgs/derivation.nix");
let source = Source::new_virtual( let source = Source::new_virtual(
"<nix/derivation-internal.nix>".into(), "<nix/derivation-internal.nix>".into(),
@@ -82,7 +120,7 @@ impl Context {
); );
let code = self.ctx.compile(source, None)?; let code = self.ctx.compile(source, None)?;
self.runtime self.runtime
.eval(format!("Nix.builtins.derivation = {}", code), &mut self.ctx)?; .eval(format!("Nix.builtins.derivation = {};Nix.builtins.storeDir=\"{}\"", code, self.get_store_dir()), &mut self.ctx)?;
Ok(()) Ok(())
} }

142
nix-js/src/derivation.rs Normal file
View File

@@ -0,0 +1,142 @@
use std::collections::{BTreeMap, BTreeSet};
pub struct OutputInfo {
pub path: String,
pub hash_algo: String,
pub hash: String,
}
pub struct DerivationData {
pub name: String,
pub outputs: BTreeMap<String, OutputInfo>,
pub input_drvs: BTreeMap<String, BTreeSet<String>>,
pub input_srcs: BTreeSet<String>,
pub platform: String,
pub builder: String,
pub args: Vec<String>,
pub env: BTreeMap<String, String>,
}
fn escape_string(s: &str) -> String {
let mut result = String::with_capacity(s.len() + 2);
result.push('"');
for c in s.chars() {
match c {
'"' => result.push_str("\\\""),
'\\' => result.push_str("\\\\"),
'\n' => result.push_str("\\n"),
'\r' => result.push_str("\\r"),
'\t' => result.push_str("\\t"),
_ => result.push(c),
}
}
result.push('"');
result
}
fn quote_string(s: &str) -> String {
format!("\"{}\"", s)
}
impl DerivationData {
pub fn generate_aterm(&self) -> String {
let mut output_entries = Vec::new();
for (name, info) in &self.outputs {
output_entries.push(format!(
"({},{},{},{})",
quote_string(name),
quote_string(&info.path),
quote_string(&info.hash_algo),
quote_string(&info.hash),
));
}
let outputs = output_entries.join(",");
let mut input_drv_entries = Vec::new();
for (drv_path, output_names) in &self.input_drvs {
let sorted_outs: Vec<String> = output_names.iter().map(|s| quote_string(s)).collect();
let out_list = format!("[{}]", sorted_outs.join(","));
input_drv_entries.push(format!("({},{})", quote_string(drv_path), out_list));
}
let input_drvs = input_drv_entries.join(",");
let input_srcs: Vec<String> = self.input_srcs.iter().map(|s| quote_string(s)).collect();
let input_srcs = input_srcs.join(",");
let args: Vec<String> = self.args.iter().map(|s| escape_string(s)).collect();
let args = args.join(",");
let mut env_entries: Vec<String> = Vec::new();
for (k, v) in &self.env {
env_entries.push(format!("({},{})", escape_string(k), escape_string(v)));
}
format!(
"Derive([{}],[{}],[{}],{},{},[{}],[{}])",
outputs,
input_drvs,
input_srcs,
quote_string(&self.platform),
escape_string(&self.builder),
args,
env_entries.join(","),
)
}
pub fn generate_aterm_modulo(&self, input_drv_hashes: &BTreeMap<String, String>) -> String {
let mut output_entries = Vec::new();
for (name, info) in &self.outputs {
output_entries.push(format!(
"({},{},{},{})",
quote_string(name),
quote_string(&info.path),
quote_string(&info.hash_algo),
quote_string(&info.hash),
));
}
let outputs = output_entries.join(",");
let mut input_drv_entries = Vec::new();
for (drv_hash, outputs_csv) in input_drv_hashes {
let mut sorted_outs: Vec<&str> = outputs_csv.split(',').collect();
sorted_outs.sort();
let out_list: Vec<String> = sorted_outs.iter().map(|s| quote_string(s)).collect();
let out_list = format!("[{}]", out_list.join(","));
input_drv_entries.push(format!("({},{})", quote_string(drv_hash), out_list));
}
let input_drvs = input_drv_entries.join(",");
let input_srcs: Vec<String> = self.input_srcs.iter().map(|s| quote_string(s)).collect();
let input_srcs = input_srcs.join(",");
let args: Vec<String> = self.args.iter().map(|s| escape_string(s)).collect();
let args = args.join(",");
let mut env_entries: Vec<String> = Vec::new();
for (k, v) in &self.env {
env_entries.push(format!("({},{})", escape_string(k), escape_string(v)));
}
format!(
"Derive([{}],[{}],[{}],{},{},[{}],[{}])",
outputs,
input_drvs,
input_srcs,
quote_string(&self.platform),
escape_string(&self.builder),
args,
env_entries.join(","),
)
}
pub fn collect_references(&self) -> Vec<String> {
let mut refs = BTreeSet::new();
for src in &self.input_srcs {
refs.insert(src.clone());
}
for drv_path in self.input_drvs.keys() {
refs.insert(drv_path.clone());
}
refs.into_iter().collect()
}
}

View File

@@ -6,6 +6,7 @@ pub mod logging;
pub mod value; pub mod value;
mod codegen; mod codegen;
mod derivation;
mod downgrade; mod downgrade;
mod fetcher; mod fetcher;
mod ir; mod ir;
@@ -13,6 +14,7 @@ mod nar;
mod nix_utils; mod nix_utils;
mod runtime; mod runtime;
mod store; mod store;
mod string_context;
#[global_allocator] #[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

View File

@@ -1,15 +1,73 @@
use std::process::exit;
use anyhow::Result; use anyhow::Result;
use clap::{Parser, Subcommand};
use hashbrown::HashSet; use hashbrown::HashSet;
use nix_js::context::Context; use nix_js::context::Context;
use nix_js::error::Source; use nix_js::error::Source;
use rustyline::DefaultEditor; use rustyline::DefaultEditor;
use rustyline::error::ReadlineError; use rustyline::error::ReadlineError;
fn main() -> Result<()> { #[derive(Parser)]
nix_js::logging::init_logging(); #[command(name = "nix-js", about = "Nix expression evaluator")]
struct Cli {
#[cfg(feature = "inspector")]
#[arg(long, value_name = "HOST:PORT", num_args = 0..=1, default_missing_value = "127.0.0.1:9229")]
inspect: Option<String>,
#[cfg(feature = "inspector")]
#[arg(long, value_name = "HOST:PORT", num_args = 0..=1, default_missing_value = "127.0.0.1:9229")]
inspect_brk: Option<String>,
#[command(subcommand)]
command: Command,
}
#[derive(Subcommand)]
enum Command {
Eval { expr: String },
Repl,
}
fn create_context(#[cfg(feature = "inspector")] cli: &Cli) -> Result<Context> {
#[cfg(feature = "inspector")]
{
let (addr_str, wait) = if let Some(ref addr) = cli.inspect_brk {
(Some(addr.as_str()), true)
} else if let Some(ref addr) = cli.inspect {
(Some(addr.as_str()), false)
} else {
(None, false)
};
if let Some(addr_str) = addr_str {
let addr: std::net::SocketAddr = addr_str
.parse()
.map_err(|e| anyhow::anyhow!("invalid inspector address '{}': {}", addr_str, e))?;
return Ok(Context::new_with_inspector(addr, wait)?);
}
}
Ok(Context::new()?)
}
fn run_eval(context: &mut Context, expr: String) -> Result<()> {
let src = Source::new_eval(expr)?;
match context.eval(src) {
Ok(value) => {
println!("{value}");
}
Err(err) => {
eprintln!("{:?}", miette::Report::new(*err));
exit(1);
}
};
#[cfg(feature = "inspector")]
context.wait_for_inspector_disconnect();
Ok(())
}
fn run_repl(context: &mut Context) -> Result<()> {
let mut rl = DefaultEditor::new()?; let mut rl = DefaultEditor::new()?;
let mut context = Context::new()?;
let mut scope = HashSet::new(); let mut scope = HashSet::new();
const RE: ere::Regex<3> = ere::compile_regex!("^[ \t]*([a-zA-Z_][a-zA-Z0-9_'-]*)[ \t]*(.*)$"); const RE: ere::Regex<3> = ere::compile_regex!("^[ \t]*([a-zA-Z_][a-zA-Z0-9_'-]*)[ \t]*(.*)$");
loop { loop {
@@ -61,3 +119,19 @@ fn main() -> Result<()> {
} }
Ok(()) Ok(())
} }
fn main() -> Result<()> {
nix_js::logging::init_logging();
let cli = Cli::parse();
let mut context = create_context(
#[cfg(feature = "inspector")]
&cli,
)?;
match cli.command {
Command::Eval { expr } => run_eval(&mut context, expr),
Command::Repl => run_repl(&mut context),
}
}

View File

@@ -2,12 +2,16 @@ use std::borrow::Cow;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::path::Path; use std::path::Path;
#[cfg(feature = "inspector")]
use deno_core::PollEventLoopOptions;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8}; use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
use crate::error::{Error, Result, Source}; use crate::error::{Error, Result, Source};
use crate::store::DaemonStore; use crate::store::DaemonStore;
use crate::value::{AttrSet, List, Symbol, Value}; use crate::value::{AttrSet, List, Symbol, Value};
#[cfg(feature = "inspector")]
pub(crate) mod inspector;
mod ops; mod ops;
use ops::*; use ops::*;
@@ -46,31 +50,29 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
let mut ops = vec![ let mut ops = vec![
op_import::<Ctx>(), op_import::<Ctx>(),
op_scoped_import::<Ctx>(), op_scoped_import::<Ctx>(),
op_resolve_path(),
op_read_file(), op_read_file(),
op_read_file_type(), op_read_file_type(),
op_read_dir(), op_read_dir(),
op_path_exists(), op_path_exists(),
op_resolve_path(), op_walk_dir(),
op_sha256_hex(),
op_make_placeholder(), op_make_placeholder(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(),
op_parse_hash(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
op_store_path::<Ctx>(), op_store_path::<Ctx>(),
op_convert_hash(),
op_hash_string(),
op_hash_file(),
op_parse_hash(),
op_add_path::<Ctx>(),
op_add_filtered_path::<Ctx>(),
op_decode_span::<Ctx>(),
op_to_file::<Ctx>(), op_to_file::<Ctx>(),
op_write_derivation::<Ctx>(),
op_read_derivation_outputs(),
op_compute_fs_closure(),
op_copy_path_to_store::<Ctx>(), op_copy_path_to_store::<Ctx>(),
op_get_env(), op_get_env(),
op_walk_dir(),
op_add_filtered_path::<Ctx>(),
op_match(), op_match(),
op_split(), op_split(),
op_from_json(), op_from_json(),
op_from_toml(), op_from_toml(),
op_finalize_derivation::<Ctx>(),
]; ];
ops.extend(crate::fetcher::register_ops::<Ctx>()); ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -86,7 +88,6 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
mod private { mod private {
use deno_error::js_error_wrapper; use deno_error::js_error_wrapper;
#[allow(dead_code)]
#[derive(Debug)] #[derive(Debug)]
pub struct SimpleErrorWrapper(pub(crate) String); pub struct SimpleErrorWrapper(pub(crate) String);
impl std::fmt::Display for SimpleErrorWrapper { impl std::fmt::Display for SimpleErrorWrapper {
@@ -113,6 +114,9 @@ pub(crate) use private::NixRuntimeError;
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime, js_runtime: JsRuntime,
rt: tokio::runtime::Runtime,
#[cfg(feature = "inspector")]
wait_for_inspector: bool,
is_thunk_symbol: v8::Global<v8::Symbol>, is_thunk_symbol: v8::Global<v8::Symbol>,
primop_metadata_symbol: v8::Global<v8::Symbol>, primop_metadata_symbol: v8::Global<v8::Symbol>,
has_context_symbol: v8::Global<v8::Symbol>, has_context_symbol: v8::Global<v8::Symbol>,
@@ -121,14 +125,21 @@ pub(crate) struct Runtime<Ctx: RuntimeContext> {
_marker: PhantomData<Ctx>, _marker: PhantomData<Ctx>,
} }
#[cfg(feature = "inspector")]
#[derive(Debug, Clone, Copy, Default)]
pub(crate) struct InspectorOptions {
pub(crate) enable: bool,
pub(crate) wait: bool,
}
impl<Ctx: RuntimeContext> Runtime<Ctx> { impl<Ctx: RuntimeContext> Runtime<Ctx> {
pub(crate) fn new() -> Result<Self> { pub(crate) fn new(
#[cfg(feature = "inspector")] inspector_options: InspectorOptions,
) -> Result<Self> {
use std::sync::Once; use std::sync::Once;
// Initialize V8 once
static INIT: Once = Once::new(); static INIT: Once = Once::new();
INIT.call_once(|| { INIT.call_once(|| {
// First flag is always not recognized
assert_eq!( assert_eq!(
deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]), deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]),
[""] [""]
@@ -138,10 +149,14 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let mut js_runtime = JsRuntime::new(RuntimeOptions { let mut js_runtime = JsRuntime::new(RuntimeOptions {
extensions: vec![runtime_extension::<Ctx>()], extensions: vec![runtime_extension::<Ctx>()],
#[cfg(feature = "inspector")]
inspector: inspector_options.enable,
is_main: true,
..Default::default() ..Default::default()
}); });
js_runtime.op_state().borrow_mut().put(RegexCache::new()); js_runtime.op_state().borrow_mut().put(RegexCache::new());
js_runtime.op_state().borrow_mut().put(DrvHashCache::new());
let ( let (
is_thunk_symbol, is_thunk_symbol,
@@ -156,6 +171,12 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
Ok(Self { Ok(Self {
js_runtime, js_runtime,
rt: tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("failed to build tokio runtime"),
#[cfg(feature = "inspector")]
wait_for_inspector: inspector_options.wait,
is_thunk_symbol, is_thunk_symbol,
primop_metadata_symbol, primop_metadata_symbol,
has_context_symbol, has_context_symbol,
@@ -165,10 +186,32 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
}) })
} }
#[cfg(feature = "inspector")]
pub(crate) fn inspector(&self) -> std::rc::Rc<deno_core::JsRuntimeInspector> {
self.js_runtime.inspector()
}
#[cfg(feature = "inspector")]
pub(crate) fn wait_for_inspector_disconnect(&mut self) {
let _ = self.rt
.block_on(self.js_runtime.run_event_loop(PollEventLoopOptions {
wait_for_inspector: true,
..Default::default()
}));
}
pub(crate) fn eval(&mut self, script: String, ctx: &mut Ctx) -> Result<Value> { pub(crate) fn eval(&mut self, script: String, ctx: &mut Ctx) -> Result<Value> {
let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) }; let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) };
self.js_runtime.op_state().borrow_mut().put(ctx); self.js_runtime.op_state().borrow_mut().put(ctx);
#[cfg(feature = "inspector")]
if self.wait_for_inspector {
self.js_runtime
.inspector()
.wait_for_session_and_break_on_next_statement();
} else {
self.js_runtime.inspector().wait_for_session();
}
let global_value = self let global_value = self
.js_runtime .js_runtime
.execute_script("<eval>", script) .execute_script("<eval>", script)
@@ -179,6 +222,22 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
crate::error::parse_js_error(error, ctx) crate::error::parse_js_error(error, ctx)
})?; })?;
let global_value = self
.rt
.block_on(self.js_runtime.resolve(global_value))
.map_err(|error| {
let op_state = self.js_runtime.op_state();
let op_state_borrow = op_state.borrow();
let ctx: &Ctx = op_state_borrow.get_ctx();
crate::error::parse_js_error(error, ctx)
})?;
#[cfg(feature = "inspector")]
{
let _ = self
.rt
.block_on(self.js_runtime.run_event_loop(Default::default()));
}
// Retrieve scope from JsRuntime // Retrieve scope from JsRuntime
deno_core::scope!(scope, self.js_runtime); deno_core::scope!(scope, self.js_runtime);

View File

@@ -0,0 +1,491 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Alias for the future `!` type.
use core::convert::Infallible as Never;
use deno_core::InspectorMsg;
use deno_core::InspectorSessionChannels;
use deno_core::InspectorSessionKind;
use deno_core::InspectorSessionProxy;
use deno_core::JsRuntimeInspector;
use deno_core::anyhow::Context;
use deno_core::futures::channel::mpsc;
use deno_core::futures::channel::mpsc::UnboundedReceiver;
use deno_core::futures::channel::mpsc::UnboundedSender;
use deno_core::futures::channel::oneshot;
use deno_core::futures::prelude::*;
use deno_core::futures::stream::StreamExt;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use fastwebsockets::Frame;
use fastwebsockets::OpCode;
use fastwebsockets::WebSocket;
use hyper::body::Bytes;
use hyper_util::rt::TokioIo;
use std::cell::RefCell;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::pin::pin;
use std::process;
use std::rc::Rc;
use std::task::Poll;
use std::thread;
use tokio::net::TcpListener;
use tokio::sync::broadcast;
use uuid::Uuid;
/// Websocket server that is used to proxy connections from
/// devtools to the inspector.
pub struct InspectorServer {
pub host: SocketAddr,
register_inspector_tx: UnboundedSender<InspectorInfo>,
shutdown_server_tx: Option<broadcast::Sender<()>>,
thread_handle: Option<thread::JoinHandle<()>>,
}
impl InspectorServer {
pub fn new(host: SocketAddr, name: &'static str) -> Result<Self, anyhow::Error> {
let (register_inspector_tx, register_inspector_rx) = mpsc::unbounded::<InspectorInfo>();
let (shutdown_server_tx, shutdown_server_rx) = broadcast::channel(1);
let tcp_listener = std::net::TcpListener::bind(host)
.with_context(|| format!("Failed to bind inspector server socket at {}", host))?;
tcp_listener.set_nonblocking(true)?;
let thread_handle = thread::spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("failed to build tokio runtime");
let local = tokio::task::LocalSet::new();
local.block_on(
&rt,
server(
tcp_listener,
register_inspector_rx,
shutdown_server_rx,
name,
),
)
});
Ok(Self {
host,
register_inspector_tx,
shutdown_server_tx: Some(shutdown_server_tx),
thread_handle: Some(thread_handle),
})
}
pub fn register_inspector(
&self,
module_url: String,
inspector: Rc<JsRuntimeInspector>,
wait_for_session: bool,
) {
let session_sender = inspector.get_session_sender();
let deregister_rx = inspector.add_deregister_handler();
let info = InspectorInfo::new(
self.host,
session_sender,
deregister_rx,
module_url,
wait_for_session,
);
self.register_inspector_tx
.unbounded_send(info)
.expect("unreachable");
}
}
impl Drop for InspectorServer {
fn drop(&mut self) {
if let Some(shutdown_server_tx) = self.shutdown_server_tx.take() {
shutdown_server_tx
.send(())
.expect("unable to send shutdown signal");
}
if let Some(thread_handle) = self.thread_handle.take() {
thread_handle.join().expect("unable to join thread");
}
}
}
fn handle_ws_request(
req: http::Request<hyper::body::Incoming>,
inspector_map_rc: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
let (parts, body) = req.into_parts();
let req = http::Request::from_parts(parts, ());
let maybe_uuid = req
.uri()
.path()
.strip_prefix("/ws/")
.and_then(|s| Uuid::parse_str(s).ok());
let Some(uuid) = maybe_uuid else {
return http::Response::builder()
.status(http::StatusCode::BAD_REQUEST)
.body(Box::new(Bytes::from("Malformed inspector UUID").into()));
};
// run in a block to not hold borrow to `inspector_map` for too long
let new_session_tx = {
let inspector_map = inspector_map_rc.borrow();
let maybe_inspector_info = inspector_map.get(&uuid);
let Some(info) = maybe_inspector_info else {
return http::Response::builder()
.status(http::StatusCode::NOT_FOUND)
.body(Box::new(Bytes::from("Invalid inspector UUID").into()));
};
info.new_session_tx.clone()
};
let (parts, _) = req.into_parts();
let mut req = http::Request::from_parts(parts, body);
let Ok((resp, upgrade_fut)) = fastwebsockets::upgrade::upgrade(&mut req) else {
return http::Response::builder()
.status(http::StatusCode::BAD_REQUEST)
.body(Box::new(
Bytes::from("Not a valid Websocket Request").into(),
));
};
// spawn a task that will wait for websocket connection and then pump messages between
// the socket and inspector proxy
spawn(async move {
let websocket = match upgrade_fut.await {
Ok(w) => w,
Err(err) => {
eprintln!(
"Inspector server failed to upgrade to WS connection: {:?}",
err
);
return;
}
};
// The 'outbound' channel carries messages sent to the websocket.
let (outbound_tx, outbound_rx) = mpsc::unbounded();
// The 'inbound' channel carries messages received from the websocket.
let (inbound_tx, inbound_rx) = mpsc::unbounded();
let inspector_session_proxy = InspectorSessionProxy {
channels: InspectorSessionChannels::Regular {
tx: outbound_tx,
rx: inbound_rx,
},
kind: InspectorSessionKind::NonBlocking {
wait_for_disconnect: true,
},
};
eprintln!("Debugger session started.");
let _ = new_session_tx.unbounded_send(inspector_session_proxy);
pump_websocket_messages(websocket, inbound_tx, outbound_rx).await;
});
let (parts, _body) = resp.into_parts();
let resp = http::Response::from_parts(parts, Box::new(http_body_util::Full::new(Bytes::new())));
Ok(resp)
}
/// Serve `/json` and `/json/list`: one discovery entry per registered
/// inspector, rendered as a JSON array.
///
/// `host` is the value derived from the request's Host header (if it parsed
/// as a valid URL); it overrides each inspector's listen address in the
/// advertised URLs.
fn handle_json_request(
    inspector_map: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
    host: Option<String>,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
    let entries: Vec<_> = inspector_map
        .borrow()
        .values()
        .map(|info| info.get_json_metadata(&host))
        .collect();
    let serialized = serde_json::to_string(&entries).expect("unreachable");
    let payload: http_body_util::Full<Bytes> = Bytes::from(serialized).into();
    http::Response::builder()
        .status(http::StatusCode::OK)
        .header(http::header::CONTENT_TYPE, "application/json")
        .body(Box::new(payload))
}
/// Serve `/json/version`: the pre-built version object as a JSON response.
fn handle_json_version_request(
    version_response: Value,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
    let serialized = serde_json::to_string(&version_response).expect("unreachable");
    http::Response::builder()
        .status(http::StatusCode::OK)
        .header(http::header::CONTENT_TYPE, "application/json")
        .body(Box::new(http_body_util::Full::from(serialized)))
}
/// Run the inspector HTTP/WebSocket server until shutdown is signalled.
///
/// Drives three futures concurrently on the local (single-threaded) executor:
/// - registration: inserts newly created inspectors into the shared map,
/// - deregistration: drops map entries whose `deregister_rx` has fired,
/// - accept loop: serves `/json*` discovery endpoints and `/ws/<uuid>`
///   debugger sessions for each accepted TCP connection.
async fn server(
    listener: std::net::TcpListener,
    register_inspector_rx: UnboundedReceiver<InspectorInfo>,
    shutdown_server_rx: broadcast::Receiver<()>,
    name: &str,
) {
    let inspector_map_ = Rc::new(RefCell::new(HashMap::<Uuid, InspectorInfo>::new()));
    let inspector_map = Rc::clone(&inspector_map_);
    let register_inspector_handler =
        listen_for_new_inspectors(register_inspector_rx, inspector_map.clone()).boxed_local();
    let inspector_map = Rc::clone(&inspector_map_);
    // Poll every inspector's deregister channel and drop entries whose
    // channel has resolved. This future itself never completes.
    let deregister_inspector_handler = future::poll_fn(|cx| {
        inspector_map
            .borrow_mut()
            .retain(|_, info| info.deregister_rx.poll_unpin(cx) == Poll::Pending);
        Poll::<Never>::Pending
    })
    .boxed_local();
    let json_version_response = json!({
        "Browser": name,
        "Protocol-Version": "1.3",
        "V8-Version": deno_core::v8::VERSION_STRING,
    });
    // Create the server manually so it can use the Local Executor
    let listener = match TcpListener::from_std(listener) {
        Ok(l) => l,
        Err(err) => {
            eprintln!("Cannot create async listener from std listener: {:?}", err);
            return;
        }
    };
    let server_handler = async move {
        loop {
            // Race each accept against a shutdown notification.
            let mut rx = shutdown_server_rx.resubscribe();
            let mut shutdown_rx = pin!(rx.recv());
            let mut accept = pin!(listener.accept());
            let stream = tokio::select! {
                accept_result = &mut accept => {
                    match accept_result {
                        Ok((s, _)) => s,
                        Err(err) => {
                            eprintln!("Failed to accept inspector connection: {:?}", err);
                            continue;
                        }
                    }
                },
                _ = &mut shutdown_rx => {
                    break;
                }
            };
            let io = TokioIo::new(stream);
            let inspector_map = Rc::clone(&inspector_map_);
            let json_version_response = json_version_response.clone();
            let mut shutdown_server_rx = shutdown_server_rx.resubscribe();
            let service =
                hyper::service::service_fn(move |req: http::Request<hyper::body::Incoming>| {
                    future::ready({
                        // If the host header can make a valid URL, use it
                        let host = req
                            .headers()
                            .get("host")
                            .and_then(|host| host.to_str().ok())
                            .and_then(|host| Url::parse(&format!("http://{host}")).ok())
                            .and_then(|url| match (url.host(), url.port()) {
                                (Some(host), Some(port)) => Some(format!("{host}:{port}")),
                                (Some(host), None) => Some(format!("{host}")),
                                _ => None,
                            });
                        match (req.method(), req.uri().path()) {
                            (&http::Method::GET, path) if path.starts_with("/ws/") => {
                                handle_ws_request(req, Rc::clone(&inspector_map))
                            }
                            (&http::Method::GET, "/json/version") => {
                                handle_json_version_request(json_version_response.clone())
                            }
                            (&http::Method::GET, "/json") => {
                                handle_json_request(Rc::clone(&inspector_map), host)
                            }
                            (&http::Method::GET, "/json/list") => {
                                handle_json_request(Rc::clone(&inspector_map), host)
                            }
                            _ => http::Response::builder()
                                .status(http::StatusCode::NOT_FOUND)
                                .body(Box::new(http_body_util::Full::new(Bytes::from(
                                    "Not Found",
                                )))),
                        }
                    })
                });
            // Serve each accepted connection as its own local task; shut it
            // down gracefully if the whole server is asked to stop.
            deno_core::unsync::spawn(async move {
                let server = hyper::server::conn::http1::Builder::new();
                let mut conn = pin!(server.serve_connection(io, service).with_upgrades());
                let mut shutdown_rx = pin!(shutdown_server_rx.recv());
                tokio::select! {
                    result = conn.as_mut() => {
                        if let Err(err) = result {
                            eprintln!("Failed to serve connection: {:?}", err);
                        }
                    },
                    _ = &mut shutdown_rx => {
                        conn.as_mut().graceful_shutdown();
                        let _ = conn.await;
                    }
                }
            });
        }
    }
    .boxed_local();
    // The accept loop finishing (shutdown) or the registration stream ending
    // terminates the server; the deregistration future never resolves.
    tokio::select! {
        _ = register_inspector_handler => {},
        _ = deregister_inspector_handler => unreachable!(),
        _ = server_handler => {},
    }
}
/// Consume inspector registrations from the channel, announce each debugger
/// endpoint on stderr, and insert the record into the shared map.
///
/// Panics if the same UUID is ever registered twice.
async fn listen_for_new_inspectors(
    mut register_inspector_rx: UnboundedReceiver<InspectorInfo>,
    inspector_map: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
) {
    while let Some(info) = register_inspector_rx.next().await {
        let ws_url = info.get_websocket_debugger_url(&info.host.to_string());
        eprintln!("Debugger listening on {}", ws_url);
        eprintln!("Visit chrome://inspect to connect to the debugger.");
        if info.wait_for_session {
            eprintln!("Deno is waiting for debugger to connect.");
        }
        let replaced = inspector_map.borrow_mut().insert(info.uuid, info);
        if replaced.is_some() {
            panic!("Inspector UUID already in map");
        }
    }
}
/// The pump future takes care of forwarding messages between the websocket
/// and channels. It resolves when either side disconnects, ignoring any
/// errors.
///
/// The future proxies messages sent and received on a WebSocket
/// to a UnboundedSender/UnboundedReceiver pair. We need these "unbounded" channel ends to sidestep
/// Tokio's task budget, which causes issues when JsRuntimeInspector::poll_sessions()
/// needs to block the thread because JavaScript execution is paused.
///
/// This works because UnboundedSender/UnboundedReceiver are implemented in the
/// 'futures' crate, therefore they can't participate in Tokio's cooperative
/// task yielding.
async fn pump_websocket_messages(
    mut websocket: WebSocket<TokioIo<hyper::upgrade::Upgraded>>,
    inbound_tx: UnboundedSender<String>,
    mut outbound_rx: UnboundedReceiver<InspectorMsg>,
) {
    'pump: loop {
        tokio::select! {
            // Inspector -> client: each message goes out as one text frame.
            Some(msg) = outbound_rx.next() => {
                let msg = Frame::text(msg.content.into_bytes().into());
                let _ = websocket.write_frame(msg).await;
            }
            // Client -> inspector: only valid-UTF-8 text frames are forwarded.
            Ok(msg) = websocket.read_frame() => {
                match msg.opcode {
                    OpCode::Text => {
                        if let Ok(s) = String::from_utf8(msg.payload.to_vec()) {
                            let _ = inbound_tx.unbounded_send(s);
                        }
                    }
                    OpCode::Close => {
                        // Users don't care if there was an error coming from debugger,
                        // just about the fact that debugger did disconnect.
                        eprintln!("Debugger session ended");
                        break 'pump;
                    }
                    _ => {
                        // Ignore other messages.
                    }
                }
            }
            // Both arms disabled: one side has hung up, stop pumping.
            else => {
                break 'pump;
            }
        }
    }
}
/// Inspector information that is sent from the isolate thread to the server
/// thread when a new inspector is created.
pub struct InspectorInfo {
    /// Address the inspector is reachable on (fallback host in URLs).
    pub host: SocketAddr,
    /// Identifier used in `/ws/<uuid>` routes and `/json` entries.
    pub uuid: Uuid,
    /// Name of the creating thread, if it has one (shown in the title).
    pub thread_name: Option<String>,
    /// Delivers newly established debugger session proxies to the inspector.
    pub new_session_tx: UnboundedSender<InspectorSessionProxy>,
    /// Resolves when the inspector goes away; the server then drops this entry.
    pub deregister_rx: oneshot::Receiver<()>,
    /// URL reported in the `/json` discovery payload.
    pub url: String,
    /// When set, a "waiting for debugger to connect" notice is printed on
    /// registration.
    pub wait_for_session: bool,
}
impl InspectorInfo {
    /// Build a registration record for a freshly created inspector,
    /// assigning a random UUID and capturing the current thread's name.
    pub fn new(
        host: SocketAddr,
        new_session_tx: mpsc::UnboundedSender<InspectorSessionProxy>,
        deregister_rx: oneshot::Receiver<()>,
        url: String,
        wait_for_session: bool,
    ) -> Self {
        let thread_name = thread::current().name().map(str::to_owned);
        Self {
            host,
            uuid: Uuid::new_v4(),
            thread_name,
            new_session_tx,
            deregister_rx,
            url,
            wait_for_session,
        }
    }

    /// One `/json` / `/json/list` entry in the shape the Chrome DevTools
    /// discovery protocol expects. `host` (from the request's Host header)
    /// overrides the listen address when present.
    fn get_json_metadata(&self, host: &Option<String>) -> Value {
        let host_listen = self.host.to_string();
        let effective_host = host.as_ref().unwrap_or(&host_listen);
        json!({
            "description": "deno",
            "devtoolsFrontendUrl": self.get_frontend_url(effective_host),
            "faviconUrl": "https://deno.land/favicon.ico",
            "id": self.uuid.to_string(),
            "title": self.get_title(),
            "type": "node",
            "url": self.url.to_string(),
            "webSocketDebuggerUrl": self.get_websocket_debugger_url(effective_host),
        })
    }

    /// `ws://<host>/ws/<uuid>` endpoint for this inspector.
    pub fn get_websocket_debugger_url(&self, host: &str) -> String {
        format!("ws://{}/ws/{}", host, &self.uuid)
    }

    /// DevTools frontend URL that points back at our websocket endpoint.
    fn get_frontend_url(&self, host: &str) -> String {
        format!(
            "devtools://devtools/bundled/js_app.html?ws={}/ws/{}&experiments=true&v8only=true",
            host, &self.uuid
        )
    }

    /// Human-readable title: "deno[ - <thread>] [pid: <pid>]".
    fn get_title(&self) -> String {
        let thread_suffix = self
            .thread_name
            .as_ref()
            .map(|n| format!(" - {n}"))
            .unwrap_or_default();
        format!("deno{} [pid: {}]", thread_suffix, process::id())
    }
}

View File

@@ -244,12 +244,6 @@ pub(super) fn op_resolve_path(
Ok(normalized.to_string_lossy().to_string()) Ok(normalized.to_string_lossy().to_string())
} }
/// Op: hex-encoded SHA-256 digest of the given string's UTF-8 bytes.
#[deno_core::op2]
#[string]
pub(super) fn op_sha256_hex(#[string] data: String) -> String {
    crate::nix_utils::sha256_hex(data.as_bytes())
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
pub(super) fn op_make_placeholder(#[string] output: String) -> String { pub(super) fn op_make_placeholder(#[string] output: String) -> String {
@@ -312,20 +306,6 @@ fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
(line, col) (line, col)
} }
/// Op: build a store path of the given type (`ty`) from a hash and name,
/// using the configured store's directory.
#[deno_core::op2]
#[string]
pub(super) fn op_make_store_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] ty: String,
    #[string] hash_hex: String,
    #[string] name: String,
) -> String {
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir();
    crate::nix_utils::make_store_path(store_dir, &ty, &hash_hex, &name)
}
#[derive(serde::Serialize)] #[derive(serde::Serialize)]
pub(super) struct ParsedHash { pub(super) struct ParsedHash {
hex: String, hex: String,
@@ -359,34 +339,6 @@ pub(super) fn op_parse_hash(
}) })
} }
/// Op: compute the store path of a fixed-output derivation's `out` output.
///
/// `sha256` + `recursive` is addressed directly as a "source" path; every
/// other (algo, mode) combination first sha256-hashes a `fixed:out:...`
/// fingerprint and addresses the path from that inner hash.
#[deno_core::op2]
#[string]
pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] hash_algo: String,
    #[string] hash: String,
    #[string] hash_mode: String,
    #[string] name: String,
) -> String {
    use sha2::{Digest, Sha256};
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir();
    if hash_algo == "sha256" && hash_mode == "recursive" {
        crate::nix_utils::make_store_path(store_dir, "source", &hash, &name)
    } else {
        // "r:" marks recursive (NAR) hashing in the fingerprint.
        let prefix = if hash_mode == "recursive" { "r:" } else { "" };
        let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
        let mut hasher = Sha256::new();
        hasher.update(inner_input.as_bytes());
        let inner_hash = hex::encode(hasher.finalize());
        crate::nix_utils::make_store_path(store_dir, "output:out", &inner_hash, &name)
    }
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
pub(super) fn op_add_path<Ctx: RuntimeContext>( pub(super) fn op_add_path<Ctx: RuntimeContext>(
@@ -504,269 +456,6 @@ pub(super) fn op_to_file<Ctx: RuntimeContext>(
Ok(store_path) Ok(store_path)
} }
/// Op: write an already-rendered derivation ATerm into the store as
/// `<name>.drv`, registering `references` on the new path. Returns the
/// resulting store path.
#[deno_core::op2]
#[string]
pub(super) fn op_write_derivation<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] drv_name: String,
    #[string] aterm: String,
    #[serde] references: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
    tracing::debug!(
        "op_write_derivation: name={}.drv, references={:?}",
        drv_name,
        references
    );
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_path = store
        .add_text_to_store(&format!("{}.drv", drv_name), &aterm, references)
        .map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
    Ok(store_path)
}
/// Op: read a `.drv` file from disk and return its output names
/// (e.g. `["out", "dev"]`).
#[deno_core::op2]
#[serde]
pub(super) fn op_read_derivation_outputs(
    #[string] drv_path: String,
) -> std::result::Result<Vec<String>, NixRuntimeError> {
    let content = std::fs::read_to_string(&drv_path).map_err(|e| {
        NixRuntimeError::from(format!("failed to read derivation {}: {}", drv_path, e))
    })?;
    let outputs = parse_derivation_outputs(&content)
        .ok_or_else(|| NixRuntimeError::from(format!("failed to parse derivation {}", drv_path)))?;
    Ok(outputs)
}
/// Extract the output names from a derivation in ATerm format
/// (`Derive([("out","/path","",""),...],...)`).
///
/// Returns `None` when the text does not start like a derivation. The scan
/// reads the first quoted field of each `(...)` tuple in the outputs list.
fn parse_derivation_outputs(aterm: &str) -> Option<Vec<String>> {
    let body = aterm.strip_prefix("Derive([")?;
    // Everything up to the first "],[" is the outputs list.
    let section = &body[..body.find("],[")?];
    let raw = section.as_bytes();
    let mut names = Vec::new();
    let mut i = 0;
    while i < raw.len() {
        // Advance to the next tuple opener.
        while i < raw.len() && raw[i] != b'(' {
            i += 1;
        }
        if i >= raw.len() {
            break;
        }
        i += 1;
        // The first tuple field must be a quoted output name.
        if i >= raw.len() || raw[i] != b'"' {
            break;
        }
        i += 1;
        let begin = i;
        while i < raw.len() && raw[i] != b'"' {
            i += 1;
        }
        names.push(std::str::from_utf8(&raw[begin..i]).ok()?.to_owned());
        // Skip the remainder of this tuple.
        while i < raw.len() && raw[i] != b')' {
            i += 1;
        }
        i += 1;
    }
    Some(names)
}
/// Inputs of one derivation as read from its ATerm representation.
#[derive(serde::Serialize)]
pub(super) struct DerivationInputs {
    // (drv path, output names referenced from that drv)
    input_drvs: Vec<(String, Vec<String>)>,
    // plain store paths referenced as sources
    input_srcs: Vec<String>,
}
/// Extract the `inputDrvs` and `inputSrcs` sections from a derivation in
/// ATerm format: `Derive([outputs],[inputDrvs],[inputSrcs],...)`.
///
/// Returns `None` when the text does not parse. The scan is bracket-depth
/// based; it assumes quoted store paths contain no `"`/`[`/`]` characters
/// (only the inputSrcs loop handles backslash escapes).
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
    let aterm = aterm.strip_prefix("Derive([")?;
    // Skip the outputs list: consume until its closing bracket.
    let mut bracket_count = 1;
    let mut pos = 0;
    let bytes = aterm.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            _ => {}
        }
        pos += 1;
    }
    if bracket_count != 0 {
        return None;
    }
    let rest = &aterm[pos..];
    let rest = rest.strip_prefix(",[")?;
    // Collect `("<drv>",["<out>",...])` entries from the inputDrvs list.
    let mut input_drvs = Vec::new();
    let mut bracket_count = 1;
    let mut start = 0;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'(' if bracket_count == 1 => {
                start = pos;
            }
            b')' if bracket_count == 1 => {
                let entry = &rest[start + 1..pos];
                if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
                    input_drvs.push((drv_path, outputs));
                }
            }
            _ => {}
        }
        pos += 1;
    }
    let rest = &rest[pos..];
    let rest = rest.strip_prefix(",[")?;
    // Collect quoted paths from the inputSrcs list (handles `\x` escapes).
    let mut input_srcs = Vec::new();
    bracket_count = 1;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'"' if bracket_count == 1 => {
                pos += 1;
                let src_start = pos;
                while pos < bytes.len() && bytes[pos] != b'"' {
                    if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
                        pos += 2;
                    } else {
                        pos += 1;
                    }
                }
                let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
                input_srcs.push(src.to_string());
            }
            _ => {}
        }
        pos += 1;
    }
    Some(DerivationInputs {
        input_drvs,
        input_srcs,
    })
}
/// Parse one inputDrvs entry of the form `"<drv path>",["<out>","<out>",...]`.
/// Returns `None` when the entry does not match that shape; unquoted list
/// elements are silently skipped.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    let after_quote = entry.strip_prefix('"')?;
    let close = after_quote.find('"')?;
    let drv_path = after_quote[..close].to_owned();
    let list = after_quote[close + 1..]
        .strip_prefix(",[")?
        .strip_suffix(']')?;
    let outputs = list
        .split(',')
        .filter_map(|piece| {
            piece
                .trim()
                .strip_prefix('"')
                .and_then(|p| p.strip_suffix('"'))
                .map(str::to_owned)
        })
        .collect();
    Some((drv_path, outputs))
}
/// Result of `op_compute_fs_closure`, serialized back to the JS side.
#[derive(serde::Serialize)]
pub(super) struct FsClosureResult {
    // (drv path, sorted output names referenced from it)
    input_drvs: Vec<(String, Vec<String>)>,
    // every path encountered in the closure, sorted
    input_srcs: Vec<String>,
}
/// Op: breadth-first closure of a derivation's file-system inputs.
///
/// Starting from `drv_path`, follows every `.drv` file's inputSrcs and
/// inputDrvs recursively. Every visited path (including `.drv` files)
/// is collected into `input_srcs`; only `.drv` files are read and parsed
/// for further references.
#[deno_core::op2]
#[serde]
pub(super) fn op_compute_fs_closure(
    #[string] drv_path: String,
) -> std::result::Result<FsClosureResult, NixRuntimeError> {
    use std::collections::{BTreeMap, BTreeSet, VecDeque};
    let mut all_input_srcs: BTreeSet<String> = BTreeSet::new();
    let mut all_input_drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    let mut queue: VecDeque<String> = VecDeque::new();
    let mut visited: BTreeSet<String> = BTreeSet::new();
    queue.push_back(drv_path);
    while let Some(current_path) = queue.pop_front() {
        if visited.contains(&current_path) {
            continue;
        }
        visited.insert(current_path.clone());
        all_input_srcs.insert(current_path.clone());
        // Non-derivation paths are leaves: recorded but not expanded.
        if !current_path.ends_with(".drv") {
            continue;
        }
        let content = match std::fs::read_to_string(&current_path) {
            Ok(c) => c,
            Err(e) => {
                return Err(NixRuntimeError::from(format!(
                    "failed to read derivation {}: {}",
                    current_path, e
                )));
            }
        };
        let inputs = parse_derivation_inputs(&content).ok_or_else(|| {
            NixRuntimeError::from(format!("failed to parse derivation {}", current_path))
        })?;
        for src in inputs.input_srcs {
            all_input_srcs.insert(src.clone());
            if !visited.contains(&src) {
                queue.push_back(src);
            }
        }
        for (dep_drv, outputs) in inputs.input_drvs {
            all_input_srcs.insert(dep_drv.clone());
            // Merge output sets: the same drv may be referenced repeatedly.
            let entry = all_input_drvs.entry(dep_drv.clone()).or_default();
            for output in outputs {
                entry.insert(output);
            }
            if !visited.contains(&dep_drv) {
                queue.push_back(dep_drv);
            }
        }
    }
    let input_drvs: Vec<(String, Vec<String>)> = all_input_drvs
        .into_iter()
        .map(|(k, v)| (k, v.into_iter().collect()))
        .collect();
    let input_srcs: Vec<String> = all_input_srcs.into_iter().collect();
    Ok(FsClosureResult {
        input_drvs,
        input_srcs,
    })
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>( pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
@@ -1113,6 +802,19 @@ fn json_to_nix(value: serde_json::Value) -> NixJsonValue {
} }
} }
/// Per-runtime cache mapping a derivation's store path to its
/// "hash modulo" value, consulted when computing output paths of
/// derivations that depend on other derivations.
#[derive(Debug, Default)]
pub(super) struct DrvHashCache {
    cache: HashMap<String, String>,
}

impl DrvHashCache {
    /// Create an empty cache (equivalent to `Default::default()`).
    pub(super) fn new() -> Self {
        Self::default()
    }
}
fn toml_to_nix(value: toml::Value) -> std::result::Result<NixJsonValue, NixRuntimeError> { fn toml_to_nix(value: toml::Value) -> std::result::Result<NixJsonValue, NixRuntimeError> {
match value { match value {
toml::Value::String(s) => Ok(NixJsonValue::Str(s)), toml::Value::String(s) => Ok(NixJsonValue::Str(s)),
@@ -1153,3 +855,377 @@ pub(super) fn op_from_toml(
.map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?; .map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?;
toml_to_nix(parsed) toml_to_nix(parsed)
} }
/// Fixed-output description for a derivation (hash algorithm, digest text,
/// and hashing mode), deserialized from the JS side.
#[derive(serde::Deserialize)]
pub(super) struct FixedOutputInput {
    // e.g. "sha256"; see op_make_fixed_output_path_impl for handling
    #[serde(rename = "hashAlgo")]
    hash_algo: String,
    hash: String,
    // "recursive" selects NAR hashing; anything else is treated as flat
    #[serde(rename = "hashMode")]
    hash_mode: String,
}

/// Everything the JS side collected for one derivation call, handed to
/// `op_finalize_derivation`.
#[derive(serde::Deserialize)]
pub(super) struct FinalizeDerivationInput {
    name: String,
    builder: String,
    platform: String,
    outputs: Vec<String>,
    args: Vec<String>,
    env: Vec<(String, String)>,
    // encoded string-context elements; see string_context decoding
    context: Vec<String>,
    #[serde(rename = "fixedOutput")]
    fixed_output: Option<FixedOutputInput>,
}

/// Result of `op_finalize_derivation`: the written `.drv` path plus the
/// computed (output name, store path) pairs.
#[derive(serde::Serialize)]
pub(super) struct FinalizeDerivationOutput {
    #[serde(rename = "drvPath")]
    drv_path: String,
    outputs: Vec<(String, String)>,
}
/// Store-path name for one derivation output: the default `out` output
/// keeps the bare derivation name, every other output gets the output
/// name appended as a `-` suffix.
fn output_path_name(drv_name: &str, output: &str) -> String {
    match output {
        "out" => drv_name.to_owned(),
        other => format!("{}-{}", drv_name, other),
    }
}
/// Op: turn a fully evaluated derivation call into an on-disk `.drv`.
///
/// Computes the output store paths, writes the ATerm to the store, and
/// caches the derivation's "hash modulo" in `DrvHashCache` so downstream
/// derivations referencing its outputs can compute their own paths
/// (mirrors Nix's hashDerivationModulo scheme).
#[deno_core::op2]
#[serde]
pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[serde] input: FinalizeDerivationInput,
) -> std::result::Result<FinalizeDerivationOutput, NixRuntimeError> {
    use crate::derivation::{DerivationData, OutputInfo};
    use crate::string_context::extract_input_drvs_and_srcs;
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    let store_dir = store.get_store_dir().to_string();
    // Dependencies come from the string context gathered over all attrs.
    let (input_drvs, input_srcs) =
        extract_input_drvs_and_srcs(&input.context).map_err(NixRuntimeError::from)?;
    let env: std::collections::BTreeMap<String, String> = input.env.into_iter().collect();
    let drv_path;
    let output_paths: Vec<(String, String)>;
    if let Some(fixed) = &input.fixed_output {
        // Fixed-output derivation: a single "out" output whose path depends
        // only on the declared content hash, not on the inputs.
        let path_name = output_path_name(&input.name, "out");
        let out_path = crate::runtime::ops::op_make_fixed_output_path_impl(
            &store_dir,
            &fixed.hash_algo,
            &fixed.hash,
            &fixed.hash_mode,
            &path_name,
        );
        let hash_algo_prefix = if fixed.hash_mode == "recursive" {
            "r:"
        } else {
            ""
        };
        let mut final_outputs = std::collections::BTreeMap::new();
        final_outputs.insert(
            "out".to_string(),
            OutputInfo {
                path: out_path.clone(),
                hash_algo: format!("{}{}", hash_algo_prefix, fixed.hash_algo),
                hash: fixed.hash.clone(),
            },
        );
        let mut final_env = env;
        final_env.insert("out".to_string(), out_path.clone());
        let drv = DerivationData {
            name: input.name.clone(),
            outputs: final_outputs,
            input_drvs: input_drvs.clone(),
            input_srcs: input_srcs.clone(),
            platform: input.platform,
            builder: input.builder,
            args: input.args,
            env: final_env,
        };
        let final_aterm = drv.generate_aterm();
        let references = drv.collect_references();
        drv_path = store
            .add_text_to_store(&format!("{}.drv", input.name), &final_aterm, references)
            .map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
        // The modulo hash of a fixed-output drv is the sha256 of its
        // fixed-output fingerprint, not of its ATerm.
        let fixed_hash_fingerprint = format!(
            "fixed:out:{}{}:{}:{}",
            hash_algo_prefix, fixed.hash_algo, fixed.hash, out_path,
        );
        let fixed_modulo_hash = crate::nix_utils::sha256_hex(fixed_hash_fingerprint.as_bytes());
        let cache = state.borrow_mut::<DrvHashCache>();
        cache.cache.insert(drv_path.clone(), fixed_modulo_hash);
        output_paths = vec![("out".to_string(), out_path)];
    } else {
        // Regular derivation: output paths derive from the hash of the
        // derivation with its output paths masked out (empty strings).
        let masked_outputs: std::collections::BTreeMap<String, OutputInfo> = input
            .outputs
            .iter()
            .map(|o| {
                (
                    o.clone(),
                    OutputInfo {
                        path: String::new(),
                        hash_algo: String::new(),
                        hash: String::new(),
                    },
                )
            })
            .collect();
        let mut masked_env = env.clone();
        for output in &input.outputs {
            masked_env.insert(output.clone(), String::new());
        }
        let masked_drv = DerivationData {
            name: input.name.clone(),
            outputs: masked_outputs,
            input_drvs: input_drvs.clone(),
            input_srcs: input_srcs.clone(),
            platform: input.platform.clone(),
            builder: input.builder.clone(),
            args: input.args.clone(),
            env: masked_env,
        };
        // Each input drv path is replaced by its cached modulo hash when
        // hashing, keyed by hash with a sorted CSV of the wanted outputs.
        let mut input_drv_hashes = std::collections::BTreeMap::new();
        {
            let cache = state.borrow::<DrvHashCache>();
            for (dep_drv_path, output_names) in &input_drvs {
                let cached_hash = cache.cache.get(dep_drv_path).ok_or_else(|| {
                    NixRuntimeError::from(format!(
                        "Missing modulo hash for input derivation: {}",
                        dep_drv_path
                    ))
                })?;
                let mut sorted_outs: Vec<&String> = output_names.iter().collect();
                sorted_outs.sort();
                let outputs_csv: Vec<&str> = sorted_outs.iter().map(|s| s.as_str()).collect();
                input_drv_hashes.insert(cached_hash.clone(), outputs_csv.join(","));
            }
        }
        let masked_aterm = masked_drv.generate_aterm_modulo(&input_drv_hashes);
        let drv_modulo_hash = crate::nix_utils::sha256_hex(masked_aterm.as_bytes());
        // Real output paths can now be computed from the modulo hash.
        let mut final_outputs = std::collections::BTreeMap::new();
        let mut final_env = env;
        let mut result_output_paths = Vec::new();
        for output_name in &input.outputs {
            let path_name = output_path_name(&input.name, output_name);
            let out_path = crate::nix_utils::make_store_path(
                &store_dir,
                &format!("output:{}", output_name),
                &drv_modulo_hash,
                &path_name,
            );
            final_outputs.insert(
                output_name.clone(),
                OutputInfo {
                    path: out_path.clone(),
                    hash_algo: String::new(),
                    hash: String::new(),
                },
            );
            final_env.insert(output_name.clone(), out_path.clone());
            result_output_paths.push((output_name.clone(), out_path));
        }
        let final_drv = DerivationData {
            name: input.name,
            outputs: final_outputs,
            input_drvs,
            input_srcs,
            platform: input.platform,
            builder: input.builder,
            args: input.args,
            env: final_env,
        };
        let final_aterm = final_drv.generate_aterm();
        let references = final_drv.collect_references();
        drv_path = store
            .add_text_to_store(&format!("{}.drv", final_drv.name), &final_aterm, references)
            .map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
        // Cache this drv's own modulo hash for derivations depending on it.
        let final_aterm_modulo = final_drv.generate_aterm_modulo(&input_drv_hashes);
        let cached_modulo_hash = crate::nix_utils::sha256_hex(final_aterm_modulo.as_bytes());
        let cache = state.borrow_mut::<DrvHashCache>();
        cache.cache.insert(drv_path.clone(), cached_modulo_hash);
        output_paths = result_output_paths;
    }
    Ok(FinalizeDerivationOutput {
        drv_path,
        outputs: output_paths,
    })
}
/// Compute a fixed-output store path.
///
/// A recursive sha256 hash is addressed directly as a "source" path; every
/// other (algo, mode) combination sha256-hashes a `fixed:out:...`
/// fingerprint and addresses the path from that inner digest.
fn op_make_fixed_output_path_impl(
    store_dir: &str,
    hash_algo: &str,
    hash: &str,
    hash_mode: &str,
    name: &str,
) -> String {
    use sha2::{Digest, Sha256};
    if hash_algo == "sha256" && hash_mode == "recursive" {
        return crate::nix_utils::make_store_path(store_dir, "source", hash, name);
    }
    // "r:" marks recursive (NAR) hashing inside the fingerprint.
    let mode_prefix = if hash_mode == "recursive" { "r:" } else { "" };
    let fingerprint = format!("fixed:out:{}{}:{}:", mode_prefix, hash_algo, hash);
    let mut hasher = Sha256::new();
    hasher.update(fingerprint.as_bytes());
    let inner_digest = hex::encode(hasher.finalize());
    crate::nix_utils::make_store_path(store_dir, "output:out", &inner_digest, name)
}
/// Hash raw bytes with one of the supported algorithms and return the
/// digest as lowercase hex. Shared backend for `op_hash_string` and
/// `op_hash_file`, which previously duplicated this whole match.
///
/// Returns an error for any algorithm other than sha256/sha512/sha1/md5.
fn digest_hex(algo: &str, data: &[u8]) -> std::result::Result<String, NixRuntimeError> {
    use sha2::{Digest, Sha256, Sha512};
    let hash_bytes: Vec<u8> = match algo {
        "sha256" => {
            let mut hasher = Sha256::new();
            hasher.update(data);
            hasher.finalize().to_vec()
        }
        "sha512" => {
            let mut hasher = Sha512::new();
            hasher.update(data);
            hasher.finalize().to_vec()
        }
        "sha1" => {
            use sha1::Digest as _;
            let mut hasher = sha1::Sha1::new();
            hasher.update(data);
            hasher.finalize().to_vec()
        }
        "md5" => md5::compute(data).to_vec(),
        _ => {
            return Err(NixRuntimeError::from(format!(
                "unknown hash algorithm '{}'",
                algo
            )));
        }
    };
    Ok(hex::encode(hash_bytes))
}

/// Op: hex digest of a string's UTF-8 bytes using the given algorithm.
#[deno_core::op2]
#[string]
pub(super) fn op_hash_string(
    #[string] algo: String,
    #[string] data: String,
) -> std::result::Result<String, NixRuntimeError> {
    digest_hex(&algo, data.as_bytes())
}

/// Op: hex digest of a file's contents using the given algorithm.
/// Fails with a descriptive error if the file cannot be read.
#[deno_core::op2]
#[string]
pub(super) fn op_hash_file(
    #[string] algo: String,
    #[string] path: String,
) -> std::result::Result<String, NixRuntimeError> {
    let data = std::fs::read(&path)
        .map_err(|e| NixRuntimeError::from(format!("cannot read '{}': {}", path, e)))?;
    digest_hex(&algo, &data)
}
/// Op: re-encode a hash into another textual format
/// (`base16`, `nix32`/`base32`, `base64`, or `sri`).
#[deno_core::op2]
#[string]
pub(super) fn op_convert_hash(
    #[serde] input: ConvertHashInput,
) -> std::result::Result<String, NixRuntimeError> {
    use nix_compat::nixhash::{HashAlgo, NixHash};
    // NOTE(review): an unrecognized `hashAlgo` string is silently dropped
    // here (treated as "no algorithm hint") instead of being reported —
    // confirm that is intended.
    let hash_algo = input
        .hash_algo
        .as_deref()
        .and_then(|a| HashAlgo::from_str(a).ok());
    let hash = NixHash::from_str(&input.hash, hash_algo).map_err(|e| {
        NixRuntimeError::from(format!("cannot convert hash '{}': {}", input.hash, e))
    })?;
    let bytes = hash.digest_as_bytes();
    match input.to_format.as_str() {
        "base16" => Ok(hex::encode(bytes)),
        "nix32" | "base32" => Ok(nix_compat::nixbase32::encode(bytes)),
        "base64" => {
            use base64::Engine as _;
            Ok(base64::engine::general_purpose::STANDARD.encode(bytes))
        }
        // SRI form: "<algo>-<base64 digest>".
        "sri" => Ok(format!("{}-{}", hash.algo(), {
            use base64::Engine as _;
            base64::engine::general_purpose::STANDARD.encode(bytes)
        })),
        _ => Err(NixRuntimeError::from(format!(
            "unknown hash format '{}'",
            input.to_format
        ))),
    }
}
/// Arguments for `op_convert_hash`, deserialized from the JS side.
#[derive(serde::Deserialize)]
pub(super) struct ConvertHashInput {
    // hash text in any form NixHash::from_str accepts
    hash: String,
    // optional algorithm hint; unrecognized values are currently ignored
    #[serde(rename = "hashAlgo")]
    hash_algo: Option<String>,
    // target format: "base16", "nix32"/"base32", "base64", or "sri"
    #[serde(rename = "toHashFormat")]
    to_format: String,
}

View File

@@ -1,5 +1,3 @@
#![allow(dead_code)]
use crate::error::Result; use crate::error::Result;
mod config; mod config;
@@ -39,12 +37,4 @@ pub trait Store: Send + Sync {
content: &str, content: &str,
references: Vec<String>, references: Vec<String>,
) -> Result<String>; ) -> Result<String>;
fn make_fixed_output_path(
&self,
hash_algo: &str,
hash: &str,
hash_mode: &str,
name: &str,
) -> Result<String>;
} }

View File

@@ -304,51 +304,6 @@ impl Store for DaemonStore {
Ok(store_path_str) Ok(store_path_str)
} }
/// Compute a fixed-output store path via
/// `nix_compat::store_path::build_ca_path`.
///
/// Only hex-encoded sha256 digests are supported; `hash_mode == "r"`
/// selects NAR (recursive) hashing, anything else flat hashing.
/// NOTE(review): other call sites in this codebase use "recursive" as the
/// mode string — confirm "r" is the value actually passed to this method.
fn make_fixed_output_path(
    &self,
    hash_algo: &str,
    hash: &str,
    hash_mode: &str,
    name: &str,
) -> Result<String> {
    use nix_compat::nixhash::{CAHash, NixHash};
    use nix_compat::store_path::build_ca_path;
    let nix_hash = match hash_algo {
        "sha256" => {
            let hash_bytes = hex::decode(hash)
                .map_err(|e| Error::internal(format!("Invalid hash hex: {}", e)))?;
            if hash_bytes.len() != 32 {
                return Err(Error::internal(format!(
                    "Invalid sha256 hash length: expected 32, got {}",
                    hash_bytes.len()
                )));
            }
            let mut arr = [0u8; 32];
            arr.copy_from_slice(&hash_bytes);
            NixHash::Sha256(arr)
        }
        _ => {
            return Err(Error::internal(format!(
                "Unsupported hash algorithm: {}",
                hash_algo
            )));
        }
    };
    let ca_hash = if hash_mode == "r" {
        CAHash::Nar(nix_hash)
    } else {
        CAHash::Flat(nix_hash)
    };
    let store_path: nix_compat::store_path::StorePath<String> =
        build_ca_path(name, &ca_hash, Vec::<String>::new(), false)
            .map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
    Ok(store_path.to_absolute_path())
}
} }
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37); const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37);

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use std::fmt; use std::fmt;
#[derive(Debug)] #[derive(Debug)]

View File

@@ -0,0 +1,209 @@
use std::collections::{BTreeMap, BTreeSet, VecDeque};
/// One element of a Nix string context.
pub enum StringContextElem {
    /// A plain store path referenced by the string.
    Opaque { path: String },
    /// A whole-closure reference to a derivation (`=<drvPath>`).
    DrvDeep { drv_path: String },
    /// A single built output of a derivation (`!<output>!<drvPath>`).
    Built { drv_path: String, output: String },
}

impl StringContextElem {
    /// Decode one serialized context element:
    ///
    /// * `=<drvPath>`      — whole-closure ("DrvDeep") reference,
    /// * `!<out>!<drv>`    — a single built output,
    /// * anything else     — an opaque store path. A malformed `!...`
    ///   string (no second `!`) also falls back to `Opaque`, keeping the
    ///   full original text including the leading `!`.
    pub fn decode(encoded: &str) -> Self {
        if let Some(drv_path) = encoded.strip_prefix('=') {
            return StringContextElem::DrvDeep {
                drv_path: drv_path.to_string(),
            };
        }
        if let Some(tail) = encoded.strip_prefix('!') {
            if let Some(sep) = tail.find('!') {
                return StringContextElem::Built {
                    drv_path: tail[sep + 1..].to_string(),
                    output: tail[..sep].to_string(),
                };
            }
        }
        StringContextElem::Opaque {
            path: encoded.to_string(),
        }
    }
}
/// Map from derivation path to the set of its outputs that are referenced.
pub type InputDrvs = BTreeMap<String, BTreeSet<String>>;
/// Set of plain store paths referenced as sources.
pub type Srcs = BTreeSet<String>;

/// Partition a string context into input derivations (drv path -> wanted
/// outputs) and plain input sources.
///
/// `DrvDeep` elements pull in the whole file-system closure of the
/// referenced derivation via `compute_fs_closure`, whose errors propagate.
pub fn extract_input_drvs_and_srcs(context: &[String]) -> Result<(InputDrvs, Srcs), String> {
    let mut drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    let mut srcs: BTreeSet<String> = BTreeSet::new();
    for raw in context {
        match StringContextElem::decode(raw) {
            StringContextElem::Built { drv_path, output } => {
                drvs.entry(drv_path).or_default().insert(output);
            }
            StringContextElem::Opaque { path } => {
                srcs.insert(path);
            }
            StringContextElem::DrvDeep { drv_path } => {
                compute_fs_closure(&drv_path, &mut drvs, &mut srcs)?;
            }
        }
    }
    Ok((drvs, srcs))
}
/// Breadth-first walk over a derivation's file-system references,
/// accumulating into the caller's `input_drvs`/`input_srcs` collections.
///
/// Every visited path (including `.drv` files themselves) is added to
/// `input_srcs`; only `.drv` files are read from disk and parsed further.
/// Errors are formatted strings describing the failing read or parse.
fn compute_fs_closure(
    drv_path: &str,
    input_drvs: &mut BTreeMap<String, BTreeSet<String>>,
    input_srcs: &mut BTreeSet<String>,
) -> Result<(), String> {
    let mut queue: VecDeque<String> = VecDeque::new();
    let mut visited: BTreeSet<String> = BTreeSet::new();
    queue.push_back(drv_path.to_string());
    while let Some(current_path) = queue.pop_front() {
        if visited.contains(&current_path) {
            continue;
        }
        visited.insert(current_path.clone());
        input_srcs.insert(current_path.clone());
        // Non-derivation paths are leaves: recorded but not expanded.
        if !current_path.ends_with(".drv") {
            continue;
        }
        let content = std::fs::read_to_string(&current_path)
            .map_err(|e| format!("failed to read derivation {}: {}", current_path, e))?;
        let inputs = parse_derivation_inputs(&content)
            .ok_or_else(|| format!("failed to parse derivation {}", current_path))?;
        for src in inputs.input_srcs {
            input_srcs.insert(src.clone());
            if !visited.contains(&src) {
                queue.push_back(src);
            }
        }
        for (dep_drv, outputs) in inputs.input_drvs {
            input_srcs.insert(dep_drv.clone());
            // Merge: the same dep drv can appear with different outputs.
            let entry = input_drvs.entry(dep_drv.clone()).or_default();
            for output in outputs {
                entry.insert(output);
            }
            if !visited.contains(&dep_drv) {
                queue.push_back(dep_drv);
            }
        }
    }
    Ok(())
}
/// Inputs of one derivation as read from its ATerm representation.
struct DerivationInputs {
    // (drv path, output names referenced from that drv)
    input_drvs: Vec<(String, Vec<String>)>,
    // plain store paths referenced as sources
    input_srcs: Vec<String>,
}

/// Extract the `inputDrvs` and `inputSrcs` sections from a derivation in
/// ATerm format: `Derive([outputs],[inputDrvs],[inputSrcs],...)`.
///
/// Returns `None` when the text does not parse. The scan is bracket-depth
/// based; it assumes quoted store paths contain no `"`/`[`/`]` characters
/// (only the inputSrcs loop handles backslash escapes).
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
    let aterm = aterm.strip_prefix("Derive([")?;
    // Skip the outputs list: consume until its closing bracket.
    let mut bracket_count: i32 = 1;
    let mut pos = 0;
    let bytes = aterm.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            _ => {}
        }
        pos += 1;
    }
    if bracket_count != 0 {
        return None;
    }
    let rest = &aterm[pos..];
    let rest = rest.strip_prefix(",[")?;
    // Collect `("<drv>",["<out>",...])` entries from the inputDrvs list.
    let mut input_drvs = Vec::new();
    let mut bracket_count: i32 = 1;
    let mut start = 0;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'(' if bracket_count == 1 => {
                start = pos;
            }
            b')' if bracket_count == 1 => {
                let entry = &rest[start + 1..pos];
                if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
                    input_drvs.push((drv_path, outputs));
                }
            }
            _ => {}
        }
        pos += 1;
    }
    let rest = &rest[pos..];
    let rest = rest.strip_prefix(",[")?;
    // Collect quoted paths from the inputSrcs list (handles `\x` escapes).
    let mut input_srcs = Vec::new();
    bracket_count = 1;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'"' if bracket_count == 1 => {
                pos += 1;
                let src_start = pos;
                while pos < bytes.len() && bytes[pos] != b'"' {
                    if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
                        pos += 2;
                    } else {
                        pos += 1;
                    }
                }
                let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
                input_srcs.push(src.to_string());
            }
            _ => {}
        }
        pos += 1;
    }
    Some(DerivationInputs {
        input_drvs,
        input_srcs,
    })
}
/// Parse one inputDrvs entry of the form `"<drv path>",["<out>","<out>",...]`.
/// Returns `None` when the entry does not match that shape; unquoted list
/// elements are silently skipped.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    let body = entry.strip_prefix('"')?;
    let end_of_path = body.find('"')?;
    let drv_path = body[..end_of_path].to_string();
    let outputs_src = body[end_of_path + 1..]
        .strip_prefix(",[")?
        .strip_suffix(']')?;
    let mut outputs = Vec::new();
    for raw in outputs_src.split(',') {
        let trimmed = raw.trim();
        if let Some(unquoted) = trimmed.strip_prefix('"').and_then(|s| s.strip_suffix('"')) {
            outputs.push(unquoted.to_string());
        }
    }
    Some((drv_path, outputs))
}

View File

@@ -122,10 +122,7 @@ eval_okay_test!(concatmap);
eval_okay_test!(concatstringssep); eval_okay_test!(concatstringssep);
eval_okay_test!(context); eval_okay_test!(context);
eval_okay_test!(context_introspection); eval_okay_test!(context_introspection);
eval_okay_test!( eval_okay_test!(convertHash);
#[ignore = "not implemented: convertHash"]
convertHash
);
eval_okay_test!(curpos); eval_okay_test!(curpos);
eval_okay_test!(deepseq); eval_okay_test!(deepseq);
eval_okay_test!(delayed_with); eval_okay_test!(delayed_with);
@@ -158,24 +155,15 @@ eval_okay_test!(
fromTOML_timestamps fromTOML_timestamps
); );
eval_okay_test!(functionargs); eval_okay_test!(functionargs);
eval_okay_test!( eval_okay_test!(hashfile);
#[ignore = "not implemented: hashFile"] eval_okay_test!(hashstring);
hashfile
);
eval_okay_test!(
#[ignore = "not implemented: hashString"]
hashstring
);
eval_okay_test!(getattrpos); eval_okay_test!(getattrpos);
eval_okay_test!(getattrpos_functionargs); eval_okay_test!(getattrpos_functionargs);
eval_okay_test!(getattrpos_undefined); eval_okay_test!(getattrpos_undefined);
eval_okay_test!(getenv, || { eval_okay_test!(getenv, || {
unsafe { std::env::set_var("TEST_VAR", "foo") }; unsafe { std::env::set_var("TEST_VAR", "foo") };
}); });
eval_okay_test!( eval_okay_test!(groupBy);
#[ignore = "not implemented: hashString"]
groupBy
);
eval_okay_test!(r#if); eval_okay_test!(r#if);
eval_okay_test!(ind_string); eval_okay_test!(ind_string);
eval_okay_test!(import); eval_okay_test!(import);
@@ -265,10 +253,7 @@ eval_okay_test!(tryeval);
eval_okay_test!(types); eval_okay_test!(types);
eval_okay_test!(versions); eval_okay_test!(versions);
eval_okay_test!(with); eval_okay_test!(with);
eval_okay_test!( eval_okay_test!(zipAttrsWith);
#[ignore = "not implemented: hashString"]
zipAttrsWith
);
eval_fail_test!(fail_abort); eval_fail_test!(fail_abort);
eval_fail_test!(fail_addDrvOutputDependencies_empty_context); eval_fail_test!(fail_addDrvOutputDependencies_empty_context);

Binary file not shown.