Compare commits

..

31 Commits

Author SHA1 Message Date
1083b8fdfa temp 2026-03-13 21:57:23 +08:00
7a7229d70e fmt: group_imports = "StdExternalCrate" 2026-03-08 17:41:06 +08:00
e4004ccb6d feat: bytecode 2026-03-08 17:41:06 +08:00
843ae6cfb4 refactor: use GhostCell to provide interior mutability in Ir 2026-03-08 16:57:34 +08:00
c24d6a8bb3 chore: merge codegen::compile and codegen::compile_scoped 2026-02-25 21:14:35 +08:00
d7351e907b feat: thunk caching (WIP) 2026-02-25 21:14:27 +08:00
550223a1d7 refactor: tidy 2026-02-21 22:30:13 +08:00
53dbee3514 refactor(downgrade): use bumpalo 2026-02-21 21:54:40 +08:00
e1517c338e chore: tidy 2026-02-20 16:12:54 +08:00
45096f5254 fix: fetchGit 2026-02-19 22:34:41 +08:00
b57fea3104 optimize: short-circuit update (//) 2026-02-19 21:54:55 +08:00
4380fa85c4 optimize: compact 2026-02-19 21:14:02 +08:00
99045aa76c chore: fmt 2026-02-19 20:14:06 +08:00
7eb3acf26f optimize: use v8::Script::compile to run script directly in op_import 2026-02-19 19:58:02 +08:00
b424f60f9f optimize: avoid using #[serde] in ops 2026-02-19 19:11:56 +08:00
42031edac1 optimize: generate shorter code 2026-02-19 17:16:35 +08:00
04dcadfd61 optimize 2026-02-18 20:45:50 +08:00
c3c39bda0c feat: v8 profiling 2026-02-18 20:45:50 +08:00
782092b91e optimize: type check 2026-02-18 20:45:50 +08:00
ae5febd5dd feat(cli): compile 2026-02-18 20:45:42 +08:00
3cc7c7be75 chore: update deps; restructure tests; use Map over Record 2026-02-17 12:02:54 +08:00
f49634ccc0 optimize: use Map to represent NixAttrs 2026-02-17 10:35:37 +08:00
4a885c18b8 chore: add eslint 2026-02-17 10:35:37 +08:00
37e395c0e3 optimize: builtins.intersectAttrs 2026-02-16 23:07:52 +08:00
16a8480d29 feat(cli): support eval file 2026-02-16 21:52:08 +08:00
f0a0593d4c feat: implement builtins.toXML 2026-02-16 19:52:33 +08:00
ce64a82da3 feat: inspector 2026-02-16 19:52:33 +08:00
5c48e5cfdd feat: implement hash related primops 2026-02-15 19:55:29 +08:00
7836f8c869 refactor: handle derivation generation on Rust side 2026-02-15 19:38:11 +08:00
e357678d70 feat: implement realisePath 2026-02-15 18:26:24 +08:00
2f2c690023 chore(runtime-ts): fix linter errors 2026-02-15 12:20:31 +08:00
512 changed files with 10280 additions and 4501 deletions

3
.gitignore vendored
View File

@@ -7,3 +7,6 @@ flamegraph*.svg
perf.data* perf.data*
profile.json.gz profile.json.gz
prof.json prof.json
*.cpuprofile
*.cpuprofile.gz
*v8.log*

View File

@@ -3,5 +3,25 @@ vim.lsp.config("biome", {
on_dir(vim.fn.getcwd()) on_dir(vim.fn.getcwd())
end end
}) })
vim.lsp.config("eslint", {
settings = {
eslint = {
options = {
configFile = "./nix-js/runtime-ts/eslint.config.mts"
}
}
}
})
vim.lsp.config("rust_analyzer", {
settings = {
["rust-analyzer"] = {
cargo = {
features = {
"inspector"
}
}
}
}
})
return {} return {}

392
Cargo.lock generated
View File

@@ -41,18 +41,68 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "allocator-api2"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c880a97d28a3681c0267bd29cff89621202715b065127cd445fa0f0fe0aa2880"
[[package]] [[package]]
name = "anes" name = "anes"
version = "0.1.6" version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
version = "0.6.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.13" version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "anstyle-parse"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
"anstyle",
"once_cell_polyfill",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.101" version = "1.0.101"
@@ -123,6 +173,12 @@ dependencies = [
"backtrace", "backtrace",
] ]
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.22.1" version = "0.22.1"
@@ -226,6 +282,15 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "boxing"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a817f12ef805b34fe1565bea00630d84f8f08bf26200b05c41456c77cdada88"
dependencies = [
"sptr",
]
[[package]] [[package]]
name = "bstr" name = "bstr"
version = "1.12.1" version = "1.12.1"
@@ -239,9 +304,12 @@ dependencies = [
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.19.1" version = "3.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb"
dependencies = [
"allocator-api2 0.2.21",
]
[[package]] [[package]]
name = "bytes" name = "bytes"
@@ -379,21 +447,36 @@ dependencies = [
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.58" version = "4.5.59"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806" checksum = "c5caf74d17c3aec5495110c34cc3f78644bfa89af6c8993ed4de2790e49b6499"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
"clap_derive",
] ]
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.58" version = "4.5.59"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2" checksum = "370daa45065b80218950227371916a1633217ae42b2715b2287b606dcd618e24"
dependencies = [ dependencies = [
"anstream",
"anstyle", "anstyle",
"clap_lex", "clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
] ]
[[package]] [[package]]
@@ -420,6 +503,21 @@ dependencies = [
"cc", "cc",
] ]
[[package]]
name = "colorchoice"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "colored"
version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf9468729b8cbcea668e36183cb69d317348c2e08e994829fb56ebfdfbaac34"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "combine" name = "combine"
version = "4.6.7" version = "4.6.7"
@@ -445,15 +543,6 @@ dependencies = [
"unicode-segmentation", "unicode-segmentation",
] ]
[[package]]
name = "convert_case"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "affbf0190ed2caf063e3def54ff444b449371d55c58e513a95ab98eca50adb49"
dependencies = [
"unicode-segmentation",
]
[[package]] [[package]]
name = "cooked-waker" name = "cooked-waker"
version = "5.0.0" version = "5.0.0"
@@ -764,7 +853,7 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
dependencies = [ dependencies = [
"convert_case 0.10.0", "convert_case",
"proc-macro2", "proc-macro2",
"quote", "quote",
"rustc_version", "rustc_version",
@@ -888,6 +977,27 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
[[package]]
name = "env_filter"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f"
dependencies = [
"log",
]
[[package]]
name = "env_logger"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d"
dependencies = [
"anstream",
"anstyle",
"env_filter",
"log",
]
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
@@ -962,6 +1072,26 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fastwebsockets"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "305d3ba574508e27190906d11707dad683e0494e6b85eae9b044cb2734a5e422"
dependencies = [
"base64 0.21.7",
"bytes",
"http-body-util",
"hyper",
"hyper-util",
"pin-project",
"rand 0.8.5",
"sha1",
"simdutf8",
"thiserror 1.0.69",
"tokio",
"utf-8",
]
[[package]] [[package]]
name = "fd-lock" name = "fd-lock"
version = "4.0.4" version = "4.0.4"
@@ -1051,9 +1181,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]] [[package]]
name = "futures" name = "futures"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@@ -1066,9 +1196,9 @@ dependencies = [
[[package]] [[package]]
name = "futures-channel" name = "futures-channel"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-sink", "futures-sink",
@@ -1076,15 +1206,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-core" name = "futures-core"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
[[package]] [[package]]
name = "futures-executor" name = "futures-executor"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-task", "futures-task",
@@ -1093,15 +1223,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-io" name = "futures-io"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
[[package]] [[package]]
name = "futures-macro" name = "futures-macro"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1110,21 +1240,21 @@ dependencies = [
[[package]] [[package]]
name = "futures-sink" name = "futures-sink"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
[[package]] [[package]]
name = "futures-task" name = "futures-task"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
[[package]] [[package]]
name = "futures-util" name = "futures-util"
version = "0.3.31" version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@@ -1134,10 +1264,33 @@ dependencies = [
"futures-task", "futures-task",
"memchr", "memchr",
"pin-project-lite", "pin-project-lite",
"pin-utils",
"slab", "slab",
] ]
[[package]]
name = "gc-arena"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cd70cf88a32937834aae9614ff2569b5d9467fa0c42c5d7762fd94a8de88266"
dependencies = [
"allocator-api2 0.2.21",
"gc-arena-derive",
"hashbrown 0.14.5",
"sptr",
]
[[package]]
name = "gc-arena-derive"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c612a69f5557a11046b77a7408d2836fe77077f842171cd211c5ef504bd3cddd"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]] [[package]]
name = "generic-array" name = "generic-array"
version = "0.14.7" version = "0.14.7"
@@ -1188,6 +1341,12 @@ dependencies = [
"wasip3", "wasip3",
] ]
[[package]]
name = "ghost-cell"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8449d342b1c67f49169e92e71deb7b9b27f30062301a16dbc27a4cc8d2351b7"
[[package]] [[package]]
name = "gimli" name = "gimli"
version = "0.32.3" version = "0.32.3"
@@ -1225,6 +1384,9 @@ name = "hashbrown"
version = "0.14.5" version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"allocator-api2 0.2.21",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
@@ -1241,7 +1403,7 @@ version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
dependencies = [ dependencies = [
"allocator-api2", "allocator-api2 0.2.21",
"equivalent", "equivalent",
"foldhash 0.2.0", "foldhash 0.2.0",
] ]
@@ -1315,6 +1477,12 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "1.8.1" version = "1.8.1"
@@ -1328,6 +1496,7 @@ dependencies = [
"http", "http",
"http-body", "http-body",
"httparse", "httparse",
"httpdate",
"itoa", "itoa",
"pin-project-lite", "pin-project-lite",
"pin-utils", "pin-utils",
@@ -1358,7 +1527,7 @@ version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@@ -1578,6 +1747,12 @@ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.60.2",
] ]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.13.0" version = "0.13.0"
@@ -1781,6 +1956,12 @@ dependencies = [
"regex-automata", "regex-automata",
] ]
[[package]]
name = "md5"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0"
[[package]] [[package]]
name = "memchr" name = "memchr"
version = "2.8.0" version = "2.8.0"
@@ -1877,7 +2058,7 @@ dependencies = [
[[package]] [[package]]
name = "nix-compat" name = "nix-compat"
version = "0.1.0" version = "0.1.0"
source = "git+https://git.snix.dev/snix/snix.git#9d414bec7c7fff1fca6ba6d14dda6c4295521260" source = "git+https://git.snix.dev/snix/snix.git#1b37f68842a7e5e226d9dc009e9a90d400c5fb14"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"bstr", "bstr",
@@ -1900,7 +2081,7 @@ dependencies = [
[[package]] [[package]]
name = "nix-compat-derive" name = "nix-compat-derive"
version = "0.1.0" version = "0.1.0"
source = "git+https://git.snix.dev/snix/snix.git#9d414bec7c7fff1fca6ba6d14dda6c4295521260" source = "git+https://git.snix.dev/snix/snix.git#1b37f68842a7e5e226d9dc009e9a90d400c5fb14"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1911,23 +2092,35 @@ dependencies = [
name = "nix-js" name = "nix-js"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"allocator-api2 0.4.0",
"anyhow", "anyhow",
"base64", "base64 0.22.1",
"boxing",
"bumpalo",
"bzip2", "bzip2",
"clap",
"colored",
"criterion", "criterion",
"deno_core", "deno_core",
"deno_error", "deno_error",
"derive_more", "derive_more",
"dirs", "dirs",
"ere", "ere",
"fastwebsockets",
"flate2", "flate2",
"gc-arena",
"ghost-cell",
"hashbrown 0.16.1", "hashbrown 0.16.1",
"hex", "hex",
"http",
"http-body-util",
"hyper",
"hyper-util",
"itertools 0.14.0", "itertools 0.14.0",
"md5",
"miette", "miette",
"mimalloc", "mimalloc",
"nix-compat", "nix-compat",
"nix-js-macros",
"nix-nar", "nix-nar",
"num_enum", "num_enum",
"regex", "regex",
@@ -1939,28 +2132,23 @@ dependencies = [
"rustyline", "rustyline",
"serde", "serde",
"serde_json", "serde_json",
"sha1",
"sha2", "sha2",
"smallvec",
"string-interner", "string-interner",
"tap", "tap",
"tar", "tar",
"tempfile", "tempfile",
"test-log",
"thiserror 2.0.18", "thiserror 2.0.18",
"tokio", "tokio",
"toml", "toml",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
"uuid",
"xz2", "xz2",
] ]
[[package]]
name = "nix-js-macros"
version = "0.1.0"
dependencies = [
"convert_case 0.11.0",
"quote",
"syn",
]
[[package]] [[package]]
name = "nix-nar" name = "nix-nar"
version = "0.3.1" version = "0.3.1"
@@ -2067,6 +2255,12 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "once_cell_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]] [[package]]
name = "oorandom" name = "oorandom"
version = "11.1.5" version = "11.1.5"
@@ -2359,6 +2553,8 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4", "rand_core 0.6.4",
] ]
@@ -2368,10 +2564,20 @@ version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [ dependencies = [
"rand_chacha", "rand_chacha 0.9.0",
"rand_core 0.9.5", "rand_core 0.9.5",
] ]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.4",
]
[[package]] [[package]]
name = "rand_chacha" name = "rand_chacha"
version = "0.9.0" version = "0.9.0"
@@ -2484,7 +2690,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@@ -2541,9 +2747,9 @@ dependencies = [
[[package]] [[package]]
name = "rnix" name = "rnix"
version = "0.13.0" version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b8276b540c344ec04cd215fb3d35db378c2b1861cc44802c2c097f3490f6e52" checksum = "c163bd17372eecdf10d351c34584b7de7c1a33be4e92a32f3fb3f5a7fe3f579b"
dependencies = [ dependencies = [
"rowan", "rowan",
] ]
@@ -2895,6 +3101,17 @@ dependencies = [
"v8", "v8",
] ]
[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]] [[package]]
name = "sha2" name = "sha2"
version = "0.10.9" version = "0.10.9"
@@ -2946,6 +3163,12 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
[[package]]
name = "simdutf8"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.4.12" version = "0.4.12"
@@ -2996,6 +3219,12 @@ dependencies = [
"der", "der",
] ]
[[package]]
name = "sptr"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a"
[[package]] [[package]]
name = "sqlite-wasm-rs" name = "sqlite-wasm-rs"
version = "0.5.2" version = "0.5.2"
@@ -3045,6 +3274,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72abeda133c49d7bddece6c154728f83eec8172380c80ab7096da9487e20d27c" checksum = "72abeda133c49d7bddece6c154728f83eec8172380c80ab7096da9487e20d27c"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]] [[package]]
name = "strum" name = "strum"
version = "0.27.2" version = "0.27.2"
@@ -3101,9 +3336,9 @@ checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a"
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.115" version = "2.0.116"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12" checksum = "3df424c70518695237746f84cede799c9c58fcb37450d7b23716568cc8bc69cb"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -3112,9 +3347,9 @@ dependencies = [
[[package]] [[package]]
name = "syn-match" name = "syn-match"
version = "0.3.0" version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "783c4140d7ed89f37116e865b49e5a9fdd28608b9071a9dd1e158b50fc0a31fc" checksum = "54b8f0a9004d6aafa6a588602a1119e6cdaacec9921aa1605383e6e7d6258fd6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -3234,6 +3469,28 @@ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.60.2",
] ]
[[package]]
name = "test-log"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d53ac171c92a39e4769491c4b4dde7022c60042254b5fc044ae409d34a24d4"
dependencies = [
"env_logger",
"test-log-macros",
"tracing-subscriber",
]
[[package]]
name = "test-log-macros"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be35209fd0781c5401458ab66e4f98accf63553e8fae7425503e92fdd319783b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "text-size" name = "text-size"
version = "1.1.1" version = "1.1.1"
@@ -3387,9 +3644,9 @@ dependencies = [
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.9.12+spec-1.1.0" version = "0.9.9+spec-1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" checksum = "eb5238e643fc34a1d5d7e753e1532a91912d74b63b92b3ea51fde8d1b7bc79dd"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"serde_core", "serde_core",
@@ -3423,9 +3680,9 @@ dependencies = [
[[package]] [[package]]
name = "toml_parser" name = "toml_parser"
version = "1.0.8+spec-1.1.0" version = "1.0.9+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc" checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
dependencies = [ dependencies = [
"winnow", "winnow",
] ]
@@ -3562,9 +3819,9 @@ checksum = "81b79ad29b5e19de4260020f8919b443b2ef0277d242ce532ec7b7a2cc8b6007"
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.23" version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e" checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
[[package]] [[package]]
name = "unicode-linebreak" name = "unicode-linebreak"
@@ -3615,6 +3872,12 @@ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]] [[package]]
name = "utf8-ranges" name = "utf8-ranges"
version = "1.0.5" version = "1.0.5"
@@ -3639,6 +3902,7 @@ version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb"
dependencies = [ dependencies = [
"getrandom 0.4.1",
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]

View File

@@ -1,8 +1,7 @@
[workspace] [workspace]
resolver = "3" resolver = "3"
members = [ members = [
"nix-js", "nix-js"
"nix-js-macros"
] ]
[profile.profiling] [profile.profiling]

View File

@@ -1,15 +1,31 @@
[no-exit-message] [no-exit-message]
@repl: @repl:
cargo run --bin repl cargo run -- repl
[no-exit-message] [no-exit-message]
@eval expr: @eval expr:
cargo run --bin eval -- '{{expr}}' cargo run -- eval --expr '{{expr}}'
[no-exit-message] [no-exit-message]
@replr: @replr:
cargo run --bin repl --release cargo run --release -- repl
[no-exit-message] [no-exit-message]
@evalr expr: @evalr expr:
cargo run --bin eval --release -- '{{expr}}' cargo run --release -- eval --expr '{{expr}}'
[no-exit-message]
@repli:
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 repl
[no-exit-message]
@evali expr:
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 eval --expr '{{expr}}'
[no-exit-message]
@replp:
cargo run --release --features prof -- repl
[no-exit-message]
@evalp expr:
cargo run --release --features prof -- eval --expr '{{expr}}'

View File

@@ -1,5 +1,5 @@
{ {
"$schema": "https://biomejs.dev/schemas/2.3.9/schema.json", "$schema": "https://biomejs.dev/schemas/2.3.14/schema.json",
"vcs": { "vcs": {
"enabled": true, "enabled": true,
"clientKind": "git", "clientKind": "git",
@@ -20,10 +20,37 @@
"linter": { "linter": {
"rules": { "rules": {
"style": { "style": {
"useNamingConvention": "warn" "useNamingConvention": {
"level": "warn",
"options": {
"strictCase": false,
"conventions": [
{
"selector": { "kind": "objectLiteralProperty" },
"formats": ["camelCase", "PascalCase", "CONSTANT_CASE"]
},
{
"selector": { "kind": "typeProperty" },
"formats": ["camelCase", "snake_case"]
}
]
}
}
} }
} }
}, },
"overrides": [
{
"includes": ["**/global.d.ts"],
"linter": {
"rules": {
"style": {
"useNamingConvention": "off"
}
}
}
}
],
"javascript": { "javascript": {
"formatter": { "formatter": {
"arrowParentheses": "always", "arrowParentheses": "always",

View File

@@ -34,6 +34,7 @@
just just
samply samply
jq jq
tokei
nodejs nodejs
nodePackages.npm nodePackages.npm

View File

@@ -1,12 +0,0 @@
[package]
name = "nix-js-macros"
version = "0.1.0"
edition = "2024"
[lib]
proc-macro = true
[dependencies]
convert_case = "0.11"
quote = "1.0"
syn = { version = "2.0", features = ["full"] }

View File

@@ -1,252 +0,0 @@
//! Implements the `ir!` procedural macro.
//!
//! This macro is designed to reduce the boilerplate associated with defining
//! an Intermediate Representation (IR) that follows a specific pattern. It generates:
//! 1. An enum representing the different kinds of IR nodes.
//! 2. Structs for each of the variants that have fields.
//! 3. `Ref` and `Mut` versions of the main enum for ergonomic pattern matching on references.
//! 4. `From` implementations to easily convert from a struct variant (e.g., `BinOp`) to the main enum (`Ir::BinOp`).
//! 5. A `To[IrName]` trait to provide a convenient `.to_ir()` method on the variant structs.
use convert_case::{Case, Casing};
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{
FieldsNamed, Ident, Token, Type, parenthesized,
parse::{Parse, ParseStream, Result},
punctuated::Punctuated,
token,
};
/// Represents one of the variants passed to the `ir!` macro.
pub enum VariantInput {
/// A unit-like variant, e.g., `Arg`.
Unit(Ident),
/// A tuple-like variant with one unnamed field, e.g., `ExprRef(ExprId)`.
Tuple(Ident, Type),
/// A struct-like variant with named fields, e.g., `BinOp { lhs: ExprId, rhs: ExprId, kind: BinOpKind }`.
Struct(Ident, FieldsNamed),
}
/// The top-level input for the `ir!` macro.
pub struct MacroInput {
/// The name of the main IR enum to be generated (e.g., `Ir`).
pub base_name: Ident,
/// The list of variants for the enum.
pub variants: Punctuated<VariantInput, Token![,]>,
}
impl Parse for VariantInput {
fn parse(input: ParseStream) -> Result<Self> {
let name: Ident = input.parse()?;
if input.peek(token::Paren) {
// Parse a tuple-like variant: `Variant(Type)`
let content;
parenthesized!(content in input);
let ty: Type = content.parse()?;
if !content.is_empty() {
return Err(content.error("Expected a single type inside parentheses"));
}
Ok(VariantInput::Tuple(name, ty))
} else if input.peek(token::Brace) {
// Parse a struct-like variant: `Variant { field: Type, ... }`
let fields: FieldsNamed = input.parse()?;
Ok(VariantInput::Struct(name, fields))
} else {
// Parse a unit-like variant: `Variant`
Ok(VariantInput::Unit(name))
}
}
}
impl Parse for MacroInput {
fn parse(input: ParseStream) -> Result<Self> {
// The macro input is expected to be: `IrName, Variant1, Variant2, ...`
let base_name = input.parse()?;
input.parse::<Token![,]>()?;
let variants = Punctuated::parse_terminated(input)?;
Ok(MacroInput {
base_name,
variants,
})
}
}
/// The implementation of the `ir!` macro.
pub fn ir_impl(input: TokenStream) -> TokenStream {
let parsed_input = syn::parse_macro_input!(input as MacroInput);
let base_name = &parsed_input.base_name;
let ref_name = format_ident!("{}Ref", base_name);
let mut_name = format_ident!("{}Mut", base_name);
let to_trait_name = format_ident!("To{}", base_name);
let to_trait_fn_name = format_ident!("to_{}", base_name.to_string().to_case(Case::Snake));
let mut enum_variants = Vec::new();
let mut struct_defs = Vec::new();
let mut ref_variants = Vec::new();
let mut mut_variants = Vec::new();
let mut as_ref_arms = Vec::new();
let mut as_mut_arms = Vec::new();
let mut span_arms = Vec::new();
let mut from_impls = Vec::new();
let mut to_trait_impls = Vec::new();
for variant in parsed_input.variants {
match variant {
VariantInput::Unit(name) => {
let inner_type = name.clone();
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name {
pub span: rnix::TextRange,
}
});
enum_variants.push(quote! { #name(#inner_type) });
ref_variants.push(quote! { #name(&'a #inner_type) });
mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! {
impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
}
});
to_trait_impls.push(quote! {
impl #to_trait_name for #name {
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
}
});
}
VariantInput::Tuple(name, ty) => {
let field_name = format_ident!("inner");
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name {
pub #field_name: #ty,
pub span: rnix::TextRange,
}
});
let inner_type = name.clone();
enum_variants.push(quote! { #name(#inner_type) });
ref_variants.push(quote! { #name(&'a #inner_type) });
mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! {
impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
}
});
to_trait_impls.push(quote! {
impl #to_trait_name for #name {
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
}
});
}
VariantInput::Struct(name, mut fields) => {
let inner_type = name.clone();
fields.named.push(syn::Field {
attrs: vec![],
vis: syn::Visibility::Public(syn::token::Pub::default()),
mutability: syn::FieldMutability::None,
ident: Some(format_ident!("span")),
colon_token: Some(syn::token::Colon::default()),
ty: syn::parse_quote!(rnix::TextRange),
});
struct_defs.push(quote! {
#[derive(Debug)]
pub struct #name #fields
});
enum_variants.push(quote! { #name(#inner_type) });
ref_variants.push(quote! { #name(&'a #inner_type) });
mut_variants.push(quote! { #name(&'a mut #inner_type) });
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
span_arms.push(quote! { Self::#name(inner) => inner.span });
from_impls.push(quote! {
impl From<#inner_type> for #base_name {
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
}
});
to_trait_impls.push(quote! {
impl #to_trait_name for #name {
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
}
});
}
}
}
// Assemble the final generated code.
let expanded = quote! {
/// The main IR enum, generated by the `ir!` macro.
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
pub enum #base_name {
#( #enum_variants ),*
}
// The struct definitions for the enum variants.
#( #struct_defs )*
/// An immutable reference version of the IR enum.
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
pub enum #ref_name<'a> {
#( #ref_variants ),*
}
/// A mutable reference version of the IR enum.
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
pub enum #mut_name<'a> {
#( #mut_variants ),*
}
impl #base_name {
/// Converts a `&Ir` into a `IrRef`.
pub fn as_ref(&self) -> #ref_name<'_> {
match self {
#( #as_ref_arms ),*
}
}
/// Converts a `&mut Ir` into a `IrMut`.
pub fn as_mut(&mut self) -> #mut_name<'_> {
match self {
#( #as_mut_arms ),*
}
}
pub fn span(&self) -> rnix::TextRange {
match self {
#( #span_arms ),*
}
}
}
// `From` implementations for converting variant structs into the main enum.
#( #from_impls )*
/// A trait for converting a variant struct into the main IR enum.
pub trait #to_trait_name {
/// Performs the conversion.
fn #to_trait_fn_name(self) -> #base_name;
}
// Implement the `ToIr` trait for each variant struct.
#( #to_trait_impls )*
};
TokenStream::from(expanded)
}

View File

@@ -1,13 +0,0 @@
//! This crate provides procedural macros for the nix-js project.
use proc_macro::TokenStream;
mod ir;
/// A procedural macro to reduce boilerplate when defining an Intermediate Representation (IR).
///
/// It generates an enum for the IR, along with `Ref` and `Mut` variants,
/// `From` implementations, and a `ToIr` trait.
#[proc_macro]
pub fn ir(input: TokenStream) -> TokenStream {
ir::ir_impl(input)
}

View File

@@ -2,7 +2,6 @@
name = "nix-js" name = "nix-js"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
build = "build.rs"
[dependencies] [dependencies]
mimalloc = "0.1" mimalloc = "0.1"
@@ -14,6 +13,9 @@ nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", fe
anyhow = "1.0" anyhow = "1.0"
rustyline = "17.0" rustyline = "17.0"
# CLI
clap = { version = "4", features = ["derive"] }
# Logging # Logging
tracing = "0.1" tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] } tracing-subscriber = { version = "0.3", features = ["env-filter"] }
@@ -24,6 +26,7 @@ miette = { version = "7.4", features = ["fancy"] }
hashbrown = "0.16" hashbrown = "0.16"
string-interner = "0.19" string-interner = "0.19"
bumpalo = { version = "3.20", features = ["allocator-api2", "boxed", "collections"] }
rust-embed="8.11" rust-embed="8.11"
@@ -36,6 +39,8 @@ deno_error = "0.7"
nix-nar = "0.3" nix-nar = "0.3"
sha2 = "0.10" sha2 = "0.10"
sha1 = "0.10"
md5 = "0.8"
hex = "0.4" hex = "0.4"
base64 = "0.22" base64 = "0.22"
@@ -48,21 +53,40 @@ bzip2 = "0.6"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
# spec 1.0.0 # spec 1.0.0
toml = "0.9.9" toml = "=0.9.9"
dirs = "6.0" dirs = "6.0"
tempfile = "3.24" tempfile = "3.24"
rusqlite = { version = "0.38", features = ["bundled"] } rusqlite = { version = "0.38", features = ["bundled"] }
rnix = "0.13" rnix = "0.14"
rowan = "0.16" rowan = "0.16"
nix-js-macros = { path = "../nix-js-macros" }
ere = "0.2.4" ere = "0.2.4"
num_enum = "0.7.5" num_enum = "0.7.5"
tap = "1.0.1" tap = "1.0.1"
# Inspector (optional)
fastwebsockets = { version = "0.10", features = ["upgrade"], optional = true }
hyper = { version = "1", features = ["http1", "server"], optional = true }
hyper-util = { version = "0.1", features = ["tokio"], optional = true }
http-body-util = { version = "0.1", optional = true }
http = { version = "1", optional = true }
uuid = { version = "1", features = ["v4"], optional = true }
ghost-cell = "0.2"
colored = "3.1"
boxing = "0.1"
gc-arena = { version = "0.5.3", features = ["allocator-api2"] }
allocator-api2 = "0.4.0"
smallvec = "1.15.1"
[features]
inspector = ["dep:fastwebsockets", "dep:hyper", "dep:hyper-util", "dep:http-body-util", "dep:http", "dep:uuid"]
prof = []
[dev-dependencies] [dev-dependencies]
criterion = { version = "0.8", features = ["html_reports"] } criterion = { version = "0.8", features = ["html_reports"] }
test-log = { version = "0.2", features = ["trace"] }
[[bench]] [[bench]]
name = "basic_ops" name = "basic_ops"

View File

@@ -1,7 +1,8 @@
mod utils; mod utils;
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box; use std::hint::black_box;
use criterion::{Criterion, criterion_group, criterion_main};
use utils::eval; use utils::eval;
fn bench_arithmetic(c: &mut Criterion) { fn bench_arithmetic(c: &mut Criterion) {

View File

@@ -1,7 +1,8 @@
mod utils; mod utils;
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box; use std::hint::black_box;
use criterion::{Criterion, criterion_group, criterion_main};
use utils::eval; use utils::eval;
fn bench_builtin_math(c: &mut Criterion) { fn bench_builtin_math(c: &mut Criterion) {

View File

@@ -1,8 +1,9 @@
mod utils; mod utils;
use std::hint::black_box;
use criterion::{Criterion, criterion_group, criterion_main}; use criterion::{Criterion, criterion_group, criterion_main};
use nix_js::context::Context; use nix_js::context::Context;
use std::hint::black_box;
use utils::compile; use utils::compile;
fn bench_parse_and_downgrade(c: &mut Criterion) { fn bench_parse_and_downgrade(c: &mut Criterion) {

View File

@@ -1,7 +1,8 @@
mod utils; mod utils;
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box; use std::hint::black_box;
use criterion::{Criterion, criterion_group, criterion_main};
use utils::eval; use utils::eval;
fn bench_non_recursive(c: &mut Criterion) { fn bench_non_recursive(c: &mut Criterion) {

View File

@@ -1,70 +0,0 @@
use std::path::Path;
use std::process::Command;
fn main() {
let runtime_ts_dir = Path::new("runtime-ts");
let dist_runtime = runtime_ts_dir.join("dist/runtime.js");
if !runtime_ts_dir.exists() {
println!("cargo::warning=runtime-ts directory not found, using existing runtime.js");
return;
}
println!("cargo::rerun-if-changed=runtime-ts/src");
println!("cargo::rerun-if-changed=runtime-ts/package.json");
println!("cargo::rerun-if-changed=runtime-ts/tsconfig.json");
println!("cargo::rerun-if-changed=runtime-ts/build.mjs");
if !runtime_ts_dir.join("node_modules").exists() {
println!("Installing npm dependencies...");
let npm_cmd = if cfg!(target_os = "windows") {
"npm.cmd"
} else {
"npm"
};
let status = Command::new(npm_cmd)
.arg("install")
.current_dir(runtime_ts_dir)
.status()
.expect("Failed to run npm install. Is Node.js installed?");
if !status.success() {
panic!("npm install failed. Please check your Node.js installation.");
}
}
println!("Running TypeScript type checking...");
let npm_cmd = if cfg!(target_os = "windows") {
"npm.cmd"
} else {
"npm"
};
let status = Command::new(npm_cmd)
.arg("run")
.arg("typecheck")
.current_dir(runtime_ts_dir)
.status()
.expect("Failed to run type checking");
if !status.success() {
panic!("TypeScript type checking failed! Fix type errors before building.");
}
println!("Building runtime.js from TypeScript...");
let status = Command::new(npm_cmd)
.arg("run")
.arg("build")
.current_dir(runtime_ts_dir)
.status()
.expect("Failed to build runtime");
if !status.success() {
panic!("Runtime build failed!");
}
if dist_runtime.exists() {
println!("Successfully built runtime.js",);
} else {
panic!("dist/runtime.js not found after build");
}
}

View File

@@ -0,0 +1,20 @@
import js from "@eslint/js";
import { defineConfig } from "eslint/config";
import globals from "globals";
import tseslint from "typescript-eslint";
export default defineConfig([
js.configs.recommended,
...tseslint.configs.recommended,
{
files: ["**/*.{js,mjs,cjs,ts,mts,cts}"],
languageOptions: { globals: globals.es2022 },
rules: {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { varsIgnorePattern: "^_", argsIgnorePattern: "^_" }],
},
},
{
ignores: ["dist/**/*"],
},
]);

File diff suppressed because it is too large Load Diff

View File

@@ -3,15 +3,20 @@
"version": "0.1.0", "version": "0.1.0",
"private": true, "private": true,
"scripts": { "scripts": {
"check": "tsc --noEmit && npx eslint && biome check",
"typecheck": "tsc --noEmit", "typecheck": "tsc --noEmit",
"build": "node build.mjs", "build": "node build.mjs",
"dev": "npm run typecheck && npm run build" "dev": "npm run typecheck && npm run build"
}, },
"devDependencies": { "devDependencies": {
"esbuild": "^0.24.2", "esbuild": "^0.24.2",
"typescript": "^5.7.2" "eslint": "^9.39.2",
"typescript": "^5.7.2",
"typescript-eslint": "^8.55.0",
"jiti": "^2.6.1"
}, },
"dependencies": { "dependencies": {
"globals": "^17.3.0",
"js-sdsl": "^4.4.2" "js-sdsl": "^4.4.2"
} }
} }

View File

@@ -1,30 +1,26 @@
/**
* Arithmetic builtin functions
*/
import type { NixBool, NixInt, NixNumber, NixValue } from "../types";
import { forceNumeric, coerceNumeric, forceInt } from "../type-assert";
import { op } from "../operators"; import { op } from "../operators";
import { coerceNumeric, forceInt, forceNumeric } from "../type-assert";
import type { NixBool, NixInt, NixNumber, NixValue } from "../types";
export const add = export const add =
(a: NixValue) => (a: NixValue) =>
(b: NixValue): bigint | number => { (b: NixValue): bigint | number => {
const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (av as any) + (bv as any); return (av as never) + (bv as never);
}; };
export const sub = export const sub =
(a: NixValue) => (a: NixValue) =>
(b: NixValue): bigint | number => { (b: NixValue): bigint | number => {
const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (av as any) - (bv as any); return (av as never) - (bv as never);
}; };
export const mul = export const mul =
(a: NixValue) => (a: NixValue) =>
(b: NixValue): bigint | number => { (b: NixValue): bigint | number => {
const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (av as any) * (bv as any); return (av as never) * (bv as never);
}; };
export const div = export const div =
@@ -36,10 +32,9 @@ export const div =
throw new RangeError("Division by zero"); throw new RangeError("Division by zero");
} }
return (av as any) / (bv as any); return (av as never) / (bv as never);
}; };
// Bitwise operations - only for integers
export const bitAnd = export const bitAnd =
(a: NixValue) => (a: NixValue) =>
(b: NixValue): NixInt => { (b: NixValue): NixInt => {

View File

@@ -1,15 +1,12 @@
/** import { mkPos, select } from "../helpers";
* Attribute set operation builtin functions
*/
import type { NixValue, NixAttrs, NixList } from "../types";
import { forceAttrs, forceStringValue, forceFunction, forceList } from "../type-assert";
import { createThunk } from "../thunk"; import { createThunk } from "../thunk";
import { forceAttrs, forceFunction, forceList, forceStringValue } from "../type-assert";
import { ATTR_POSITIONS, type NixAttrs, type NixList, type NixValue } from "../types";
export const attrNames = (set: NixValue): string[] => Object.keys(forceAttrs(set)).sort(); export const attrNames = (set: NixValue): string[] => Array.from(forceAttrs(set).keys()).sort();
export const attrValues = (set: NixValue): NixValue[] => export const attrValues = (set: NixValue): NixValue[] =>
Object.entries(forceAttrs(set)) Array.from(forceAttrs(set).entries())
.sort(([a], [b]) => { .sort(([a], [b]) => {
if (a < b) { if (a < b) {
return -1; return -1;
@@ -24,21 +21,24 @@ export const attrValues = (set: NixValue): NixValue[] =>
export const getAttr = export const getAttr =
(s: NixValue) => (s: NixValue) =>
(set: NixValue): NixValue => (set: NixValue): NixValue =>
forceAttrs(set)[forceStringValue(s)]; select(forceAttrs(set), [s]);
export const hasAttr = export const hasAttr =
(s: NixValue) => (s: NixValue) =>
(set: NixValue): boolean => (set: NixValue): boolean =>
Object.hasOwn(forceAttrs(set), forceStringValue(s)); forceAttrs(set).has(forceStringValue(s));
export const mapAttrs = export const mapAttrs =
(f: NixValue) => (f: NixValue) =>
(attrs: NixValue): NixAttrs => { (attrs: NixValue): NixAttrs => {
const forcedAttrs = forceAttrs(attrs); const forcedAttrs = forceAttrs(attrs);
const forcedF = forceFunction(f); const forcedF = forceFunction(f);
const newAttrs: NixAttrs = {}; const newAttrs: NixAttrs = new Map();
for (const key in forcedAttrs) { for (const [key, val] of forcedAttrs) {
newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]), "created by mapAttrs"); newAttrs.set(
key,
createThunk(() => forceFunction(forcedF(key))(val), "created by mapAttrs"),
);
} }
return newAttrs; return newAttrs;
}; };
@@ -46,25 +46,20 @@ export const mapAttrs =
export const removeAttrs = export const removeAttrs =
(attrs: NixValue) => (attrs: NixValue) =>
(list: NixValue): NixAttrs => { (list: NixValue): NixAttrs => {
const new_attrs: NixAttrs = {}; const newAttrs: NixAttrs = new Map(forceAttrs(attrs));
const forced_attrs = forceAttrs(attrs); const forcedList = forceList(list);
const forced_list = forceList(list); for (const item of forcedList) {
const keys_to_remove = new Set(forced_list.map(forceStringValue)); newAttrs.delete(forceStringValue(item));
for (const key in forced_attrs) {
if (!keys_to_remove.has(key)) {
new_attrs[key] = forced_attrs[key];
}
} }
return new_attrs; return newAttrs;
}; };
export const listToAttrs = (e: NixValue): NixAttrs => { export const listToAttrs = (e: NixValue): NixAttrs => {
const attrs: NixAttrs = {}; const attrs: NixAttrs = new Map();
const forced_e = [...forceList(e)].reverse(); const forcedE = [...forceList(e)].reverse();
for (const obj of forced_e) { for (const obj of forcedE) {
const item = forceAttrs(obj); const item = forceAttrs(obj);
attrs[forceStringValue(item.name)] = item.value; attrs.set(forceStringValue(select(item, ["name"])), select(item, ["value"]));
} }
return attrs; return attrs;
}; };
@@ -74,10 +69,18 @@ export const intersectAttrs =
(e2: NixValue): NixAttrs => { (e2: NixValue): NixAttrs => {
const f1 = forceAttrs(e1); const f1 = forceAttrs(e1);
const f2 = forceAttrs(e2); const f2 = forceAttrs(e2);
const attrs: NixAttrs = {}; const attrs: NixAttrs = new Map();
for (const key of Object.keys(f2)) { if (f1.size < f2.size) {
if (Object.hasOwn(f1, key)) { for (const [key] of f1) {
attrs[key] = f2[key]; if (f2.has(key)) {
attrs.set(key, f2.get(key) as NixValue);
}
}
} else {
for (const [key] of f2) {
if (f1.has(key)) {
attrs.set(key, f2.get(key) as NixValue);
}
} }
} }
return attrs; return attrs;
@@ -88,20 +91,20 @@ export const catAttrs =
(list: NixValue): NixList => { (list: NixValue): NixList => {
const key = forceStringValue(attr); const key = forceStringValue(attr);
return forceList(list) return forceList(list)
.map((set) => forceAttrs(set)[key]) .map((set) => forceAttrs(set).get(key))
.filter((val) => val !== undefined); .filter((val) => val !== undefined) as NixList;
}; };
export const groupBy = export const groupBy =
(f: NixValue) => (f: NixValue) =>
(list: NixValue): NixAttrs => { (list: NixValue): NixAttrs => {
const attrs: NixAttrs = {}; const attrs: NixAttrs = new Map();
const forced_f = forceFunction(f); const forcedF = forceFunction(f);
const forced_list = forceList(list); const forcedList = forceList(list);
for (const elem of forced_list) { for (const elem of forcedList) {
const key = forceStringValue(forced_f(elem)); const key = forceStringValue(forcedF(elem));
if (!attrs[key]) attrs[key] = []; if (!attrs.has(key)) attrs.set(key, []);
(attrs[key] as NixList).push(elem); (attrs.get(key) as NixList).push(elem);
} }
return attrs; return attrs;
}; };
@@ -111,29 +114,26 @@ export const zipAttrsWith =
(list: NixValue): NixValue => { (list: NixValue): NixValue => {
const listForced = forceList(list); const listForced = forceList(list);
// Map to collect all values for each attribute name
const attrMap = new Map<string, NixValue[]>(); const attrMap = new Map<string, NixValue[]>();
// Iterate through each attribute set in the list
for (const item of listForced) { for (const item of listForced) {
const attrs = forceAttrs(item); const attrs = forceAttrs(item);
// Collect all attribute names and their values for (const [key, value] of attrs) {
for (const [key, value] of Object.entries(attrs)) {
if (!attrMap.has(key)) { if (!attrMap.has(key)) {
attrMap.set(key, []); attrMap.set(key, []);
} }
attrMap.get(key)!.push(value); (attrMap.get(key) as NixValue[]).push(value);
} }
} }
// Build the result attribute set const result: NixAttrs = new Map();
const result: Record<string, NixValue> = {};
for (const [name, values] of attrMap.entries()) { for (const [name, values] of attrMap.entries()) {
// Apply f to name and values list result.set(
// f is curried: f name values name,
result[name] = createThunk(() => forceFunction(forceFunction(f)(name))(values)); createThunk(() => forceFunction(forceFunction(f)(name))(values)),
);
} }
return result; return result;
@@ -145,15 +145,15 @@ export const unsafeGetAttrPos =
const name = forceStringValue(attrName); const name = forceStringValue(attrName);
const attrs = forceAttrs(attrSet); const attrs = forceAttrs(attrSet);
if (!(name in attrs)) { if (!attrs.has(name)) {
return null; return null;
} }
const positions = (attrs as any)[Nix.ATTR_POSITIONS]; const positions = attrs[ATTR_POSITIONS];
if (!positions || !(name in positions)) { if (!positions || !positions.has(name)) {
return null; return null;
} }
const span = positions[name]; const span = positions.get(name) as number;
return Nix.mkPos(span); return mkPos(span);
}; };

View File

@@ -1,15 +1,15 @@
import type { NixValue, NixAttrs, NixString } from "../types";
import { isStringWithContext } from "../types";
import { forceString, forceAttrs, forceList, forceStringValue } from "../type-assert";
import { force } from "../thunk";
import { import {
type NixStringContext,
getStringValue,
getStringContext,
mkStringWithContext,
decodeContextElem, decodeContextElem,
getStringContext,
getStringValue,
mkStringWithContext,
type NixStringContext,
parseContextToInfoMap, parseContextToInfoMap,
} from "../string-context"; } from "../string-context";
import { force } from "../thunk";
import { forceAttrs, forceList, forceString, forceStringValue } from "../type-assert";
import type { NixAttrs, NixString, NixValue } from "../types";
import { isStringWithContext } from "../types";
/** /**
* builtins.hasContext - Check if string has context * builtins.hasContext - Check if string has context
@@ -113,20 +113,20 @@ export const getContext = (value: NixValue): NixAttrs => {
const context = getStringContext(s); const context = getStringContext(s);
const infoMap = parseContextToInfoMap(context); const infoMap = parseContextToInfoMap(context);
const result: NixAttrs = {}; const result: NixAttrs = new Map();
for (const [path, info] of infoMap) { for (const [path, info] of infoMap) {
const attrs: NixAttrs = {}; const attrs: NixAttrs = new Map();
if (info.path) { if (info.path) {
attrs["path"] = true; attrs.set("path", true);
} }
if (info.allOutputs) { if (info.allOutputs) {
attrs["allOutputs"] = true; attrs.set("allOutputs", true);
} }
if (info.outputs.length > 0) { if (info.outputs.length > 0) {
attrs["outputs"] = info.outputs; attrs.set("outputs", info.outputs);
} }
result[path] = attrs; result.set(path, attrs);
} }
return result; return result;
@@ -154,22 +154,22 @@ export const appendContext =
const ctxAttrs = forceAttrs(ctxValue); const ctxAttrs = forceAttrs(ctxValue);
const newContext: NixStringContext = new Set(existingContext); const newContext: NixStringContext = new Set(existingContext);
for (const [path, infoVal] of Object.entries(ctxAttrs)) { for (const [path, infoVal] of ctxAttrs) {
if (!path.startsWith("/nix/store/")) { if (!path.startsWith("/nix/store/")) {
throw new Error(`context key '${path}' is not a store path`); throw new Error(`context key '${path}' is not a store path`);
} }
const info = forceAttrs(infoVal); const info = forceAttrs(infoVal as NixValue);
if ("path" in info) { if (info.has("path")) {
const pathVal = force(info["path"]); const pathVal = force(info.get("path") as NixValue);
if (pathVal === true) { if (pathVal === true) {
newContext.add(path); newContext.add(path);
} }
} }
if ("allOutputs" in info) { if (info.has("allOutputs")) {
const allOutputs = force(info["allOutputs"]); const allOutputs = force(info.get("allOutputs") as NixValue);
if (allOutputs === true) { if (allOutputs === true) {
if (!path.endsWith(".drv")) { if (!path.endsWith(".drv")) {
throw new Error( throw new Error(
@@ -180,8 +180,8 @@ export const appendContext =
} }
} }
if ("outputs" in info) { if (info.has("outputs")) {
const outputs = forceList(info["outputs"]); const outputs = forceList(info.get("outputs") as NixValue);
if (outputs.length > 0 && !path.endsWith(".drv")) { if (outputs.length > 0 && !path.endsWith(".drv")) {
throw new Error( throw new Error(
`tried to add derivation output context of ${path}, which is not a derivation, to a string`, `tried to add derivation output context of ${path}, which is not a derivation, to a string`,

View File

@@ -2,12 +2,16 @@
* Conversion and serialization builtin functions * Conversion and serialization builtin functions
*/ */
import type { NixString, NixValue } from "../types"; import {
import { isStringWithContext, isNixPath } from "../types"; addBuiltContext,
import { force } from "../thunk"; mkStringWithContext,
import { type NixStringContext, mkStringWithContext, addBuiltContext } from "../string-context"; type NixStringContext,
StringWithContext,
} from "../string-context";
import { force, isThunk } from "../thunk";
import { forceFunction, forceStringNoCtx } from "../type-assert"; import { forceFunction, forceStringNoCtx } from "../type-assert";
import { nixValueToJson } from "../conversion"; import type { NixString, NixValue } from "../types";
import { isNixPath, isStringWithContext, NixPath } from "../types";
import { isAttrs, isPath, typeOf } from "./type-check"; import { isAttrs, isPath, typeOf } from "./type-check";
export const fromJSON = (e: NixValue): NixValue => { export const fromJSON = (e: NixValue): NixValue => {
@@ -16,12 +20,12 @@ export const fromJSON = (e: NixValue): NixValue => {
throw new TypeError(`builtins.fromJSON: expected a string, got ${typeOf(str)}`); throw new TypeError(`builtins.fromJSON: expected a string, got ${typeOf(str)}`);
} }
const jsonStr = isStringWithContext(str) ? str.value : str; const jsonStr = isStringWithContext(str) ? str.value : str;
return Deno.core.ops.op_from_json(jsonStr); return Deno.core.ops.op_from_json(jsonStr) as NixValue;
}; };
export const fromTOML = (e: NixValue): NixValue => { export const fromTOML = (e: NixValue): NixValue => {
const toml = forceStringNoCtx(e); const toml = forceStringNoCtx(e);
return Deno.core.ops.op_from_toml(toml); return Deno.core.ops.op_from_toml(toml) as NixValue;
}; };
export const toJSON = (e: NixValue): NixString => { export const toJSON = (e: NixValue): NixString => {
@@ -33,8 +37,12 @@ export const toJSON = (e: NixValue): NixString => {
return mkStringWithContext(string, context); return mkStringWithContext(string, context);
}; };
export const toXML = (e: NixValue): never => { export const toXML = (e: NixValue): NixString => {
throw new Error("Not implemented: toXML"); const [xml, context] = Deno.core.ops.op_to_xml(force(e));
if (context.length === 0) {
return xml;
}
return mkStringWithContext(xml, new Set(context));
}; };
/** /**
@@ -87,7 +95,7 @@ export const coerceToString = (
value: NixValue, value: NixValue,
mode: StringCoercionMode, mode: StringCoercionMode,
copyToStore: boolean = false, copyToStore: boolean = false,
outContext?: NixStringContext, outContext: NixStringContext,
): string => { ): string => {
const v = force(value); const v = force(value);
@@ -97,10 +105,8 @@ export const coerceToString = (
} }
if (isStringWithContext(v)) { if (isStringWithContext(v)) {
if (outContext) { for (const elem of v.context) {
for (const elem of v.context) { outContext.add(elem);
outContext.add(elem);
}
} }
return v.value; return v.value;
} }
@@ -110,46 +116,38 @@ export const coerceToString = (
if (copyToStore) { if (copyToStore) {
const pathStr = v.value; const pathStr = v.value;
const storePath = Deno.core.ops.op_copy_path_to_store(pathStr); const storePath = Deno.core.ops.op_copy_path_to_store(pathStr);
if (outContext) { outContext.add(storePath);
outContext.add(storePath);
}
return storePath; return storePath;
} }
return v.value; return v.value;
} }
if (typeof v === "object" && v !== null && !Array.isArray(v)) { if (typeof v === "object" && v !== null && !Array.isArray(v)) {
// First, try the __toString method if present if (v instanceof Map) {
// This allows custom types to define their own string representation if (v.has("__toString")) {
if ("__toString" in v) { const toStringMethod = forceFunction(v.get("__toString") as NixValue);
// Force the method in case it's a thunk const result = force(toStringMethod(v));
const toStringMethod = forceFunction(v.__toString); return coerceToString(result, mode, copyToStore, outContext);
const result = force(toStringMethod(v)); }
// Recursively coerceToString
return coerceToString(result, mode, copyToStore, outContext); if (v.has("outPath")) {
} const outPath = coerceToString(v.get("outPath") as NixValue, mode, copyToStore, outContext);
if (v.has("type") && v.get("type") === "derivation" && v.has("drvPath") && outContext) {
// If no __toString, try outPath (used for derivations and store paths) const drvPathValue = force(v.get("drvPath") as NixValue);
// This allows derivation objects like { outPath = "/nix/store/..."; } to be coerced const drvPathStr = isStringWithContext(drvPathValue)
if ("outPath" in v) { ? drvPathValue.value
// Recursively coerce the outPath value : typeof drvPathValue === "string"
const outPath = coerceToString(v.outPath, mode, copyToStore, outContext); ? drvPathValue
if ("type" in v && v.type === "derivation" && "drvPath" in v && outContext) { : null;
const drvPathValue = force(v.drvPath); if (drvPathStr) {
const drvPathStr = isStringWithContext(drvPathValue) const outputName = v.has("outputName") ? String(force(v.get("outputName") as NixValue)) : "out";
? drvPathValue.value addBuiltContext(outContext, drvPathStr, outputName);
: typeof drvPathValue === "string" }
? drvPathValue }
: null; return outPath;
if (drvPathStr) {
const outputName = "outputName" in v ? String(force(v.outputName)) : "out";
addBuiltContext(outContext, drvPathStr, outputName);
}
} }
return outPath;
} }
// Attribute sets without __toString or outPath cannot be coerced
throw new TypeError(`cannot coerce ${typeOf(v)} to a string`); throw new TypeError(`cannot coerce ${typeOf(v)} to a string`);
} }
@@ -254,14 +252,14 @@ export const coerceToStringWithContext = (
* - Returns the path string (not a NixPath object) * - Returns the path string (not a NixPath object)
* - Preserves string context if present * - Preserves string context if present
*/ */
export const coerceToPath = (value: NixValue, outContext?: NixStringContext): string => { export const coerceToPath = (value: NixValue, outContext: NixStringContext): string => {
const forced = force(value); const forced = force(value);
if (isPath(forced)) { if (isPath(forced)) {
return forced.value; return forced.value;
} }
if (isAttrs(forced) && Object.hasOwn(forced, "__toString")) { if (isAttrs(forced) && forced.has("__toString")) {
const toStringFunc = forceFunction(forced.__toString); const toStringFunc = forceFunction(forced.get("__toString") as NixValue);
return coerceToPath(toStringFunc(forced), outContext); return coerceToPath(toStringFunc(forced), outContext);
} }
@@ -290,3 +288,84 @@ export const coerceToPath = (value: NixValue, outContext?: NixStringContext): st
export const toStringFunc = (value: NixValue): NixString => { export const toStringFunc = (value: NixValue): NixString => {
return coerceToStringWithContext(value, StringCoercionMode.ToString, false); return coerceToStringWithContext(value, StringCoercionMode.ToString, false);
}; };
/** Plain-JSON value shape produced by `nixValueToJson` (what builtins.toJSON serializes). */
export type JsonValue = number | boolean | string | null | { [key: string]: JsonValue } | Array<JsonValue>;

/**
 * Convert a Nix value into a plain JSON-compatible JS value.
 *
 * - `strict`: when true, the value (and, via recursion, every nested value)
 *   is forced before inspection; when false the value is used as-is, so an
 *   unforced thunk anywhere raises the "cannot convert thunk" error below.
 * - `outContext`: mutated in place — string contexts of embedded strings and
 *   store paths of copied paths are added to it.
 * - `copyToStore`: when true, NixPath values are copied into the store and
 *   the resulting store path is returned (and recorded in `outContext`);
 *   otherwise the raw path string is returned.
 * - `seen`: threaded through recursion for cycle detection, but the actual
 *   check is currently disabled (see FIXME below), so cycles would recurse
 *   until the stack overflows.
 *
 * Throws for values with no JSON representation (thunks, lambdas, and any
 * unrecognized object type).
 */
export const nixValueToJson = (
  value: NixValue,
  strict: boolean,
  outContext: NixStringContext,
  copyToStore: boolean,
  seen: Set<NixValue> = new Set(),
): JsonValue => {
  const v = strict ? force(value) : value;
  // Thunks (only possible when strict=false) and functions are not serializable.
  if (isThunk(v) || typeof v === "function")
    throw new Error(`cannot convert ${isThunk(v) ? "thunk" : "lambda"} to JSON`);
  if (v === null) return null;
  if (typeof v === "bigint") {
    // Nix integers are bigints; JSON numbers are doubles, so values outside
    // the safe-integer range silently lose precision — warn but proceed.
    const num = Number(v);
    if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {
      console.warn(`integer ${v} exceeds safe range, precision may be lost`);
    }
    return num;
  }
  if (typeof v === "number") return v;
  if (typeof v === "boolean") return v;
  if (typeof v === "string") return v;
  if (v instanceof StringWithContext) {
    // Propagate the string's context to the caller, return the bare string.
    for (const elem of v.context) {
      outContext.add(elem);
    }
    return v.value;
  }
  if (v instanceof NixPath) {
    if (copyToStore) {
      // Import the path into the store; the store path becomes both the JSON
      // value and a new context element.
      const storePath = Deno.core.ops.op_copy_path_to_store(v.value);
      outContext.add(storePath);
      return storePath;
    } else {
      return v.value;
    }
  }
  // FIXME: is this check necessary?
  // if (seen.has(v)) {
  //   throw new Error("cycle detected in toJSON");
  // } else {
  //   seen.add(v)
  // }
  if (Array.isArray(v)) {
    // Lists serialize element-wise; `strict` and `seen` are threaded through.
    return v.map((item) => nixValueToJson(item, strict, outContext, copyToStore, seen));
  }
  if (v instanceof Map) {
    // Attribute sets. A functional `__toString` attribute takes precedence:
    // its result is used (string results short-circuit, others recurse).
    if (v.has("__toString") && typeof force(v.get("__toString") as NixValue) === "function") {
      const toStringMethod = force(v.get("__toString") as NixValue) as (self: typeof v) => NixValue;
      const result = force(toStringMethod(v));
      if (typeof result === "string") {
        return result;
      }
      if (isStringWithContext(result)) {
        for (const elem of result.context) {
          outContext.add(elem);
        }
        return result.value;
      }
      return nixValueToJson(result, strict, outContext, copyToStore, seen);
    }
    // Derivation-like sets serialize as their outPath.
    if (v.has("outPath")) {
      return nixValueToJson(v.get("outPath") as NixValue, strict, outContext, copyToStore, seen);
    }
    // Ordinary attrset: emit keys in sorted order for deterministic output.
    const result: { [key: string]: JsonValue } = {};
    const keys = Array.from(v.keys()).sort();
    for (const key of keys) {
      result[key] = nixValueToJson(v.get(key) as NixValue, strict, outContext, copyToStore, seen);
    }
    return result;
  }
  throw new Error(`cannot convert ${typeof v} to JSON`);
};

View File

@@ -1,45 +1,125 @@
import type { NixValue, NixAttrs } from "../types";
import { forceStringValue, forceList, forceStringNoCtx } from "../type-assert";
import { force } from "../thunk";
import { import {
type DerivationData,
type OutputInfo,
generateAterm,
generateAtermModulo,
} from "../derivation-helpers";
import { coerceToString, StringCoercionMode } from "./conversion";
import {
type NixStringContext,
extractInputDrvsAndSrcs,
isStringWithContext,
mkStringWithContext,
addDrvDeepContext,
addBuiltContext, addBuiltContext,
addDrvDeepContext,
mkStringWithContext,
type NixStringContext,
} from "../string-context"; } from "../string-context";
import { nixValueToJson } from "../conversion"; import { force } from "../thunk";
import { isNixPath } from "../types"; import { forceAttrs, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixAttrs, NixValue } from "../types";
import { coerceToString, type JsonValue, nixValueToJson, StringCoercionMode } from "./conversion";
const drvHashCache = new Map<string, string>(); export interface OutputInfo {
path: string;
hashAlgo: string;
hash: string;
}
const forceAttrs = (value: NixValue): NixAttrs => { export interface DerivationData {
const forced = force(value); name: string;
if ( outputs: Map<string, OutputInfo>;
typeof forced !== "object" || inputDrvs: Map<string, Set<string>>;
forced === null || inputSrcs: Set<string>;
Array.isArray(forced) || platform: string;
isStringWithContext(forced) || builder: string;
isNixPath(forced) args: string[];
) { env: Map<string, string>;
throw new TypeError(`Expected attribute set for derivation, got ${typeof forced}`); }
export const escapeString = (s: string): string => {
let result = "";
for (const char of s) {
switch (char) {
case '"':
result += '\\"';
break;
case "\\":
result += "\\\\";
break;
case "\n":
result += "\\n";
break;
case "\r":
result += "\\r";
break;
case "\t":
result += "\\t";
break;
default:
result += char;
}
} }
return forced; return `"${result}"`;
}; };
const quoteString = (s: string): string => `"${s}"`;
const cmpByKey = <T>(a: [string, T], b: [string, T]): number => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0);
export const generateAterm = (drv: DerivationData): string => {
const outputEntries: string[] = [];
const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
for (const [name, info] of sortedOutputs) {
outputEntries.push(
`(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
);
}
const outputs = outputEntries.join(",");
const inputDrvEntries: string[] = [];
const sortedInputDrvs = Array.from(drv.inputDrvs.entries()).sort(cmpByKey);
for (const [drvPath, outputs] of sortedInputDrvs) {
const sortedOuts = Array.from(outputs).sort();
const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
inputDrvEntries.push(`(${quoteString(drvPath)},${outList})`);
}
const inputDrvs = inputDrvEntries.join(",");
const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
const inputSrcs = sortedInputSrcs.map(quoteString).join(",");
const args = drv.args.map(escapeString).join(",");
const envs = Array.from(drv.env.entries())
.sort(cmpByKey)
.map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);
return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};
export const generateAtermModulo = (drv: DerivationData, inputDrvHashes: Map<string, string>): string => {
const outputEntries: string[] = [];
const sortedOutputs = Array.from(drv.outputs.entries()).sort(cmpByKey);
for (const [name, info] of sortedOutputs) {
outputEntries.push(
`(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
);
}
const outputs = outputEntries.join(",");
const inputDrvEntries: string[] = [];
const sortedInputDrvHashes = Array.from(inputDrvHashes.entries()).sort(cmpByKey);
for (const [drvHash, outputs] of sortedInputDrvHashes) {
const sortedOuts = outputs.split(",").sort();
const outList = `[${sortedOuts.map(quoteString).join(",")}]`;
inputDrvEntries.push(`(${quoteString(drvHash)},${outList})`);
}
const inputDrvs = inputDrvEntries.join(",");
const sortedInputSrcs = Array.from(drv.inputSrcs).sort();
const inputSrcs = sortedInputSrcs.map(quoteString).join(",");
const args = drv.args.map(escapeString).join(",");
const envs = Array.from(drv.env.entries())
.sort(cmpByKey)
.map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`);
return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};
const validateName = (attrs: NixAttrs): string => { const validateName = (attrs: NixAttrs): string => {
if (!("name" in attrs)) { if (!attrs.has("name")) {
throw new Error("derivation: missing required attribute 'name'"); throw new Error("derivation: missing required attribute 'name'");
} }
const name = forceStringValue(attrs.name); const name = forceStringValue(attrs.get("name") as NixValue);
if (!name) { if (!name) {
throw new Error("derivation: 'name' cannot be empty"); throw new Error("derivation: 'name' cannot be empty");
} }
@@ -50,17 +130,17 @@ const validateName = (attrs: NixAttrs): string => {
}; };
const validateBuilder = (attrs: NixAttrs, outContext: NixStringContext): string => { const validateBuilder = (attrs: NixAttrs, outContext: NixStringContext): string => {
if (!("builder" in attrs)) { if (!attrs.has("builder")) {
throw new Error("derivation: missing required attribute 'builder'"); throw new Error("derivation: missing required attribute 'builder'");
} }
return coerceToString(attrs.builder, StringCoercionMode.ToString, true, outContext); return coerceToString(attrs.get("builder") as NixValue, StringCoercionMode.ToString, true, outContext);
}; };
const validateSystem = (attrs: NixAttrs): string => { const validateSystem = (attrs: NixAttrs): string => {
if (!("system" in attrs)) { if (!attrs.has("system")) {
throw new Error("derivation: missing required attribute 'system'"); throw new Error("derivation: missing required attribute 'system'");
} }
return forceStringValue(attrs.system); return forceStringValue(attrs.get("system") as NixValue);
}; };
const validateOutputs = (outputs: string[]): void => { const validateOutputs = (outputs: string[]): void => {
@@ -82,17 +162,25 @@ const validateOutputs = (outputs: string[]): void => {
}; };
const extractOutputs = (attrs: NixAttrs, structuredAttrs: boolean): string[] => { const extractOutputs = (attrs: NixAttrs, structuredAttrs: boolean): string[] => {
if (!("outputs" in attrs)) { if (!attrs.has("outputs")) {
return ["out"]; return ["out"];
} }
let outputs: string[]; let outputs: string[];
if (structuredAttrs) { if (structuredAttrs) {
const outputsList = forceList(attrs.outputs); const outputsList = forceList(attrs.get("outputs") as NixValue);
outputs = outputsList.map((o) => forceStringValue(o)); outputs = outputsList.map((o) => forceStringValue(o));
} else { } else {
const outputsStr = coerceToString(attrs.outputs, StringCoercionMode.ToString, false, new Set()); const outputsStr = coerceToString(
outputs = outputsStr.split(/\s+/).filter((s) => s.length > 0); attrs.get("outputs") as NixValue,
StringCoercionMode.ToString,
false,
new Set(),
);
outputs = outputsStr
.trim()
.split(/\s+/)
.filter((s) => s.length > 0);
} }
validateOutputs(outputs); validateOutputs(outputs);
@@ -100,20 +188,13 @@ const extractOutputs = (attrs: NixAttrs, structuredAttrs: boolean): string[] =>
}; };
const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] => { const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] => {
if (!("args" in attrs)) { if (!attrs.has("args")) {
return []; return [];
} }
const argsList = forceList(attrs.args); const argsList = forceList(attrs.get("args") as NixValue);
return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext)); return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext));
}; };
const outputPathName = (drvName: string, output: string) => {
if (output === "out") {
return drvName
}
return `${drvName}-${output}`
}
const structuredAttrsExcludedKeys = new Set([ const structuredAttrsExcludedKeys = new Set([
"__structuredAttrs", "__structuredAttrs",
"__ignoreNulls", "__ignoreNulls",
@@ -124,9 +205,9 @@ const structuredAttrsExcludedKeys = new Set([
const specialAttrs = new Set(["args", "__ignoreNulls", "__contentAddressed", "__impure"]); const specialAttrs = new Set(["args", "__ignoreNulls", "__contentAddressed", "__impure"]);
const sortedJsonStringify = (obj: Record<string, any>): string => { const sortedJsonStringify = (obj: Record<string, JsonValue>): string => {
const sortedKeys = Object.keys(obj).sort(); const sortedKeys = Object.keys(obj).sort();
const sortedObj: Record<string, any> = {}; const sortedObj: Record<string, JsonValue> = {};
for (const key of sortedKeys) { for (const key of sortedKeys) {
sortedObj[key] = obj[key]; sortedObj[key] = obj[key];
} }
@@ -143,8 +224,8 @@ const extractEnv = (
const env = new Map<string, string>(); const env = new Map<string, string>();
if (structuredAttrs) { if (structuredAttrs) {
const jsonAttrs: Record<string, any> = {}; const jsonAttrs: Record<string, JsonValue> = {};
for (const [key, value] of Object.entries(attrs)) { for (const [key, value] of attrs) {
if (!structuredAttrsExcludedKeys.has(key)) { if (!structuredAttrsExcludedKeys.has(key)) {
const forcedValue = force(value); const forcedValue = force(value);
if (ignoreNulls && forcedValue === null) { if (ignoreNulls && forcedValue === null) {
@@ -198,13 +279,13 @@ const extractEnv = (
} }
env.set("__json", sortedJsonStringify(jsonAttrs)); env.set("__json", sortedJsonStringify(jsonAttrs));
} else { } else {
for (const [key, value] of Object.entries(attrs)) { for (const [key, value] of attrs) {
if (!specialAttrs.has(key)) { if (!specialAttrs.has(key)) {
const forcedValue = force(value); const forcedValue = force(value as NixValue);
if (ignoreNulls && forcedValue === null) { if (ignoreNulls && forcedValue === null) {
continue; continue;
} }
env.set(key, coerceToString(value, StringCoercionMode.ToString, true, outContext)); env.set(key, coerceToString(value as NixValue, StringCoercionMode.ToString, true, outContext));
} }
} }
} }
@@ -219,29 +300,29 @@ interface FixedOutputInfo {
} }
const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOutputInfo | null => { const extractFixedOutputInfo = (attrs: NixAttrs, ignoreNulls: boolean): FixedOutputInfo | null => {
if (!("outputHash" in attrs)) { if (!attrs.has("outputHash")) {
return null; return null;
} }
const hashValue = force(attrs.outputHash); const hashValue = force(attrs.get("outputHash") as NixValue);
if (ignoreNulls && hashValue === null) { if (ignoreNulls && hashValue === null) {
return null; return null;
} }
const hashRaw = forceStringNoCtx(attrs.outputHash); const hashRaw = forceStringNoCtx(hashValue);
let hashAlgo = null; let hashAlgo = null;
if ("outputHashAlgo" in attrs) { if (attrs.has("outputHashAlgo")) {
const algoValue = force(attrs.outputHashAlgo); const algoValue = force(attrs.get("outputHashAlgo") as NixValue);
if (!(ignoreNulls && algoValue === null)) { if (!(ignoreNulls && algoValue === null)) {
hashAlgo = forceStringNoCtx(attrs.outputHashAlgo); hashAlgo = forceStringNoCtx(algoValue);
} }
} }
let hashMode = "flat"; let hashMode = "flat";
if ("outputHashMode" in attrs) { if (attrs.has("outputHashMode")) {
const modeValue = force(attrs.outputHashMode); const modeValue = force(attrs.get("outputHashMode") as NixValue);
if (!(ignoreNulls && modeValue === null)) { if (!(ignoreNulls && modeValue === null)) {
hashMode = forceStringValue(attrs.outputHashMode); hashMode = forceStringValue(modeValue);
} }
} }
@@ -268,157 +349,60 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const builder = validateBuilder(attrs, collectedContext); const builder = validateBuilder(attrs, collectedContext);
const platform = validateSystem(attrs); const platform = validateSystem(attrs);
const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false; const structuredAttrs = attrs.has("__structuredAttrs")
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false; ? force(attrs.get("__structuredAttrs") as NixValue) === true
: false;
const ignoreNulls = attrs.has("__ignoreNulls")
? force(attrs.get("__ignoreNulls") as NixValue) === true
: false;
const outputs = extractOutputs(attrs, structuredAttrs); const outputs = extractOutputs(attrs, structuredAttrs);
const fixedOutputInfo = extractFixedOutputInfo(attrs, ignoreNulls); const fixedOutputInfo = extractFixedOutputInfo(attrs, ignoreNulls);
validateFixedOutputConstraints(fixedOutputInfo, outputs); validateFixedOutputConstraints(fixedOutputInfo, outputs);
if ("__contentAddressed" in attrs && force(attrs.__contentAddressed) === true) { if (attrs.has("__contentAddressed") && force(attrs.get("__contentAddressed") as NixValue) === true) {
throw new Error("ca derivations are not supported"); throw new Error("ca derivations are not supported");
} }
if ("__impure" in attrs && force(attrs.__impure) === true) { if (attrs.has("__impure") && force(attrs.get("__impure") as NixValue) === true) {
throw new Error("impure derivations are not supported"); throw new Error("impure derivations are not supported");
} }
const drvArgs = extractArgs(attrs, collectedContext); const drvArgs = extractArgs(attrs, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName); const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);
const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext); const envEntries: [string, string][] = Array.from(env.entries());
const contextArray: string[] = Array.from(collectedContext);
const collectDrvReferences = (): string[] => { const rustResult: {
const refs = new Set<string>(); drvPath: string;
for (const src of inputSrcs) { outputs: [string, string][];
refs.add(src); } = Deno.core.ops.op_finalize_derivation(
} drvName,
for (const drvPath of inputDrvs.keys()) { builder,
refs.add(drvPath); platform,
} outputs,
return Array.from(refs).sort(); drvArgs,
}; envEntries,
contextArray,
fixedOutputInfo,
);
let outputInfos: Map<string, OutputInfo>; const result: NixAttrs = new Map();
let drvPath: string;
if (fixedOutputInfo) {
const pathName = outputPathName(drvName, "out");
const outPath = Deno.core.ops.op_make_fixed_output_path(
fixedOutputInfo.hashAlgo,
fixedOutputInfo.hash,
fixedOutputInfo.hashMode,
pathName,
);
const hashAlgoPrefix = fixedOutputInfo.hashMode === "recursive" ? "r:" : "";
outputInfos = new Map([
[
"out",
{
path: outPath,
hashAlgo: hashAlgoPrefix + fixedOutputInfo.hashAlgo,
hash: fixedOutputInfo.hash,
},
],
]);
env.set("out", outPath);
const finalDrv: DerivationData = {
name: drvName,
outputs: outputInfos,
inputDrvs,
inputSrcs,
platform,
builder,
args: drvArgs,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const fixedHashFingerprint = `fixed:out:${hashAlgoPrefix}${fixedOutputInfo.hashAlgo}:${fixedOutputInfo.hash}:${outPath}`;
const fixedModuloHash = Deno.core.ops.op_sha256_hex(fixedHashFingerprint);
drvHashCache.set(drvPath, fixedModuloHash);
} else {
const maskedOutputs = new Map<string, OutputInfo>(
outputs.map((o) => [
o,
{
path: "",
hashAlgo: "",
hash: "",
},
]),
);
const maskedEnv = new Map(env);
for (const output of outputs) {
maskedEnv.set(output, "");
}
const maskedDrv: DerivationData = {
name: drvName,
outputs: maskedOutputs,
inputDrvs,
inputSrcs,
platform,
builder,
args: drvArgs,
env: maskedEnv,
};
const inputDrvHashes = new Map<string, string>();
for (const [drvPath, outputNames] of inputDrvs) {
const cachedHash = drvHashCache.get(drvPath);
if (!cachedHash) {
throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
}
inputDrvHashes.set(cachedHash, Array.from(outputNames).join(","));
}
const maskedAterm = generateAtermModulo(maskedDrv, inputDrvHashes);
const drvModuloHash = Deno.core.ops.op_sha256_hex(maskedAterm);
outputInfos = new Map<string, OutputInfo>();
for (const outputName of outputs) {
const pathName = outputPathName(drvName, outputName);
const outPath = Deno.core.ops.op_make_store_path(`output:${outputName}`, drvModuloHash, pathName);
outputInfos.set(outputName, {
path: outPath,
hashAlgo: "",
hash: "",
});
env.set(outputName, outPath);
}
const finalDrv: DerivationData = {
...maskedDrv,
outputs: outputInfos,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const finalAtermModulo = generateAtermModulo(finalDrv, inputDrvHashes);
const cachedModuloHash = Deno.core.ops.op_sha256_hex(finalAtermModulo);
drvHashCache.set(drvPath, cachedModuloHash);
}
const result: NixAttrs = {};
const drvPathContext = new Set<string>(); const drvPathContext = new Set<string>();
addDrvDeepContext(drvPathContext, drvPath); addDrvDeepContext(drvPathContext, rustResult.drvPath);
result.drvPath = mkStringWithContext(drvPath, drvPathContext); result.set("drvPath", mkStringWithContext(rustResult.drvPath, drvPathContext));
for (const [outputName, outputInfo] of outputInfos.entries()) { for (const [outputName, outputPath] of rustResult.outputs) {
const outputContext = new Set<string>(); const outputContext = new Set<string>();
addBuiltContext(outputContext, drvPath, outputName); addBuiltContext(outputContext, rustResult.drvPath, outputName);
result[outputName] = mkStringWithContext(outputInfo.path, outputContext); result.set(outputName, mkStringWithContext(outputPath, outputContext));
} }
return result; return result;
}; };
export const derivation = (_: NixValue): NixAttrs => { export const derivationStub = (_: NixValue): NixAttrs => {
throw new Error("unreachable: placeholder derivation implementation called") throw new Error("unreachable: stub derivation implementation called");
}; };

View File

@@ -0,0 +1,17 @@
import type { NixValue } from "../types";
/**
 * Flake-related builtins. None of these are implemented yet; each stub fails
 * loudly so callers get a clear error rather than silent misbehavior.
 */
export function getFlake(_attrs: NixValue): never {
  throw new Error("Not implemented: getFlake");
}

export function parseFlakeName(_s: NixValue): never {
  throw new Error("Not implemented: parseFlakeName");
}

export function parseFlakeRef(_s: NixValue): never {
  throw new Error("Not implemented: parseFlakeRef");
}

export function flakeRefToString(_attrs: NixValue): never {
  throw new Error("Not implemented: flakeRefToString");
}

View File

@@ -1,11 +1,7 @@
/**
* Functional programming builtin functions
*/
import { CatchableError, type NixValue } from "../types";
import { force } from "../thunk";
import { coerceToString, StringCoercionMode } from "./conversion";
import { printValue } from "../print"; import { printValue } from "../print";
import { force } from "../thunk";
import { CatchableError, type NixValue } from "../types";
import { coerceToString, StringCoercionMode } from "./conversion";
import { isAttrs } from "./type-check"; import { isAttrs } from "./type-check";
export const seq = export const seq =
@@ -31,7 +27,7 @@ export const deepSeq =
recurse(val); recurse(val);
} }
} else if (isAttrs(forced)) { } else if (isAttrs(forced)) {
for (const [_, val] of Object.entries(forced)) { for (const [_, val] of forced.entries()) {
recurse(val); recurse(val);
} }
} }
@@ -45,7 +41,7 @@ export const abort = (s: NixValue): never => {
}; };
export const throwFunc = (s: NixValue): never => { export const throwFunc = (s: NixValue): never => {
throw new CatchableError(coerceToString(s, StringCoercionMode.Base)); throw new CatchableError(coerceToString(s, StringCoercionMode.Base, false, new Set()));
}; };
export const trace = export const trace =

View File

@@ -0,0 +1,34 @@
import { select } from "../helpers";
import { forceAttrs, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixValue } from "../types";
import { realisePath } from "./io";
/**
 * builtins.hashFile — curried: the first argument is the hash algorithm name,
 * the second the file path; returns the digest as a string from the Rust op.
 */
export const hashFile =
  (type: NixValue) =>
  (p: NixValue): string => {
    // Algorithm must be a context-free string; the path is realised first
    // (argument order preserved: algorithm is forced before the path).
    return Deno.core.ops.op_hash_file(forceStringNoCtx(type), realisePath(p));
  };
/**
 * builtins.hashString — curried: the first argument is the hash algorithm
 * name, the second the string to hash; returns the digest from the Rust op.
 */
export const hashString =
  (type: NixValue) =>
  (s: NixValue): string => {
    // Force the algorithm (context-free) before the payload string,
    // matching the original evaluation order.
    const algorithm = forceStringNoCtx(type);
    const payload = forceStringValue(s);
    return Deno.core.ops.op_hash_string(algorithm, payload);
  };
/**
 * builtins.convertHash — takes an attrset with `hash`, `toHashFormat` and an
 * optional `hashAlgo`, and delegates the conversion to the Rust op.
 * `hashAlgo` is passed as null when absent so the op can infer it.
 */
export const convertHash = (args: NixValue): string => {
  const attrSet = forceAttrs(args);
  // Attributes are forced in the same order as before: hash, hashAlgo, toHashFormat.
  const hashStr = forceStringNoCtx(select(attrSet, ["hash"]));
  const algo: string | null = attrSet.has("hashAlgo")
    ? forceStringNoCtx(select(attrSet, ["hashAlgo"]))
    : null;
  const targetFormat = forceStringNoCtx(select(attrSet, ["toHashFormat"]));
  return Deno.core.ops.op_convert_hash(hashStr, algo, targetFormat);
};

View File

@@ -1,85 +1,54 @@
/** import { createThunk, force } from "../thunk";
* Main builtins export import type { NixAttrs, NixFunction, NixValue } from "../types";
* Combines all builtin function categories into the global `builtins` object
*/
// Import all builtin categories
import * as arithmetic from "./arithmetic"; import * as arithmetic from "./arithmetic";
import * as math from "./math";
import * as typeCheck from "./type-check";
import * as list from "./list";
import * as attrs from "./attrs"; import * as attrs from "./attrs";
import * as string from "./string";
import * as pathOps from "./path";
import * as functional from "./functional";
import * as io from "./io";
import * as conversion from "./conversion"; import * as conversion from "./conversion";
import * as misc from "./misc";
import * as derivation from "./derivation"; import * as derivation from "./derivation";
import * as flake from "./flake";
import * as functional from "./functional";
import * as hash from "./hash";
import * as io from "./io";
import * as list from "./list";
import * as math from "./math";
import * as misc from "./misc";
import * as pathOps from "./path";
import * as string from "./string";
import * as typeCheck from "./type-check";
import type { NixValue } from "../types";
import { createThunk, force, isThunk } from "../thunk";
import { getTos } from "../helpers";
/**
* Symbol used to mark functions as primops (primitive operations)
* This is similar to IS_THUNK but for builtin functions
*/
export const PRIMOP_METADATA = Symbol("primop_metadata"); export const PRIMOP_METADATA = Symbol("primop_metadata");
/**
* Metadata interface for primop functions
*/
export interface PrimopMetadata { export interface PrimopMetadata {
/** The name of the primop (e.g., "add", "map") */
name: string; name: string;
/** Total arity of the function (number of arguments it expects) */
arity: number; arity: number;
/** Number of arguments already applied (for partial applications) */
applied: number; applied: number;
} }
/**
* Mark a function as a primop with metadata
* For curried functions, this recursively marks each layer
*
* @param func - The function to mark
* @param name - Name of the primop
* @param arity - Total number of arguments expected
* @param applied - Number of arguments already applied (default: 0)
* @returns The marked function
*/
export const mkPrimop = ( export const mkPrimop = (
func: (...args: NixValue[]) => NixValue, func: NixFunction,
name: string, name: string,
arity: number, arity: number,
applied: number = 0, applied: number = 0,
): Function => { ): ((...args: NixValue[]) => NixValue) => {
// Mark this function as a primop func[PRIMOP_METADATA] = {
(func as any)[PRIMOP_METADATA] = {
name, name,
arity, arity,
applied, applied,
} satisfies PrimopMetadata; } satisfies PrimopMetadata;
// If this is a curried function and not fully applied,
// wrap it to mark the next layer too
if (applied < arity - 1) { if (applied < arity - 1) {
const wrappedFunc = ((...args: NixValue[]) => { const wrappedFunc: NixFunction = ((arg: NixValue) => {
const result = func(...args); const result = func(arg);
// If result is a function, mark it as the next layer
if (typeof result === "function") { if (typeof result === "function") {
return mkPrimop(result, name, arity, applied + args.length); return mkPrimop(result, name, arity, applied + 1);
} }
return result; return result;
}) as any; });
// Copy the primop metadata to the wrapper
wrappedFunc[PRIMOP_METADATA] = { wrappedFunc[PRIMOP_METADATA] = {
name, name,
arity, arity,
applied, applied,
} satisfies PrimopMetadata; };
return wrappedFunc; return wrappedFunc;
} }
@@ -87,12 +56,9 @@ export const mkPrimop = (
return func; return func;
}; };
/** export const isPrimop = (
* Type guard to check if a value is a primop value: NixValue,
* @param value - Value to check ): value is NixFunction & { [PRIMOP_METADATA]: PrimopMetadata } => {
* @returns true if value is marked as a primop
*/
export const is_primop = (value: unknown): value is Function & { [PRIMOP_METADATA]: PrimopMetadata } => {
return ( return (
typeof value === "function" && typeof value === "function" &&
PRIMOP_METADATA in value && PRIMOP_METADATA in value &&
@@ -101,172 +67,158 @@ export const is_primop = (value: unknown): value is Function & { [PRIMOP_METADAT
); );
}; };
/** export const getPrimopMetadata = (func: NixValue): PrimopMetadata | undefined => {
* Get primop metadata from a function if (isPrimop(func)) {
* @param func - Function to get metadata from
* @returns Metadata if function is a primop, undefined otherwise
*/
export const get_primop_metadata = (func: unknown): PrimopMetadata | undefined => {
if (is_primop(func)) {
return func[PRIMOP_METADATA]; return func[PRIMOP_METADATA];
} }
return undefined; return undefined;
}; };
/** export const builtins: NixAttrs = new Map<string, NixValue>(
* The global builtins object Object.entries({
* Contains 80+ Nix builtin functions plus metadata add: mkPrimop(arithmetic.add, "add", 2),
* sub: mkPrimop(arithmetic.sub, "sub", 2),
* All functions are curried for Nix semantics: mul: mkPrimop(arithmetic.mul, "mul", 2),
* - Single argument functions: (a) => result div: mkPrimop(arithmetic.div, "div", 2),
* - Multi-argument functions: (a) => (b) => result bitAnd: mkPrimop(arithmetic.bitAnd, "bitAnd", 2),
* bitOr: mkPrimop(arithmetic.bitOr, "bitOr", 2),
* All primop functions are marked with PRIMOP_METADATA symbol for runtime introspection bitXor: mkPrimop(arithmetic.bitXor, "bitXor", 2),
*/ lessThan: mkPrimop(arithmetic.lessThan, "lessThan", 2),
export const builtins: any = {
add: mkPrimop(arithmetic.add, "add", 2),
sub: mkPrimop(arithmetic.sub, "sub", 2),
mul: mkPrimop(arithmetic.mul, "mul", 2),
div: mkPrimop(arithmetic.div, "div", 2),
bitAnd: mkPrimop(arithmetic.bitAnd, "bitAnd", 2),
bitOr: mkPrimop(arithmetic.bitOr, "bitOr", 2),
bitXor: mkPrimop(arithmetic.bitXor, "bitXor", 2),
lessThan: mkPrimop(arithmetic.lessThan, "lessThan", 2),
ceil: mkPrimop(math.ceil, "ceil", 1), ceil: mkPrimop(math.ceil, "ceil", 1),
floor: mkPrimop(math.floor, "floor", 1), floor: mkPrimop(math.floor, "floor", 1),
isAttrs: mkPrimop((e: NixValue) => typeCheck.isAttrs(force(e)), "isAttrs", 1), isAttrs: mkPrimop((e: NixValue) => typeCheck.isAttrs(force(e)), "isAttrs", 1),
isBool: mkPrimop((e: NixValue) => typeCheck.isBool(force(e)), "isBool", 1), isBool: mkPrimop((e: NixValue) => typeCheck.isBool(force(e)), "isBool", 1),
isFloat: mkPrimop((e: NixValue) => typeCheck.isFloat(force(e)), "isFloat", 1), isFloat: mkPrimop((e: NixValue) => typeCheck.isFloat(force(e)), "isFloat", 1),
isFunction: mkPrimop((e: NixValue) => typeCheck.isFunction(force(e)), "isFunction", 1), isFunction: mkPrimop((e: NixValue) => typeCheck.isFunction(force(e)), "isFunction", 1),
isInt: mkPrimop((e: NixValue) => typeCheck.isInt(force(e)), "isInt", 1), isInt: mkPrimop((e: NixValue) => typeCheck.isInt(force(e)), "isInt", 1),
isList: mkPrimop((e: NixValue) => typeCheck.isList(force(e)), "isList", 1), isList: mkPrimop((e: NixValue) => typeCheck.isList(force(e)), "isList", 1),
isNull: mkPrimop((e: NixValue) => typeCheck.isNull(force(e)), "isNull", 1), isNull: mkPrimop((e: NixValue) => typeCheck.isNull(force(e)), "isNull", 1),
isPath: mkPrimop((e: NixValue) => typeCheck.isPath(force(e)), "isPath", 1), isPath: mkPrimop((e: NixValue) => typeCheck.isPath(force(e)), "isPath", 1),
isString: mkPrimop((e: NixValue) => typeCheck.isString(force(e)), "isString", 1), isString: mkPrimop((e: NixValue) => typeCheck.isString(force(e)), "isString", 1),
typeOf: mkPrimop((e: NixValue) => typeCheck.typeOf(force(e)), "typeOf", 1), typeOf: mkPrimop((e: NixValue) => typeCheck.typeOf(force(e)), "typeOf", 1),
map: mkPrimop(list.map, "map", 2), map: mkPrimop(list.map, "map", 2),
filter: mkPrimop(list.filter, "filter", 2), filter: mkPrimop(list.filter, "filter", 2),
length: mkPrimop(list.length, "length", 1), length: mkPrimop(list.length, "length", 1),
head: mkPrimop(list.head, "head", 1), head: mkPrimop(list.head, "head", 1),
tail: mkPrimop(list.tail, "tail", 1), tail: mkPrimop(list.tail, "tail", 1),
elem: mkPrimop(list.elem, "elem", 2), elem: mkPrimop(list.elem, "elem", 2),
elemAt: mkPrimop(list.elemAt, "elemAt", 2), elemAt: mkPrimop(list.elemAt, "elemAt", 2),
concatLists: mkPrimop(list.concatLists, "concatLists", 1), concatLists: mkPrimop(list.concatLists, "concatLists", 1),
concatMap: mkPrimop(list.concatMap, "concatMap", 2), concatMap: mkPrimop(list.concatMap, "concatMap", 2),
"foldl'": mkPrimop(list.foldlPrime, "foldl'", 3), "foldl'": mkPrimop(list.foldlPrime, "foldl'", 3),
sort: mkPrimop(list.sort, "sort", 2), sort: mkPrimop(list.sort, "sort", 2),
partition: mkPrimop(list.partition, "partition", 2), partition: mkPrimop(list.partition, "partition", 2),
genList: mkPrimop(list.genList, "genList", 2), genList: mkPrimop(list.genList, "genList", 2),
all: mkPrimop(list.all, "all", 2), all: mkPrimop(list.all, "all", 2),
any: mkPrimop(list.any, "any", 2), any: mkPrimop(list.any, "any", 2),
attrNames: mkPrimop(attrs.attrNames, "attrNames", 1), attrNames: mkPrimop(attrs.attrNames, "attrNames", 1),
attrValues: mkPrimop(attrs.attrValues, "attrValues", 1), attrValues: mkPrimop(attrs.attrValues, "attrValues", 1),
getAttr: mkPrimop(attrs.getAttr, "getAttr", 2), getAttr: mkPrimop(attrs.getAttr, "getAttr", 2),
hasAttr: mkPrimop(attrs.hasAttr, "hasAttr", 2), hasAttr: mkPrimop(attrs.hasAttr, "hasAttr", 2),
mapAttrs: mkPrimop(attrs.mapAttrs, "mapAttrs", 2), mapAttrs: mkPrimop(attrs.mapAttrs, "mapAttrs", 2),
removeAttrs: mkPrimop(attrs.removeAttrs, "removeAttrs", 2), removeAttrs: mkPrimop(attrs.removeAttrs, "removeAttrs", 2),
listToAttrs: mkPrimop(attrs.listToAttrs, "listToAttrs", 1), listToAttrs: mkPrimop(attrs.listToAttrs, "listToAttrs", 1),
intersectAttrs: mkPrimop(attrs.intersectAttrs, "intersectAttrs", 2), intersectAttrs: mkPrimop(attrs.intersectAttrs, "intersectAttrs", 2),
catAttrs: mkPrimop(attrs.catAttrs, "catAttrs", 2), catAttrs: mkPrimop(attrs.catAttrs, "catAttrs", 2),
groupBy: mkPrimop(attrs.groupBy, "groupBy", 2), groupBy: mkPrimop(attrs.groupBy, "groupBy", 2),
zipAttrsWith: mkPrimop(attrs.zipAttrsWith, "zipAttrsWith", 2), zipAttrsWith: mkPrimop(attrs.zipAttrsWith, "zipAttrsWith", 2),
unsafeGetAttrPos: mkPrimop(attrs.unsafeGetAttrPos, "unsafeGetAttrPos", 2), unsafeGetAttrPos: mkPrimop(attrs.unsafeGetAttrPos, "unsafeGetAttrPos", 2),
stringLength: mkPrimop(string.stringLength, "stringLength", 1), stringLength: mkPrimop(string.stringLength, "stringLength", 1),
substring: mkPrimop(string.substring, "substring", 3), substring: mkPrimop(string.substring, "substring", 3),
concatStringsSep: mkPrimop(string.concatStringsSep, "concatStringsSep", 2), concatStringsSep: mkPrimop(string.concatStringsSep, "concatStringsSep", 2),
baseNameOf: mkPrimop(pathOps.baseNameOf, "baseNameOf", 1), baseNameOf: mkPrimop(pathOps.baseNameOf, "baseNameOf", 1),
dirOf: mkPrimop(pathOps.dirOf, "dirOf", 1), dirOf: mkPrimop(pathOps.dirOf, "dirOf", 1),
toPath: mkPrimop(pathOps.toPath, "toPath", 1), toPath: mkPrimop(pathOps.toPath, "toPath", 1),
match: mkPrimop(string.match, "match", 2), match: mkPrimop(string.match, "match", 2),
split: mkPrimop(string.split, "split", 2), split: mkPrimop(string.split, "split", 2),
seq: mkPrimop(functional.seq, "seq", 2), seq: mkPrimop(functional.seq, "seq", 2),
deepSeq: mkPrimop(functional.deepSeq, "deepSeq", 2), deepSeq: mkPrimop(functional.deepSeq, "deepSeq", 2),
abort: mkPrimop(functional.abort, "abort", 1), abort: mkPrimop(functional.abort, "abort", 1),
throw: mkPrimop(functional.throwFunc, "throw", 1), throw: mkPrimop(functional.throwFunc, "throw", 1),
trace: mkPrimop(functional.trace, "trace", 2), trace: mkPrimop(functional.trace, "trace", 2),
warn: mkPrimop(functional.warn, "warn", 2), warn: mkPrimop(functional.warn, "warn", 2),
break: mkPrimop(functional.breakFunc, "break", 1), break: mkPrimop(functional.breakFunc, "break", 1),
derivation: undefined as any, derivation: mkPrimop(derivation.derivationStub, "derivation", 1),
derivationStrict: mkPrimop(derivation.derivationStrict, "derivationStrict", 1), derivationStrict: mkPrimop(derivation.derivationStrict, "derivationStrict", 1),
import: mkPrimop(io.importFunc, "import", 1), import: mkPrimop(io.importFunc, "import", 1),
scopedImport: mkPrimop(io.scopedImport, "scopedImport", 2), scopedImport: mkPrimop(io.scopedImport, "scopedImport", 2),
storePath: mkPrimop(io.storePath, "storePath", 1), storePath: mkPrimop(io.storePath, "storePath", 1),
fetchClosure: mkPrimop(io.fetchClosure, "fetchClosure", 1), fetchClosure: mkPrimop(io.fetchClosure, "fetchClosure", 1),
fetchMercurial: mkPrimop(io.fetchMercurial, "fetchMercurial", 1), fetchMercurial: mkPrimop(io.fetchMercurial, "fetchMercurial", 1),
fetchGit: mkPrimop(io.fetchGit, "fetchGit", 1), fetchGit: mkPrimop(io.fetchGit, "fetchGit", 1),
fetchTarball: mkPrimop(io.fetchTarball, "fetchTarball", 1), fetchTarball: mkPrimop(io.fetchTarball, "fetchTarball", 1),
fetchTree: mkPrimop(io.fetchTree, "fetchTree", 1), fetchTree: mkPrimop(io.fetchTree, "fetchTree", 1),
fetchurl: mkPrimop(io.fetchurl, "fetchurl", 1), fetchurl: mkPrimop(io.fetchurl, "fetchurl", 1),
readDir: mkPrimop(io.readDir, "readDir", 1), readDir: mkPrimop(io.readDir, "readDir", 1),
readFile: mkPrimop(io.readFile, "readFile", 1), readFile: mkPrimop(io.readFile, "readFile", 1),
readFileType: mkPrimop(io.readFileType, "readFileType", 1), readFileType: mkPrimop(io.readFileType, "readFileType", 1),
pathExists: mkPrimop(io.pathExists, "pathExists", 1), pathExists: mkPrimop(io.pathExists, "pathExists", 1),
path: mkPrimop(io.path, "path", 1), path: mkPrimop(io.path, "path", 1),
toFile: mkPrimop(io.toFile, "toFile", 2), toFile: mkPrimop(io.toFile, "toFile", 2),
filterSource: mkPrimop(io.filterSource, "filterSource", 2), filterSource: mkPrimop(io.filterSource, "filterSource", 2),
findFile: mkPrimop(io.findFile, "findFile", 2), findFile: mkPrimop(io.findFile, "findFile", 2),
getEnv: mkPrimop(io.getEnv, "getEnv", 1), getEnv: mkPrimop(io.getEnv, "getEnv", 1),
fromJSON: mkPrimop(conversion.fromJSON, "fromJSON", 1), fromJSON: mkPrimop(conversion.fromJSON, "fromJSON", 1),
fromTOML: mkPrimop(conversion.fromTOML, "fromTOML", 1), fromTOML: mkPrimop(conversion.fromTOML, "fromTOML", 1),
toJSON: mkPrimop(conversion.toJSON, "toJSON", 1), toJSON: mkPrimop(conversion.toJSON, "toJSON", 1),
toXML: mkPrimop(conversion.toXML, "toXML", 1), toXML: mkPrimop(conversion.toXML, "toXML", 1),
toString: mkPrimop(conversion.toStringFunc, "toString", 1), toString: mkPrimop(conversion.toStringFunc, "toString", 1),
addErrorContext: mkPrimop(misc.addErrorContext, "addErrorContext", 1), hashFile: mkPrimop(hash.hashFile, "hashFile", 2),
appendContext: mkPrimop(misc.appendContext, "appendContext", 1), hashString: mkPrimop(hash.hashString, "hashString", 2),
getContext: mkPrimop(misc.getContext, "getContext", 1), convertHash: mkPrimop(hash.convertHash, "convertHash", 2),
hasContext: mkPrimop(misc.hasContext, "hasContext", 1),
hashFile: mkPrimop(misc.hashFile, "hashFile", 2),
hashString: mkPrimop(misc.hashString, "hashString", 2),
convertHash: mkPrimop(misc.convertHash, "convertHash", 2),
unsafeDiscardOutputDependency: mkPrimop(
misc.unsafeDiscardOutputDependency,
"unsafeDiscardOutputDependency",
1,
),
unsafeDiscardStringContext: mkPrimop(misc.unsafeDiscardStringContext, "unsafeDiscardStringContext", 1),
addDrvOutputDependencies: mkPrimop(misc.addDrvOutputDependencies, "addDrvOutputDependencies", 2),
compareVersions: mkPrimop(misc.compareVersions, "compareVersions", 2),
flakeRefToString: mkPrimop(misc.flakeRefToString, "flakeRefToString", 1),
functionArgs: mkPrimop(misc.functionArgs, "functionArgs", 1),
genericClosure: mkPrimop(misc.genericClosure, "genericClosure", 1),
getFlake: mkPrimop(misc.getFlake, "getFlake", 1),
outputOf: mkPrimop(misc.outputOf, "outputOf", 2),
parseDrvName: mkPrimop(misc.parseDrvName, "parseDrvName", 1),
parseFlakeName: mkPrimop(misc.parseFlakeName, "parseFlakeName", 1),
parseFlakeRef: mkPrimop(misc.parseFlakeRef, "parseFlakeRef", 1),
placeholder: mkPrimop(misc.placeholder, "placeholder", 1),
replaceStrings: mkPrimop(misc.replaceStrings, "replaceStrings", 3),
splitVersion: mkPrimop(misc.splitVersion, "splitVersion", 1),
traceVerbose: mkPrimop(misc.traceVerbose, "traceVerbose", 2),
tryEval: mkPrimop(misc.tryEval, "tryEval", 1),
builtins: createThunk(() => builtins, "builtins"), flakeRefToString: mkPrimop(flake.flakeRefToString, "flakeRefToString", 1),
currentSystem: createThunk(() => { getFlake: mkPrimop(flake.getFlake, "getFlake", 1),
return "x86_64-linux"; parseFlakeName: mkPrimop(flake.parseFlakeName, "parseFlakeName", 1),
}, "currentSystem"), parseFlakeRef: mkPrimop(flake.parseFlakeRef, "parseFlakeRef", 1),
currentTime: createThunk(() => Date.now(), "currentTime"),
false: false, addErrorContext: mkPrimop(misc.addErrorContext, "addErrorContext", 1),
true: true, appendContext: mkPrimop(misc.appendContext, "appendContext", 1),
null: null, getContext: mkPrimop(misc.getContext, "getContext", 1),
hasContext: mkPrimop(misc.hasContext, "hasContext", 1),
unsafeDiscardOutputDependency: mkPrimop(
misc.unsafeDiscardOutputDependency,
"unsafeDiscardOutputDependency",
1,
),
unsafeDiscardStringContext: mkPrimop(misc.unsafeDiscardStringContext, "unsafeDiscardStringContext", 1),
addDrvOutputDependencies: mkPrimop(misc.addDrvOutputDependencies, "addDrvOutputDependencies", 2),
compareVersions: mkPrimop(misc.compareVersions, "compareVersions", 2),
functionArgs: mkPrimop(misc.functionArgs, "functionArgs", 1),
genericClosure: mkPrimop(misc.genericClosure, "genericClosure", 1),
outputOf: mkPrimop(misc.outputOf, "outputOf", 2),
parseDrvName: mkPrimop(misc.parseDrvName, "parseDrvName", 1),
placeholder: mkPrimop(misc.placeholder, "placeholder", 1),
replaceStrings: mkPrimop(misc.replaceStrings, "replaceStrings", 3),
splitVersion: mkPrimop(misc.splitVersion, "splitVersion", 1),
traceVerbose: mkPrimop(misc.traceVerbose, "traceVerbose", 2),
tryEval: mkPrimop(misc.tryEval, "tryEval", 1),
langVersion: 6, builtins: createThunk(() => builtins, "builtins"),
nixPath: [], currentSystem: createThunk(() => {
nixVersion: "2.31.2", return "x86_64-linux";
storeDir: "INVALID_PATH", }, "currentSystem"),
currentTime: createThunk(() => Date.now(), "currentTime"),
__traceCaller: (e: NixValue) => { false: false,
console.log(`traceCaller: ${getTos()}`); true: true,
return e; null: null,
},
}; langVersion: 6,
nixPath: [],
nixVersion: "2.31.2",
storeDir: createThunk(() => {
throw new Error("stub storeDir evaluated");
}),
}),
);

View File

@@ -1,8 +1,8 @@
/** import { select } from "../helpers";
* I/O and filesystem builtin functions import { getPathValue } from "../path";
* Implemented via Rust ops exposed through deno_core import type { NixStringContext, StringWithContext } from "../string-context";
*/ import { addOpaqueContext, decodeContextElem, mkStringWithContext } from "../string-context";
import { force } from "../thunk";
import { import {
forceAttrs, forceAttrs,
forceBool, forceBool,
@@ -11,42 +11,47 @@ import {
forceStringNoCtx, forceStringNoCtx,
forceStringValue, forceStringValue,
} from "../type-assert"; } from "../type-assert";
import type { NixValue, NixAttrs, NixPath, NixString } from "../types"; import type { NixAttrs, NixString, NixValue } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types"; import { CatchableError, isNixPath, NixPath } from "../types";
import { force } from "../thunk";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion"; import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { getPathValue } from "../path";
import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext, addOpaqueContext } from "../string-context";
import { isAttrs, isPath } from "./type-check";
import { baseNameOf } from "./path"; import { baseNameOf } from "./path";
import { isAttrs, isPath, isString } from "./type-check";
import { execBytecode, execBytecodeScoped } from "../vm";
const importCache = new Map<string, NixValue>(); const importCache = new Map<string, NixValue>();
export const importFunc = (path: NixValue): NixValue => { const realiseContext = (context: NixStringContext): void => {
const context: NixStringContext = new Set(); for (const encoded of context) {
const pathStr = coerceToPath(path, context); const elem = decodeContextElem(encoded);
if (elem.type === "built") {
// FIXME: Context collected but not yet propagated to build system throw new Error(
// This means derivation dependencies from imported paths are not `cannot build derivation '${elem.drvPath}' during evaluation because import-from-derivation is not supported`,
// currently tracked. This will cause issues when: );
// 1. Importing from derivation outputs: import "${drv}/file.nix" }
// 2. Building packages that depend on imported configurations
if (context.size > 0) {
console.warn(
`[WARN] import: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
);
} }
};
export const realisePath = (value: NixValue): string => {
const context: NixStringContext = new Set();
const pathStr = coerceToPath(value, context);
if (context.size > 0) {
realiseContext(context);
}
return pathStr;
};
export const importFunc = (path: NixValue): NixValue => {
const pathStr = realisePath(path);
const cached = importCache.get(pathStr); const cached = importCache.get(pathStr);
if (cached !== undefined) { if (cached !== undefined) {
return cached; return cached;
} }
// Call Rust op - returns JS code string const [code, currentDir] = Deno.core.ops.op_import(pathStr);
const code = Deno.core.ops.op_import(pathStr); const result = execBytecode(code, currentDir);
const result = Function(`return (${code})`)();
importCache.set(pathStr, result); importCache.set(pathStr, result);
return result; return result;
@@ -56,22 +61,12 @@ export const scopedImport =
(scope: NixValue) => (scope: NixValue) =>
(path: NixValue): NixValue => { (path: NixValue): NixValue => {
const scopeAttrs = forceAttrs(scope); const scopeAttrs = forceAttrs(scope);
const scopeKeys = Object.keys(scopeAttrs); const scopeKeys = Array.from(scopeAttrs.keys());
const context: NixStringContext = new Set(); const pathStr = realisePath(path);
const pathStr = coerceToPath(path, context);
if (context.size > 0) { const [code, currentDir] = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
console.warn( return execBytecodeScoped(code, currentDir, scopeAttrs);
`[WARN] scopedImport: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
);
}
const code = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
const scopedFunc = Function(`return (${code})`)();
return scopedFunc(scopeAttrs);
}; };
export const storePath = (pathArg: NixValue): StringWithContext => { export const storePath = (pathArg: NixValue): StringWithContext => {
@@ -84,29 +79,29 @@ export const storePath = (pathArg: NixValue): StringWithContext => {
return mkStringWithContext(validatedPath, context); return mkStringWithContext(validatedPath, context);
}; };
export const fetchClosure = (args: NixValue): never => { export const fetchClosure = (_args: NixValue): never => {
throw new Error("Not implemented: fetchClosure"); throw new Error("Not implemented: fetchClosure");
}; };
export interface FetchUrlResult { export interface FetchUrlResult {
store_path: string; storePath: string;
hash: string; hash: string;
} }
export interface FetchTarballResult { export interface FetchTarballResult {
store_path: string; storePath: string;
nar_hash: string; narHash: string;
} }
export interface FetchGitResult { export interface FetchGitResult {
out_path: string; outPath: string;
rev: string; rev: string;
short_rev: string; shortRev: string;
rev_count: number; revCount: number;
last_modified: number; lastModified: number;
last_modified_date: string; lastModifiedDate: string;
submodules: boolean; submodules: boolean;
nar_hash: string | null; narHash: string | null;
} }
const normalizeUrlInput = ( const normalizeUrlInput = (
@@ -117,26 +112,24 @@ const normalizeUrlInput = (
return { url: forced }; return { url: forced };
} }
const attrs = forceAttrs(args); const attrs = forceAttrs(args);
const url = forceStringValue(attrs.url); const url = forceStringValue(select(attrs, ["url"]));
const hash = const hash = attrs.has("sha256")
"sha256" in attrs ? forceStringValue(attrs.get("sha256") as NixValue)
? forceStringValue(attrs.sha256) : attrs.has("hash")
: "hash" in attrs ? forceStringValue(attrs.get("hash") as NixValue)
? forceStringValue(attrs.hash) : undefined;
: undefined; const name = attrs.has("name") ? forceStringValue(attrs.get("name") as NixValue) : undefined;
const name = "name" in attrs ? forceStringValue(attrs.name) : undefined; const executable = attrs.has("executable") ? forceBool(attrs.get("executable") as NixValue) : false;
const executable = "executable" in attrs ? forceBool(attrs.executable) : false;
return { url, hash, name, executable }; return { url, hash, name, executable };
}; };
const normalizeTarballInput = (args: NixValue): { url: string; sha256?: string; name?: string } => { const normalizeTarballInput = (args: NixValue): { url: string; sha256?: string; name?: string } => {
const forced = force(args); const forced = force(args);
if (isAttrs(forced)) { if (isAttrs(forced)) {
const url = resolvePseudoUrl(forceStringNoCtx(forced.url)); const url = resolvePseudoUrl(forceStringNoCtx(select(forced, ["url"])));
const sha256 = "sha256" in forced ? forceStringNoCtx(forced.sha256) : undefined; const sha256 = forced.has("sha256") ? forceStringNoCtx(forced.get("sha256") as NixValue) : undefined;
const nameRaw = "name" in forced ? forceStringNoCtx(forced.name) : undefined; const nameRaw = forced.has("name") ? forceStringNoCtx(forced.get("name") as NixValue) : undefined;
// FIXME: extract baseNameOfRaw const name = nameRaw === "" ? (baseNameOf(nameRaw) as string) : nameRaw;
const name = nameRaw === "" ? baseNameOf(nameRaw) as string : nameRaw;
return { url, sha256, name }; return { url, sha256, name };
} else { } else {
return { url: forceStringNoCtx(forced) }; return { url: forceStringNoCtx(forced) };
@@ -145,11 +138,11 @@ const normalizeTarballInput = (args: NixValue): { url: string; sha256?: string;
const resolvePseudoUrl = (url: string) => { const resolvePseudoUrl = (url: string) => {
if (url.startsWith("channel:")) { if (url.startsWith("channel:")) {
return `https://channels.nixos.org/${url.substring(8)}/nixexprs.tar.xz` return `https://channels.nixos.org/${url.substring(8)}/nixexprs.tar.xz`;
} else { } else {
return url return url;
} }
} };
export const fetchurl = (args: NixValue): NixString => { export const fetchurl = (args: NixValue): NixString => {
const { url, hash, name, executable } = normalizeUrlInput(args); const { url, hash, name, executable } = normalizeUrlInput(args);
@@ -160,44 +153,45 @@ export const fetchurl = (args: NixValue): NixString => {
executable ?? false, executable ?? false,
); );
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
addOpaqueContext(context, result.store_path); addOpaqueContext(context, result.storePath);
return mkStringWithContext(result.store_path, context); return mkStringWithContext(result.storePath, context);
}; };
export const fetchTarball = (args: NixValue): NixString => { export const fetchTarball = (args: NixValue): NixString => {
const { url, name, sha256 } = normalizeTarballInput(args); const { url, name, sha256 } = normalizeTarballInput(args);
const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, name ?? null, sha256 ?? null); const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, name ?? null, sha256 ?? null);
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
addOpaqueContext(context, result.store_path); addOpaqueContext(context, result.storePath);
return mkStringWithContext(result.store_path, context); return mkStringWithContext(result.storePath, context);
}; };
export const fetchGit = (args: NixValue): NixAttrs => { export const fetchGit = (args: NixValue): NixAttrs => {
const forced = force(args); const forced = force(args);
if (typeof forced === "string" || isPath(forced)) { const disposedContext: NixStringContext = new Set();
const path = coerceToPath(forced); if (isString(forced) || isPath(forced)) {
const result: FetchGitResult = Deno.core.ops.op_fetch_git(path, null, null, false, false, false, null); const url = coerceToString(forced, StringCoercionMode.Base, false, disposedContext);
const result = Deno.core.ops.op_fetch_git(url, null, null, false, false, false, null);
const outContext: NixStringContext = new Set(); const outContext: NixStringContext = new Set();
addOpaqueContext(outContext, result.out_path); addOpaqueContext(outContext, result.outPath);
return { return new Map<string, NixValue>([
outPath: mkStringWithContext(result.out_path, outContext), ["outPath", mkStringWithContext(result.outPath, outContext)],
rev: result.rev, ["rev", result.rev],
shortRev: result.short_rev, ["shortRev", result.shortRev],
revCount: BigInt(result.rev_count), ["revCount", BigInt(result.revCount)],
lastModified: BigInt(result.last_modified), ["lastModified", BigInt(result.lastModified)],
lastModifiedDate: result.last_modified_date, ["lastModifiedDate", result.lastModifiedDate],
submodules: result.submodules, ["submodules", result.submodules],
narHash: result.nar_hash, ["narHash", result.narHash],
}; ]);
} }
const attrs = forceAttrs(args); const attrs = forceAttrs(args);
const url = forceStringValue(attrs.url); const url = forceStringValue(select(attrs, ["url"]));
const gitRef = "ref" in attrs ? forceStringValue(attrs.ref) : null; const gitRef = attrs.has("ref") ? forceStringValue(attrs.get("ref") as NixValue) : null;
const rev = "rev" in attrs ? forceStringValue(attrs.rev) : null; const rev = attrs.has("rev") ? forceStringValue(attrs.get("rev") as NixValue) : null;
const shallow = "shallow" in attrs ? forceBool(attrs.shallow) : false; const shallow = attrs.has("shallow") ? forceBool(attrs.get("shallow") as NixValue) : false;
const submodules = "submodules" in attrs ? forceBool(attrs.submodules) : false; const submodules = attrs.has("submodules") ? forceBool(attrs.get("submodules") as NixValue) : false;
const allRefs = "allRefs" in attrs ? forceBool(attrs.allRefs) : false; const allRefs = attrs.has("allRefs") ? forceBool(attrs.get("allRefs") as NixValue) : false;
const name = "name" in attrs ? forceStringValue(attrs.name) : null; const name = attrs.has("name") ? forceStringValue(attrs.get("name") as NixValue) : null;
const result: FetchGitResult = Deno.core.ops.op_fetch_git( const result: FetchGitResult = Deno.core.ops.op_fetch_git(
url, url,
@@ -210,26 +204,26 @@ export const fetchGit = (args: NixValue): NixAttrs => {
); );
const outContext: NixStringContext = new Set(); const outContext: NixStringContext = new Set();
addOpaqueContext(outContext, result.out_path); addOpaqueContext(outContext, result.outPath);
return { return new Map<string, NixValue>([
outPath: mkStringWithContext(result.out_path, outContext), ["outPath", mkStringWithContext(result.outPath, outContext)],
rev: result.rev, ["rev", result.rev],
shortRev: result.short_rev, ["shortRev", result.shortRev],
revCount: BigInt(result.rev_count), ["revCount", BigInt(result.revCount)],
lastModified: BigInt(result.last_modified), ["lastModified", BigInt(result.lastModified)],
lastModifiedDate: result.last_modified_date, ["lastModifiedDate", result.lastModifiedDate],
submodules: result.submodules, ["submodules", result.submodules],
narHash: result.nar_hash, ["narHash", result.narHash],
}; ]);
}; };
export const fetchMercurial = (_args: NixValue): NixAttrs => { export const fetchMercurial = (_args: NixValue): NixAttrs => {
throw new Error("Not implemented: fetchMercurial") throw new Error("Not implemented: fetchMercurial");
}; };
export const fetchTree = (args: NixValue): NixAttrs => { export const fetchTree = (args: NixValue): NixAttrs => {
const attrs = forceAttrs(args); const attrs = forceAttrs(args);
const type = "type" in attrs ? forceStringValue(attrs.type) : "auto"; const type = attrs.has("type") ? forceStringValue(attrs.get("type") as NixValue) : "auto";
switch (type) { switch (type) {
case "git": case "git":
@@ -238,29 +232,31 @@ export const fetchTree = (args: NixValue): NixAttrs => {
case "mercurial": case "mercurial":
return fetchMercurial(args); return fetchMercurial(args);
case "tarball": case "tarball":
return { outPath: fetchTarball(args) }; return new Map<string, NixValue>([["outPath", fetchTarball(args)]]);
case "file": case "file":
return { outPath: fetchurl(args) }; return new Map<string, NixValue>([["outPath", fetchurl(args)]]);
case "path": { case "path": {
const path = forceStringValue(attrs.path); const path = forceStringValue(select(attrs, ["path"]));
return { outPath: path }; return new Map<string, NixValue>([["outPath", path]]);
} }
case "github": case "github":
case "gitlab": case "gitlab":
case "sourcehut": case "sourcehut":
return fetchGitForge(type, attrs); return fetchGitForge(type, attrs);
case "auto":
default: default:
return autoDetectAndFetch(attrs); return autoDetectAndFetch(attrs);
} }
}; };
const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => { const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => {
const owner = forceStringValue(attrs.owner); const owner = forceStringValue(select(forge, ["owner"]));
const repo = forceStringValue(attrs.repo); const repo = forceStringValue(select(forge, ["repo"]));
const rev = const rev = attrs.has("rev")
"rev" in attrs ? forceStringValue(attrs.rev) : "ref" in attrs ? forceStringValue(attrs.ref) : "HEAD"; ? forceStringValue(attrs.get("rev") as NixValue)
const host = "host" in attrs ? forceStringValue(attrs.host) : undefined; : attrs.has("ref")
? forceStringValue(attrs.get("ref") as NixValue)
: "HEAD";
const host = attrs.has("host") ? forceStringValue(attrs.get("host") as NixValue) : undefined;
let tarballUrl: string; let tarballUrl: string;
switch (forge) { switch (forge) {
@@ -283,17 +279,17 @@ const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => {
throw new Error(`Unknown forge type: ${forge}`); throw new Error(`Unknown forge type: ${forge}`);
} }
const outPath = fetchTarball({ url: tarballUrl, ...attrs }); const outPath = fetchTarball(new Map<string, NixValue>([["url", tarballUrl], ...attrs]));
return { return new Map<string, NixValue>([
outPath, ["outPath", outPath],
rev, ["rev", rev],
shortRev: rev.substring(0, 7), ["shortRev", rev.substring(0, 7)],
}; ]);
}; };
const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => { const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
const url = forceStringValue(attrs.url); const url = forceStringValue(select(attrs, ["url"]));
if (url.endsWith(".git") || url.includes("github.com") || url.includes("gitlab.com")) { if (url.endsWith(".git") || url.includes("github.com") || url.includes("gitlab.com")) {
return fetchGit(attrs); return fetchGit(attrs);
} }
@@ -303,34 +299,33 @@ const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
url.endsWith(".tar.bz2") || url.endsWith(".tar.bz2") ||
url.endsWith(".tgz") url.endsWith(".tgz")
) { ) {
return { outPath: fetchTarball(attrs) }; return new Map<string, NixValue>([["outPath", fetchTarball(attrs)]]);
} }
return { outPath: fetchurl(attrs) }; return new Map<string, NixValue>([["outPath", fetchurl(attrs)]]);
}; };
export const readDir = (path: NixValue): NixAttrs => { export const readDir = (path: NixValue): NixAttrs => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
const entries: Record<string, string> = Deno.core.ops.op_read_dir(pathStr); return Deno.core.ops.op_read_dir(pathStr);
const result: NixAttrs = {};
for (const [name, type] of Object.entries(entries)) {
result[name] = type;
}
return result;
}; };
export const readFile = (path: NixValue): string => { export const readFile = (path: NixValue): string => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
return Deno.core.ops.op_read_file(pathStr); return Deno.core.ops.op_read_file(pathStr);
}; };
export const readFileType = (path: NixValue): string => { export const readFileType = (path: NixValue): string => {
const pathStr = coerceToPath(path); const pathStr = realisePath(path);
return Deno.core.ops.op_read_file_type(pathStr); return Deno.core.ops.op_read_file_type(pathStr);
}; };
export const pathExists = (path: NixValue): boolean => { export const pathExists = (path: NixValue): boolean => {
const pathStr = coerceToPath(path); try {
return Deno.core.ops.op_path_exists(pathStr); const pathStr = realisePath(path);
return Deno.core.ops.op_path_exists(pathStr);
} catch {
return false;
}
}; };
/** /**
@@ -349,11 +344,11 @@ export const pathExists = (path: NixValue): boolean => {
export const path = (args: NixValue): NixString => { export const path = (args: NixValue): NixString => {
const attrs = forceAttrs(args); const attrs = forceAttrs(args);
if (!("path" in attrs)) { if (!attrs.has("path")) {
throw new TypeError("builtins.path: 'path' attribute is required"); throw new TypeError("builtins.path: 'path' attribute is required");
} }
const pathValue = force(attrs.path); const pathValue = force(attrs.get("path") as NixValue);
let pathStr: string; let pathStr: string;
if (isNixPath(pathValue)) { if (isNixPath(pathValue)) {
@@ -362,20 +357,20 @@ export const path = (args: NixValue): NixString => {
pathStr = forceStringValue(pathValue); pathStr = forceStringValue(pathValue);
} }
const name = "name" in attrs ? forceStringValue(attrs.name) : null; const name = attrs.has("name") ? forceStringValue(attrs.get("name") as NixValue) : null;
const recursive = "recursive" in attrs ? forceBool(attrs.recursive) : true; const recursive = attrs.has("recursive") ? forceBool(attrs.get("recursive") as NixValue) : true;
const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null; const sha256 = attrs.has("sha256") ? forceStringValue(attrs.get("sha256") as NixValue) : null;
let storePath: string; let storePath: string;
if ("filter" in attrs) { if (attrs.has("filter")) {
const filterFn = forceFunction(attrs.filter); const filterFn = forceFunction(attrs.get("filter") as NixValue);
const entries: [string, string][] = Deno.core.ops.op_walk_dir(pathStr); const entries: [string, string][] = Deno.core.ops.op_walk_dir(pathStr);
const includePaths: string[] = []; const includePaths: string[] = [];
for (const [relPath, fileType] of entries) { for (const [relPath, fileType] of entries) {
const fullPath = pathStr + "/" + relPath; const fullPath = `${pathStr}/${relPath}`;
const innerFn = forceFunction(filterFn(fullPath)); const innerFn = forceFunction(filterFn(fullPath));
const shouldInclude = force(innerFn(fileType)); const shouldInclude = force(innerFn(fileType));
if (shouldInclude === true) { if (shouldInclude === true) {
@@ -383,13 +378,7 @@ export const path = (args: NixValue): NixString => {
} }
} }
storePath = Deno.core.ops.op_add_filtered_path( storePath = Deno.core.ops.op_add_filtered_path(pathStr, name, recursive, sha256, includePaths);
pathStr,
name,
recursive,
sha256,
includePaths,
);
} else { } else {
storePath = Deno.core.ops.op_add_path(pathStr, name, recursive, sha256); storePath = Deno.core.ops.op_add_path(pathStr, name, recursive, sha256);
} }
@@ -421,13 +410,11 @@ export const toFile =
return mkStringWithContext(storePath, new Set([storePath])); return mkStringWithContext(storePath, new Set([storePath]));
}; };
export const toPath = (name: NixValue, s: NixValue): never => { export const filterSource =
throw new Error("Not implemented: toPath"); (_filter: NixValue) =>
}; (_path: NixValue): never => {
throw new Error("Not implemented: filterSource");
export const filterSource = (args: NixValue): never => { };
throw new Error("Not implemented: filterSource");
};
const suffixIfPotentialMatch = (prefix: string, path: string): string | null => { const suffixIfPotentialMatch = (prefix: string, path: string): string | null => {
const n = prefix.length; const n = prefix.length;
@@ -454,9 +441,9 @@ export const findFile =
for (const item of forcedSearchPath) { for (const item of forcedSearchPath) {
const attrs = forceAttrs(item); const attrs = forceAttrs(item);
const prefix = "prefix" in attrs ? forceStringNoCtx(attrs.prefix) : ""; const prefix = attrs.has("prefix") ? forceStringNoCtx(attrs.get("prefix") as NixValue) : "";
if (!("path" in attrs)) { if (!attrs.has("path")) {
throw new Error("findFile: search path element is missing 'path' attribute"); throw new Error("findFile: search path element is missing 'path' attribute");
} }
@@ -466,7 +453,12 @@ export const findFile =
} }
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
const pathVal = coerceToString(attrs.path, StringCoercionMode.Interpolation, false, context); const pathVal = coerceToString(
attrs.get("path") as NixValue,
StringCoercionMode.Interpolation,
false,
context,
);
if (context.size > 0) { if (context.size > 0) {
throw new Error("findFile: path with string context is not yet supported"); throw new Error("findFile: path with string context is not yet supported");
@@ -477,13 +469,13 @@ export const findFile =
suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath; suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath;
if (Deno.core.ops.op_path_exists(candidatePath)) { if (Deno.core.ops.op_path_exists(candidatePath)) {
return { [IS_PATH]: true, value: candidatePath }; return new NixPath(candidatePath);
} }
} }
if (lookupPathStr.startsWith("nix/")) { if (lookupPathStr.startsWith("nix/")) {
// FIXME: special path type // FIXME: special path type
return { [IS_PATH]: true, value: `<${lookupPathStr}>` }; return new NixPath(`<${lookupPathStr}>`);
} }
throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`); throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`);

View File

@@ -1,12 +1,7 @@
/**
* List operation builtin functions
* All functions are properly curried
*/
import type { NixValue, NixList, NixAttrs } from "../types";
import { force } from "../thunk";
import { forceList, forceFunction, forceInt, forceBool } from "../type-assert";
import { op } from "../operators"; import { op } from "../operators";
import { force } from "../thunk";
import { forceBool, forceFunction, forceInt, forceList } from "../type-assert";
import type { NixAttrs, NixList, NixValue } from "../types";
export const map = export const map =
(f: NixValue) => (f: NixValue) =>
@@ -74,23 +69,23 @@ export const concatMap =
}; };
export const foldlPrime = export const foldlPrime =
(op_fn: NixValue) => (opFn: NixValue) =>
(nul: NixValue) => (nul: NixValue) =>
(list: NixValue): NixValue => { (list: NixValue): NixValue => {
const forced_op = forceFunction(op_fn); const forcedOp = forceFunction(opFn);
return forceList(list).reduce((acc: NixValue, cur: NixValue) => { return forceList(list).reduce((acc: NixValue, cur: NixValue) => {
return forceFunction(forced_op(acc))(cur); return forceFunction(forcedOp(acc))(cur);
}, nul); }, nul);
}; };
export const sort = export const sort =
(cmp: NixValue) => (cmp: NixValue) =>
(list: NixValue): NixList => { (list: NixValue): NixList => {
const forced_list = [...forceList(list)]; const forcedList = [...forceList(list)];
const forced_cmp = forceFunction(cmp); const forcedCmp = forceFunction(cmp);
return forced_list.sort((a, b) => { return forcedList.sort((a, b) => {
if (force(forceFunction(forced_cmp(a))(b))) return -1; if (force(forceFunction(forcedCmp(a))(b))) return -1;
if (force(forceFunction(forced_cmp(b))(a))) return 1; if (force(forceFunction(forcedCmp(b))(a))) return 1;
return 0; return 0;
}); });
}; };
@@ -98,20 +93,21 @@ export const sort =
export const partition = export const partition =
(pred: NixValue) => (pred: NixValue) =>
(list: NixValue): NixAttrs => { (list: NixValue): NixAttrs => {
const forced_list = forceList(list); const forcedList = forceList(list);
const forced_pred = forceFunction(pred); const forcedPred = forceFunction(pred);
const attrs = { const right: NixList = [];
right: [] as NixList, const wrong: NixList = [];
wrong: [] as NixList, for (const elem of forcedList) {
}; if (force(forcedPred(elem))) {
for (const elem of forced_list) { right.push(elem);
if (force(forced_pred(elem))) {
attrs.right.push(elem);
} else { } else {
attrs.wrong.push(elem); wrong.push(elem);
} }
} }
return attrs; return new Map<string, NixValue>([
["right", right],
["wrong", wrong],
]);
}; };
export const genList = export const genList =

View File

@@ -1,9 +1,5 @@
/**
* Math builtin functions
*/
import type { NixValue } from "../types";
import { forceNumeric } from "../type-assert"; import { forceNumeric } from "../type-assert";
import type { NixValue } from "../types";
export const ceil = (x: NixValue): bigint => { export const ceil = (x: NixValue): bigint => {
const val = forceNumeric(x); const val = forceNumeric(x);

View File

@@ -1,31 +1,28 @@
/** import { OrderedSet } from "js-sdsl";
* Miscellaneous builtin functions import { select } from "../helpers";
*/ import { compareValues } from "../operators";
import {
import { force } from "../thunk"; getStringContext,
import { CatchableError, ATTR_POSITIONS } from "../types"; getStringValue,
import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types"; mkStringWithContext,
type NixStringContext,
} from "../string-context";
import { force } from "../thunk";
import { import {
forceList,
forceAttrs, forceAttrs,
forceFunction, forceFunction,
forceStringValue, forceList,
forceString, forceString,
forceStringNoCtx, forceStringNoCtx,
forceStringValue,
} from "../type-assert"; } from "../type-assert";
import type { NixAttrs, NixStrictValue, NixValue } from "../types";
import { ATTR_POSITIONS, CatchableError } from "../types";
import * as context from "./context"; import * as context from "./context";
import { compareValues } from "../operators";
import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check"; import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check";
import { OrderedSet } from "js-sdsl";
import {
type NixStringContext,
getStringValue,
getStringContext,
mkStringWithContext,
} from "../string-context";
export const addErrorContext = export const addErrorContext =
(e1: NixValue) => (_e1: NixValue) =>
(e2: NixValue): NixValue => { (e2: NixValue): NixValue => {
// FIXME: // FIXME:
// console.log("[WARNING]: addErrorContext not implemented"); // console.log("[WARNING]: addErrorContext not implemented");
@@ -38,23 +35,6 @@ export const getContext = context.getContext;
export const hasContext = context.hasContext; export const hasContext = context.hasContext;
export const hashFile =
(type: NixValue) =>
(p: NixValue): never => {
const ty = forceStringNoCtx(type);
throw new Error("Not implemented: hashFile");
};
export const hashString =
(type: NixValue) =>
(p: NixValue): never => {
throw new Error("Not implemented: hashString");
};
export const convertHash = (args: NixValue): never => {
throw new Error("Not implemented: convertHash");
};
export const unsafeDiscardOutputDependency = context.unsafeDiscardOutputDependency; export const unsafeDiscardOutputDependency = context.unsafeDiscardOutputDependency;
export const unsafeDiscardStringContext = context.unsafeDiscardStringContext; export const unsafeDiscardStringContext = context.unsafeDiscardStringContext;
@@ -77,9 +57,9 @@ export const compareVersions =
i1 = c1.nextIndex; i1 = c1.nextIndex;
i2 = c2.nextIndex; i2 = c2.nextIndex;
if (componentsLT(c1.component, c2.component)) { if (componentsLt(c1.component, c2.component)) {
return -1n; return -1n;
} else if (componentsLT(c2.component, c1.component)) { } else if (componentsLt(c2.component, c1.component)) {
return 1n; return 1n;
} }
} }
@@ -121,7 +101,7 @@ function nextComponent(s: string, startIdx: number): ComponentResult {
return { component: s.substring(start, p), nextIndex: p }; return { component: s.substring(start, p), nextIndex: p };
} }
function componentsLT(c1: string, c2: string): boolean { function componentsLt(c1: string, c2: string): boolean {
const n1 = c1.match(/^[0-9]+$/) ? BigInt(c1) : null; const n1 = c1.match(/^[0-9]+$/) ? BigInt(c1) : null;
const n2 = c2.match(/^[0-9]+$/) ? BigInt(c2) : null; const n2 = c2.match(/^[0-9]+$/) ? BigInt(c2) : null;
@@ -155,35 +135,23 @@ function componentsLT(c1: string, c2: string): boolean {
return c1 < c2; return c1 < c2;
} }
export const dirOf = (s: NixValue): never => {
throw new Error("Not implemented: dirOf");
};
export const flakeRefToString = (attrs: NixValue): never => {
throw new Error("Not implemented: flakeRefToString");
};
export const functionArgs = (f: NixValue): NixAttrs => { export const functionArgs = (f: NixValue): NixAttrs => {
const func = forceFunction(f); const func = forceFunction(f);
if (func.args) { if (func.args) {
const ret: NixAttrs = {}; const ret: NixAttrs = new Map();
for (const key of func.args!.required) { for (const key of func.args.required) {
ret[key] = false; ret.set(key, false);
} }
for (const key of func.args!.optional) { for (const key of func.args.optional) {
ret[key] = true; ret.set(key, true);
} }
const positions = func.args!.positions; const positions = func.args.positions;
if (positions && Object.keys(positions).length > 0) { if (positions) {
Object.defineProperty(ret, ATTR_POSITIONS, { ret[ATTR_POSITIONS] = positions;
value: positions,
enumerable: false,
writable: false,
});
} }
return ret; return ret;
} }
return {}; return new Map();
}; };
const checkComparable = (value: NixStrictValue): void => { const checkComparable = (value: NixStrictValue): void => {
@@ -195,7 +163,8 @@ const checkComparable = (value: NixStrictValue): void => {
export const genericClosure = (args: NixValue): NixValue => { export const genericClosure = (args: NixValue): NixValue => {
const forcedArgs = forceAttrs(args); const forcedArgs = forceAttrs(args);
const { startSet, operator } = forcedArgs; const startSet = select(forcedArgs, ["startSet"]);
const operator = select(forcedArgs, ["operator"]);
const initialList = forceList(startSet); const initialList = forceList(startSet);
const opFunction = forceFunction(operator); const opFunction = forceFunction(operator);
@@ -206,7 +175,7 @@ export const genericClosure = (args: NixValue): NixValue => {
for (const item of initialList) { for (const item of initialList) {
const itemAttrs = forceAttrs(item); const itemAttrs = forceAttrs(item);
const key = force(itemAttrs.key); const key = force(select(itemAttrs, ["key"]));
checkComparable(key); checkComparable(key);
if (resultSet.find(key).equals(resultSet.end())) { if (resultSet.find(key).equals(resultSet.end())) {
resultSet.insert(key); resultSet.insert(key);
@@ -222,7 +191,7 @@ export const genericClosure = (args: NixValue): NixValue => {
for (const newItem of newItems) { for (const newItem of newItems) {
const newItemAttrs = forceAttrs(newItem); const newItemAttrs = forceAttrs(newItem);
const key = force(newItemAttrs.key); const key = force(select(newItemAttrs, ["key"]));
checkComparable(key); checkComparable(key);
if (resultSet.find(key).equals(resultSet.end())) { if (resultSet.find(key).equals(resultSet.end())) {
resultSet.insert(key); resultSet.insert(key);
@@ -235,14 +204,10 @@ export const genericClosure = (args: NixValue): NixValue => {
return resultList; return resultList;
}; };
export const getFlake = (attrs: NixValue): never => {
throw new Error("Not implemented: getFlake");
};
export const outputOf = export const outputOf =
(drv: NixValue) => (_drv: NixValue) =>
(out: NixValue): never => { (_out: NixValue): never => {
throw new Error("Not implemented: outputOf"); throw new Error("Not implemented: outputOf (part of dynamic-derivation)");
}; };
export const parseDrvName = (s: NixValue): NixAttrs => { export const parseDrvName = (s: NixValue): NixAttrs => {
@@ -256,18 +221,10 @@ export const parseDrvName = (s: NixValue): NixAttrs => {
break; break;
} }
} }
return { return new Map<string, NixValue>([
name, ["name", name],
version, ["version", version],
}; ]);
};
export const parseFlakeName = (s: NixValue): never => {
throw new Error("Not implemented: parseFlakeName");
};
export const parseFlakeRef = (s: NixValue): never => {
throw new Error("Not implemented: parseFlakeRef");
}; };
export const placeholder = (output: NixValue): NixValue => { export const placeholder = (output: NixValue): NixValue => {
@@ -314,7 +271,7 @@ export const replaceStrings =
resultContext.add(elem); resultContext.add(elem);
} }
} }
const replacement = toCache.get(i)!; const replacement = toCache.get(i) as string;
result += replacement; result += replacement;
@@ -361,22 +318,25 @@ export const splitVersion = (s: NixValue): NixValue => {
return components; return components;
}; };
export const traceVerbose = (e1: NixValue, e2: NixValue): never => { export const traceVerbose =
throw new Error("Not implemented: traceVerbose"); (_e1: NixValue) =>
}; (e2: NixValue): NixStrictValue => {
// TODO: implement traceVerbose
return force(e2);
};
export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue } => { export const tryEval = (e: NixValue): NixAttrs => {
try { try {
return { return new Map<string, NixValue>([
success: true, ["success", true],
value: force(e), ["value", force(e)],
}; ]);
} catch (err) { } catch (err) {
if (err instanceof CatchableError) { if (err instanceof CatchableError) {
return { return new Map<string, NixValue>([
success: false, ["success", false],
value: false, ["value", false],
}; ]);
} else { } else {
throw err; throw err;
} }

View File

@@ -1,13 +1,9 @@
/**
* Path-related builtin functions
*/
import type { NixValue, NixString, NixPath } from "../types";
import { isNixPath, isStringWithContext } from "../types";
import { force } from "../thunk";
import { mkPath } from "../path"; import { mkPath } from "../path";
import { coerceToString, StringCoercionMode, coerceToPath } from "./conversion";
import { mkStringWithContext, type NixStringContext } from "../string-context"; import { mkStringWithContext, type NixStringContext } from "../string-context";
import { force } from "../thunk";
import type { NixPath, NixString, NixValue } from "../types";
import { isNixPath } from "../types";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
/** /**
* builtins.baseNameOf * builtins.baseNameOf
@@ -90,21 +86,8 @@ export const dirOf = (s: NixValue): NixPath | NixString => {
} }
// String input → string output // String input → string output
const strValue: NixString = coerceToString(s, StringCoercionMode.Base, false) as NixString; const outContext: NixStringContext = new Set();
const pathStr = coerceToString(s, StringCoercionMode.Base, false, outContext);
let pathStr: string;
let hasContext = false;
let originalContext: Set<string> | undefined;
if (typeof strValue === "string") {
pathStr = strValue;
} else if (isStringWithContext(strValue)) {
pathStr = strValue.value;
hasContext = strValue.context.size > 0;
originalContext = strValue.context;
} else {
pathStr = strValue as string;
}
const lastSlash = pathStr.lastIndexOf("/"); const lastSlash = pathStr.lastIndexOf("/");
@@ -117,9 +100,8 @@ export const dirOf = (s: NixValue): NixPath | NixString => {
const result = pathStr.slice(0, lastSlash); const result = pathStr.slice(0, lastSlash);
// Preserve string context if present if (outContext.size > 0) {
if (hasContext && originalContext) { return mkStringWithContext(result, outContext);
return mkStringWithContext(result, originalContext);
} }
return result; return result;

View File

@@ -1,29 +1,15 @@
/**
* String operation builtin functions
*/
import type { NixInt, NixValue, NixString } from "../types";
import { forceStringValue, forceList, forceInt, forceString } from "../type-assert";
import { coerceToString, StringCoercionMode } from "./conversion";
import { import {
type NixStringContext,
getStringValue,
getStringContext, getStringContext,
getStringValue,
mkStringWithContext, mkStringWithContext,
type NixStringContext,
} from "../string-context"; } from "../string-context";
import { forceInt, forceList, forceString, forceStringValue } from "../type-assert";
import type { NixInt, NixString, NixValue } from "../types";
import { coerceToString, StringCoercionMode } from "./conversion";
export const stringLength = (e: NixValue): NixInt => BigInt(forceStringValue(e).length); export const stringLength = (e: NixValue): NixInt => BigInt(forceStringValue(e).length);
/**
* builtins.substring - Extract substring while preserving string context
*
* IMPORTANT: String context must be preserved from the source string.
* This matches Lix behavior where substring operations maintain references
* to store paths and derivations.
*
* Special case: substring 0 0 str can be used idiomatically to capture
* string context efficiently without copying the string value.
*/
export const substring = export const substring =
(start: NixValue) => (start: NixValue) =>
(len: NixValue) => (len: NixValue) =>
@@ -55,13 +41,6 @@ export const substring =
return mkStringWithContext(result, context); return mkStringWithContext(result, context);
}; };
/**
* builtins.concatStringsSep - Concatenate strings with separator, merging contexts
*
* IMPORTANT: String context must be collected from both the separator and all
* list elements, then merged into the result. This ensures that store path
* references are preserved when building paths like "/nix/store/xxx/bin:/nix/store/yyy/bin".
*/
export const concatStringsSep = export const concatStringsSep =
(sep: NixValue) => (sep: NixValue) =>
(list: NixValue): NixString => { (list: NixValue): NixString => {

View File

@@ -1,12 +1,6 @@
/**
* Type checking builtin functions
*/
import { import {
HAS_CONTEXT,
isNixPath, isNixPath,
isStringWithContext, isStringWithContext,
type NixPath,
type NixAttrs, type NixAttrs,
type NixBool, type NixBool,
type NixFloat, type NixFloat,
@@ -14,23 +8,17 @@ import {
type NixInt, type NixInt,
type NixList, type NixList,
type NixNull, type NixNull,
type NixString, type NixPath,
type NixStrictValue, type NixStrictValue,
type NixString,
} from "../types"; } from "../types";
/**
* Check if a value is a Nix string (plain string or StringWithContext)
* This works on already-forced values (NixStrictValue).
*/
export const isNixString = (v: NixStrictValue): v is NixString => { export const isNixString = (v: NixStrictValue): v is NixString => {
return typeof v === "string" || isStringWithContext(v); return typeof v === "string" || isStringWithContext(v);
}; };
export const isAttrs = (e: NixStrictValue): e is NixAttrs => { export const isAttrs = (e: NixStrictValue): e is NixAttrs => {
const val = e; return e instanceof Map;
return (
typeof val === "object" && !Array.isArray(val) && val !== null && !(HAS_CONTEXT in val) && !isPath(val)
);
}; };
export const isBool = (e: NixStrictValue): e is NixBool => typeof e === "boolean"; export const isBool = (e: NixStrictValue): e is NixBool => typeof e === "boolean";
@@ -68,7 +56,7 @@ export const typeOf = (e: NixStrictValue): NixType => {
if (isNixString(e)) return "string"; if (isNixString(e)) return "string";
if (isNixPath(e)) return "path"; if (isNixPath(e)) return "path";
if (Array.isArray(e)) return "list"; if (Array.isArray(e)) return "list";
if (typeof e === "object") return "set"; if (e instanceof Map) return "set";
if (typeof e === "function") return "lambda"; if (typeof e === "function") return "lambda";
throw new TypeError(`Unknown Nix type: ${typeof e}`); throw new TypeError(`Unknown Nix type: ${typeof e}`);

View File

@@ -1,83 +0,0 @@
import { HAS_CONTEXT, NixStringContext } from "./string-context";
import { force, isThunk } from "./thunk";
import type { NixValue } from "./types";
import { isStringWithContext, IS_PATH } from "./types";
/**
 * Recursively convert a Nix value into a plain JSON-serializable value.
 *
 * Dispatch order matters here: thunks/lambdas are rejected first, then
 * primitives, then context-carrying strings, then paths, then lists, and
 * finally attribute sets (with `__toString` / `outPath` handled before the
 * generic key-by-key conversion).
 *
 * @param value       - The Nix value to convert (may be an unforced thunk).
 * @param strict      - When true, force `value` (and, via recursion, every
 *                      nested value) before converting.
 * @param outContext  - Accumulator: string contexts encountered anywhere in
 *                      the value are merged into this set.
 * @param copyToStore - When true, path values are copied into the Nix store
 *                      and the resulting store path is returned (and added
 *                      to `outContext`) instead of the raw path.
 * @param seen        - Cycle-detection set; currently unused (see FIXME
 *                      below) but still threaded through recursive calls.
 * @returns A JSON-compatible value (null/boolean/number/string/array/object).
 * @throws Error if the value is (or contains) a thunk or a lambda.
 */
export const nixValueToJson = (
  value: NixValue,
  strict: boolean,
  outContext: NixStringContext,
  copyToStore: boolean,
  seen: Set<NixValue> = new Set(),
): any => {
  const v = strict ? force(value) : value;
  // Thunks and lambdas have no JSON representation.
  if (isThunk(v) || typeof v === "function")
    throw new Error(`cannot convert ${isThunk(v) ? "thunk" : "lambda"} to JSON`);
  if (v === null) return null;
  if (typeof v === "bigint") {
    // Nix integers are bigints; JSON numbers are doubles, so conversion may
    // lose precision outside the safe-integer range — warn but proceed.
    const num = Number(v);
    if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {
      console.warn(`integer ${v} exceeds safe range, precision may be lost`);
    }
    return num;
  }
  if (typeof v === "number") return v;
  if (typeof v === "boolean") return v;
  if (typeof v === "string") return v;
  // String with context: surface the raw value, merge its context upward.
  if (typeof v === "object" && HAS_CONTEXT in v) {
    for (const elem of v.context) {
      outContext.add(elem);
    }
    return v.value;
  }
  // Path value: optionally import into the store (the store path then also
  // becomes part of the result's context).
  if (typeof v === "object" && IS_PATH in v) {
    if (copyToStore) {
      const storePath = Deno.core.ops.op_copy_path_to_store(v.value);
      outContext.add(storePath);
      return storePath;
    } else {
      return v.value;
    }
  }
  // FIXME: is this check necessary?
  // if (seen.has(v)) {
  //   throw new Error("cycle detected in toJSON");
  // } else {
  //   seen.add(v)
  // }
  if (Array.isArray(v)) {
    return v.map((item) => nixValueToJson(item, strict, outContext, copyToStore, seen));
  }
  // NixAttrs
  // An attrset with a callable `__toString` is converted via that function,
  // mirroring Nix's string-coercion behavior for such sets.
  if ("__toString" in v && typeof force(v.__toString) === "function") {
    const toStringMethod = force(v.__toString) as (self: typeof v) => NixValue;
    const result = force(toStringMethod(v));
    if (typeof result === "string") {
      return result;
    }
    if (isStringWithContext(result)) {
      if (outContext) {
        for (const elem of result.context) {
          outContext.add(elem);
        }
      }
      return result.value;
    }
    // `__toString` returned something non-string: convert that instead.
    return nixValueToJson(result, strict, outContext, copyToStore, seen);
  }
  // Derivation-like attrsets collapse to their `outPath`.
  if ("outPath" in v) {
    return nixValueToJson(v.outPath, strict, outContext, copyToStore,seen);
  }
  // Plain attrset: convert each attribute, with keys in sorted order so the
  // resulting object has a deterministic key ordering.
  const result: Record<string, any> = {};
  const keys = Object.keys(v).sort();
  for (const key of keys) {
    result[key] = nixValueToJson(v[key], strict, outContext, copyToStore, seen);
  }
  return result;
};

View File

@@ -1,106 +0,0 @@
/**
 * A single output slot of a derivation, as serialized into the ATerm
 * `Derive(...)` output tuple: (name, path, hashAlgo, hash).
 */
export interface OutputInfo {
  // Store path of this output.
  path: string;
  // Hash algorithm name; empty for non-fixed-output derivations — TODO confirm.
  hashAlgo: string;
  // Expected content hash; empty for non-fixed-output derivations — TODO confirm.
  hash: string;
}

/**
 * In-memory representation of a derivation, mirroring the fields of the
 * on-disk ATerm `Derive(...)` format (see generateAterm below).
 */
export interface DerivationData {
  // Derivation name (e.g. package name-version).
  name: string;
  // Output name (e.g. "out") -> output info.
  outputs: Map<string, OutputInfo>;
  // Input derivation path -> set of its output names this drv depends on.
  inputDrvs: Map<string, Set<string>>;
  // Input source store paths.
  inputSrcs: Set<string>;
  // Build platform/system string.
  platform: string;
  // Path of the builder executable.
  builder: string;
  // Command-line arguments passed to the builder.
  args: string[];
  // Environment variables for the build.
  env: Map<string, string>;
}
/**
 * Render a string as a double-quoted ATerm literal, escaping the characters
 * the derivation format requires: `"`, `\`, newline, carriage return and tab.
 * Every other character passes through unchanged.
 */
export const escapeString = (s: string): string => {
  const escapes = new Map<string, string>([
    ['"', '\\"'],
    ["\\", "\\\\"],
    ["\n", "\\n"],
    ["\r", "\\r"],
    ["\t", "\\t"],
  ]);
  let escaped = "";
  for (const ch of s) {
    escaped += escapes.get(ch) ?? ch;
  }
  return `"${escaped}"`;
};
// Wrap a string in double quotes without any escaping (callers use this for
// store paths and output names, which contain no special characters).
const quoteString = (s: string): string => `"${s}"`;

// Comparator for [key, value] entry pairs: orders lexicographically by key.
const cmpByKey = <T>(a: [string, T], b: [string, T]): number => {
  if (a[0] < b[0]) return -1;
  if (a[0] > b[0]) return 1;
  return 0;
};
/**
 * Serialize a derivation into the ATerm `Derive(...)` format.
 * All map/set entries are emitted in sorted order so the output is
 * deterministic regardless of insertion order.
 */
export const generateAterm = (drv: DerivationData): string => {
  // Outputs: (name, path, hashAlgo, hash) tuples, sorted by output name.
  const outputs = Array.from(drv.outputs.entries())
    .sort(cmpByKey)
    .map(
      ([name, info]) =>
        `(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
    )
    .join(",");
  // Input derivations: (drvPath, [sorted output names]), sorted by path.
  const inputDrvs = Array.from(drv.inputDrvs.entries())
    .sort(cmpByKey)
    .map(([drvPath, outs]) => {
      const outList = Array.from(outs).sort().map(quoteString).join(",");
      return `(${quoteString(drvPath)},[${outList}])`;
    })
    .join(",");
  // Input sources: sorted, quoted store paths.
  const inputSrcs = Array.from(drv.inputSrcs).sort().map(quoteString).join(",");
  // Builder arguments keep their original order; only env entries are sorted.
  const args = drv.args.map(escapeString).join(",");
  const envs = Array.from(drv.env.entries())
    .sort(cmpByKey)
    .map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`)
    .join(",");
  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};
/**
 * Serialize a derivation in "hash modulo" form: identical to generateAterm
 * except that the input-derivation section uses precomputed drv hashes.
 *
 * @param drv           - The derivation to serialize.
 * @param inputDrvHashes - Map from input-derivation hash to a comma-joined
 *                         string of the output names depended on.
 */
export const generateAtermModulo = (drv: DerivationData, inputDrvHashes: Map<string, string>): string => {
  // Outputs: (name, path, hashAlgo, hash) tuples, sorted by output name.
  const outputs = Array.from(drv.outputs.entries())
    .sort(cmpByKey)
    .map(
      ([name, info]) =>
        `(${quoteString(name)},${quoteString(info.path)},${quoteString(info.hashAlgo)},${quoteString(info.hash)})`,
    )
    .join(",");
  // Input drvs keyed by hash; the value is a comma-joined output-name list.
  const inputDrvs = Array.from(inputDrvHashes.entries())
    .sort(cmpByKey)
    .map(([drvHash, outNames]) => {
      const outList = outNames.split(",").sort().map(quoteString).join(",");
      return `(${quoteString(drvHash)},[${outList}])`;
    })
    .join(",");
  // Input sources: sorted, quoted store paths.
  const inputSrcs = Array.from(drv.inputSrcs).sort().map(quoteString).join(",");
  // Builder arguments keep their original order; only env entries are sorted.
  const args = drv.args.map(escapeString).join(",");
  const envs = Array.from(drv.env.entries())
    .sort(cmpByKey)
    .map(([k, v]) => `(${escapeString(k)},${escapeString(v)})`)
    .join(",");
  return `Derive([${outputs}],[${inputDrvs}],[${inputSrcs}],${quoteString(drv.platform)},${escapeString(drv.builder)},[${args}],[${envs}])`;
};

View File

@@ -1,18 +1,14 @@
/**
* Helper functions for nix-js runtime
*/
import type { NixValue, NixAttrs, NixBool, NixString, NixPath } from "./types";
import { forceAttrs, forceBool, forceFunction, forceStringValue } from "./type-assert";
import { isAttrs, typeOf } from "./builtins/type-check";
import { coerceToString, StringCoercionMode } from "./builtins/conversion"; import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import { type NixStringContext, mkStringWithContext, isStringWithContext } from "./string-context"; import { isAttrs, typeOf } from "./builtins/type-check";
import { force } from "./thunk";
import { mkPath } from "./path"; import { mkPath } from "./path";
import { isStringWithContext, mkStringWithContext, type NixStringContext } from "./string-context";
import { force } from "./thunk";
import { forceAttrs, forceBool, forceFunction, forceStringNoCtx, forceStringValue } from "./type-assert";
import type { NixAttrs, NixBool, NixPath, NixString, NixValue } from "./types";
import { CatchableError, isNixPath } from "./types"; import { CatchableError, isNixPath } from "./types";
interface StackFrame { interface StackFrame {
span: string; span: number;
message: string; message: string;
} }
@@ -36,35 +32,18 @@ function enrichError(error: unknown): Error {
return err; return err;
} }
export const getTos = (): string => { const pushContext = (message: string, span: number): void => {
const tos = callStack[callStack.length - 2];
const { file, line, column } = Deno.core.ops.op_decode_span(tos.span);
return `${tos.message} at ${file}:${line}:${column}`;
};
/**
* Push an error context onto the stack
* Used for tracking evaluation context (e.g., "while evaluating the condition")
*/
export const pushContext = (message: string, span: string): void => {
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }
callStack.push({ span, message }); callStack.push({ span, message });
}; };
/** const popContext = (): void => {
* Pop an error context from the stack
*/
export const popContext = (): void => {
callStack.pop(); callStack.pop();
}; };
/** export const withContext = <T>(message: string, span: number, fn: () => T): T => {
* Execute a function with error context tracking
* Automatically pushes context before execution and pops after
*/
export const withContext = <T>(message: string, span: string, fn: () => T): T => {
pushContext(message, span); pushContext(message, span);
try { try {
return fn(); return fn();
@@ -149,13 +128,6 @@ export const concatStringsWithContext = (parts: NixValue[], forceString: boolean
return mkStringWithContext(value, context); return mkStringWithContext(value, context);
}; };
/**
* Resolve a path (handles both absolute and relative paths)
* For relative paths, resolves against current import stack
*
* @param path - Path string (may be relative or absolute)
* @returns NixPath object with absolute path
*/
export const resolvePath = (currentDir: string, path: NixValue): NixPath => { export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
const forced = force(path); const forced = force(path);
let pathStr: string; let pathStr: string;
@@ -170,86 +142,92 @@ export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
return mkPath(resolved); return mkPath(resolved);
}; };
export const select = (obj: NixValue, attrpath: NixValue[], span?: string): NixValue => { export const select = (obj: NixValue, attrpath: NixValue[], span?: number): NixValue => {
if (span) { if (span !== undefined) {
const pathStrings = attrpath.map((a) => forceStringValue(a));
const path = pathStrings.join(".");
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }
callStack.push({ span, message }); const frame: StackFrame = { span, message: "while selecting attribute" };
callStack.push(frame);
try { try {
return select_impl(obj, attrpath); return selectImpl(obj, attrpath);
} catch (error) { } catch (error) {
try {
const path = attrpath.map((a) => forceStringValue(a)).join(".");
if (path) frame.message = `while selecting attribute [${path}]`;
} catch {
throw enrichError(error);
}
throw enrichError(error); throw enrichError(error);
} finally { } finally {
callStack.pop(); callStack.pop();
} }
} else { } else {
return select_impl(obj, attrpath); return selectImpl(obj, attrpath);
} }
}; };
function select_impl(obj: NixValue, attrpath: NixValue[]): NixValue { function selectImpl(obj: NixValue, attrpath: NixValue[]): NixValue {
let attrs = forceAttrs(obj); let attrs = forceAttrs(obj);
for (const attr of attrpath.slice(0, -1)) { for (let i = 0; i < attrpath.length - 1; i++) {
const key = forceStringValue(attr); const key = forceStringValue(attrpath[i]);
if (!(key in attrs)) { if (!attrs.has(key)) {
throw new Error(`Attribute '${key}' not found`); throw new Error(`Attribute '${key}' not found`);
} }
const cur = forceAttrs(attrs[forceStringValue(attr)]); const cur = forceAttrs(attrs.get(key) as NixValue);
attrs = cur; attrs = cur;
} }
const last = forceStringValue(attrpath[attrpath.length - 1]); const last = forceStringValue(attrpath[attrpath.length - 1]);
if (!(last in attrs)) { if (!attrs.has(last)) {
throw new Error(`Attribute '${last}' not found`); throw new Error(`Attribute '${last}' not found`);
} }
return attrs[last]; return attrs.get(last) as NixValue;
} }
export const selectWithDefault = ( export const selectWithDefault = (
obj: NixValue, obj: NixValue,
attrpath: NixValue[], attrpath: NixValue[],
default_val: NixValue, defaultVal: NixValue,
span?: string, span?: number,
): NixValue => { ): NixValue => {
if (span) { if (span !== undefined) {
const pathStrings = attrpath.map((a) => forceStringValue(a));
const path = pathStrings.join(".");
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }
callStack.push({ span, message }); const frame: StackFrame = { span, message: "while selecting attribute" };
callStack.push(frame);
try { try {
return selectWithDefault_impl(obj, attrpath, default_val); return selectWithDefaultImpl(obj, attrpath, defaultVal);
} catch (error) { } catch (error) {
try {
const path = attrpath.map((a) => forceStringValue(a)).join(".");
if (path) frame.message = `while selecting attribute [${path}]`;
} catch {
throw enrichError(error);
}
throw enrichError(error); throw enrichError(error);
} finally { } finally {
callStack.pop(); callStack.pop();
} }
} else { } else {
return selectWithDefault_impl(obj, attrpath, default_val); return selectWithDefaultImpl(obj, attrpath, defaultVal);
} }
}; };
function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], defaultVal: NixValue): NixValue { function selectWithDefaultImpl(obj: NixValue, attrpath: NixValue[], defaultVal: NixValue): NixValue {
let attrs = force(obj); let attrs = force(obj);
if (!isAttrs(attrs)) { if (!isAttrs(attrs)) {
return defaultVal; return defaultVal;
} }
for (const attr of attrpath.slice(0, -1)) { for (let i = 0; i < attrpath.length - 1; i++) {
const key = forceStringValue(attr); const key = forceStringValue(attrpath[i]);
if (!(key in attrs)) { if (!attrs.has(key)) {
return defaultVal; return defaultVal;
} }
const cur = force(attrs[key]); const cur = force(attrs.get(key) as NixValue);
if (!isAttrs(cur)) { if (!isAttrs(cur)) {
return defaultVal; return defaultVal;
} }
@@ -257,8 +235,8 @@ function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], defaultVal:
} }
const last = forceStringValue(attrpath[attrpath.length - 1]); const last = forceStringValue(attrpath[attrpath.length - 1]);
if (last in attrs) { if (attrs.has(last)) {
return attrs[last]; return attrs.get(last) as NixValue;
} }
return defaultVal; return defaultVal;
} }
@@ -270,97 +248,53 @@ export const hasAttr = (obj: NixValue, attrpath: NixValue[]): NixBool => {
} }
let attrs = forced; let attrs = forced;
for (const attr of attrpath.slice(0, -1)) { for (let i = 0; i < attrpath.length - 1; i++) {
const cur = force(attrs[forceStringValue(attr)]); const key = forceStringNoCtx(attrpath[i]);
if (!attrs.has(key)) {
return false;
}
const cur = force(attrs.get(key) as NixValue);
if (!isAttrs(cur)) { if (!isAttrs(cur)) {
return false; return false;
} }
attrs = cur; attrs = cur;
} }
return forceStringValue(attrpath[attrpath.length - 1]) in attrs; return attrs.has(forceStringValue(attrpath[attrpath.length - 1]));
}; };
/** export const call = (func: NixValue, arg: NixValue, span?: number): NixValue => {
* Validate function parameters if (span !== undefined) {
* Used for pattern matching in function parameters
*
* Example: { a, b ? 1, ... }: ...
* - required: ["a"]
* - allowed: ["a", "b"] (or null if ellipsis "..." present)
*
* @param arg - Argument object to validate
* @param required - Array of required parameter names (or null)
* @param allowed - Array of allowed parameter names (or null for ellipsis)
* @returns The forced argument object
* @throws Error if required param missing or unexpected param present
*/
export const validateParams = (
arg: NixValue,
required: string[] | null,
allowed: string[] | null,
): NixAttrs => {
const forced_arg = forceAttrs(arg);
// Check required parameters
if (required) {
for (const key of required) {
if (!Object.hasOwn(forced_arg, key)) {
throw new Error(`Function called without required argument '${key}'`);
}
}
}
// Check allowed parameters (if not using ellipsis)
if (allowed) {
const allowed_set = new Set(allowed);
for (const key in forced_arg) {
if (!allowed_set.has(key)) {
throw new Error(`Function called with unexpected argument '${key}'`);
}
}
}
return forced_arg;
};
export const call = (func: NixValue, arg: NixValue, span?: string): NixValue => {
if (span) {
if (callStack.length >= MAX_STACK_DEPTH) { if (callStack.length >= MAX_STACK_DEPTH) {
callStack.shift(); callStack.shift();
} }
callStack.push({ span, message: "from call site" }); callStack.push({ span, message: "from call site" });
try { try {
return call_impl(func, arg); return callImpl(func, arg);
} catch (error) { } catch (error) {
throw enrichError(error); throw enrichError(error);
} finally { } finally {
callStack.pop(); callStack.pop();
} }
} else { } else {
return call_impl(func, arg); return callImpl(func, arg);
} }
}; };
function call_impl(func: NixValue, arg: NixValue): NixValue { function callImpl(func: NixValue, arg: NixValue): NixValue {
const forcedFunc = force(func); const forced = force(func);
if (typeof forcedFunc === "function") { if (typeof forced === "function") {
forcedFunc.args?.check(arg); forced.args?.check(arg);
return forcedFunc(arg); return forced(arg);
} }
if ( if (forced instanceof Map && forced.has("__functor")) {
typeof forcedFunc === "object" && const functor = forceFunction(forced.get("__functor") as NixValue);
!Array.isArray(forcedFunc) && return call(callImpl(functor, forced), arg);
forcedFunc !== null &&
"__functor" in forcedFunc
) {
const functor = forceFunction(forcedFunc.__functor);
return call(functor(forcedFunc), arg);
} }
throw new Error(`attempt to call something which is not a function but ${typeOf(forcedFunc)}`); throw new Error(`attempt to call something which is not a function but ${typeOf(forced)}`);
} }
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string, span: string): NixValue => { export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string, span: number): NixValue => {
if (forceBool(assertion)) { if (forceBool(assertion)) {
return expr; return expr;
} }
@@ -370,14 +304,7 @@ export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string
throw "unreachable"; throw "unreachable";
}; };
export const ifFunc = (cond: NixValue, consq: NixValue, alter: NixValue) => { export const mkPos = (span: number): NixAttrs => {
if (forceBool(cond)) {
return consq;
}
return alter;
};
export const mkPos = (span: string): NixAttrs => {
return Deno.core.ops.op_decode_span(span); return Deno.core.ops.op_decode_span(span);
}; };
@@ -386,12 +313,12 @@ interface WithScope {
last: WithScope | null; last: WithScope | null;
} }
export const lookupWith = (name: string, withScope: WithScope | null): NixValue => { export const lookupWith = (name: string, withScope: WithScope): NixValue => {
let current = withScope; let current: WithScope | null = withScope;
while (current !== null) { while (current !== null) {
const attrs = forceAttrs(current.env); const attrs = forceAttrs(current.env);
if (name in attrs) { if (attrs.has(name)) {
return attrs[name]; return attrs.get(name) as NixValue;
} }
current = current.last; current = current.last;
} }

View File

@@ -4,88 +4,95 @@
* All functionality is exported via the global `Nix` object * All functionality is exported via the global `Nix` object
*/ */
import { builtins, PRIMOP_METADATA } from "./builtins";
import { import {
createThunk, assert,
force, call,
isThunk, concatStringsWithContext,
IS_THUNK, hasAttr,
DEBUG_THUNKS, lookupWith,
forceDeep, mkPos,
IS_CYCLE, resolvePath,
forceShallow,
} from "./thunk";
import {
select, select,
selectWithDefault, selectWithDefault,
validateParams,
resolvePath,
hasAttr,
concatStringsWithContext,
call,
assert,
pushContext,
popContext,
withContext,
mkPos,
lookupWith,
} from "./helpers"; } from "./helpers";
import { op } from "./operators"; import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import { HAS_CONTEXT } from "./string-context"; import { HAS_CONTEXT } from "./string-context";
import { IS_PATH, mkAttrs, mkFunction, mkAttrsWithPos, ATTR_POSITIONS, NixValue } from "./types"; import { createThunk, DEBUG_THUNKS, force, forceDeep, forceShallow, IS_CYCLE, IS_THUNK } from "./thunk";
import { forceBool } from "./type-assert"; import { forceBool } from "./type-assert";
import { IS_PATH, mkAttrs, mkFunction, type NixValue } from "./types";
import { execBytecode, execBytecodeScoped, vmStrings, vmConstants } from "./vm";
export type NixRuntime = typeof Nix; export type NixRuntime = typeof Nix;
const replBindings: Record<string, NixValue> = {}; const replBindings: Map<string, NixValue> = new Map();
/**
* The global Nix runtime object
*/
export const Nix = { export const Nix = {
createThunk,
force,
forceShallow,
forceDeep,
forceBool,
isThunk,
IS_THUNK, IS_THUNK,
IS_CYCLE, IS_CYCLE,
HAS_CONTEXT, HAS_CONTEXT,
IS_PATH, IS_PATH,
PRIMOP_METADATA,
DEBUG_THUNKS, DEBUG_THUNKS,
createThunk,
force,
forceBool,
forceShallow,
forceDeep,
assert, assert,
call, call,
hasAttr, hasAttr,
select, select,
selectWithDefault, selectWithDefault,
lookupWith, lookupWith,
validateParams,
resolvePath, resolvePath,
coerceToString,
concatStringsWithContext, concatStringsWithContext,
StringCoercionMode,
mkAttrs, mkAttrs,
mkAttrsWithPos,
mkFunction, mkFunction,
mkPos, mkPos,
ATTR_POSITIONS,
pushContext,
popContext,
withContext,
op, op,
builtins, builtins,
PRIMOP_METADATA,
strings: vmStrings,
constants: vmConstants,
execBytecode,
execBytecodeScoped,
replBindings, replBindings,
setReplBinding: (name: string, value: NixValue) => { setReplBinding: (name: string, value: NixValue) => {
replBindings[name] = value; replBindings.set(name, value);
}, },
getReplBinding: (name: string) => replBindings[name], getReplBinding: (name: string) => replBindings.get(name),
}; };
globalThis.Nix = Nix; globalThis.Nix = Nix;
globalThis.$t = createThunk;
globalThis.$f = force;
globalThis.$fb = forceBool;
globalThis.$a = assert;
globalThis.$c = call;
globalThis.$h = hasAttr;
globalThis.$s = select;
globalThis.$sd = selectWithDefault;
globalThis.$l = lookupWith;
globalThis.$r = resolvePath;
globalThis.$cs = concatStringsWithContext;
globalThis.$ma = mkAttrs;
globalThis.$mf = mkFunction;
globalThis.$mp = mkPos;
globalThis.$gb = Nix.getReplBinding;
globalThis.$oa = op.add;
globalThis.$os = op.sub;
globalThis.$om = op.mul;
globalThis.$od = op.div;
globalThis.$oe = op.eq;
globalThis.$ol = op.lt;
globalThis.$og = op.gt;
globalThis.$oc = op.concat;
globalThis.$ou = op.update;
globalThis.$b = builtins;
globalThis.$e = new Map();

View File

@@ -1,37 +1,27 @@
/** import { coerceToString, StringCoercionMode } from "./builtins/conversion";
* Nix operators module import { isNixString, typeOf } from "./builtins/type-check";
* Implements all binary and unary operators used by codegen import { mkPath } from "./path";
*/
import type { NixValue, NixList, NixAttrs, NixString, NixPath } from "./types";
import { isNixPath } from "./types";
import { force } from "./thunk";
import { forceNumeric, forceList, forceAttrs, coerceNumeric } from "./type-assert";
import { import {
type NixStringContext,
getStringValue,
getStringContext, getStringContext,
getStringValue,
mergeContexts, mergeContexts,
mkStringWithContext, mkStringWithContext,
type NixStringContext,
} from "./string-context"; } from "./string-context";
import { coerceToString, StringCoercionMode } from "./builtins/conversion"; import { force } from "./thunk";
import { mkPath } from "./path"; import { coerceNumeric, forceAttrs, forceBool, forceList, forceNumeric } from "./type-assert";
import { typeOf, isNixString } from "./builtins/type-check"; import type { NixAttrs, NixList, NixPath, NixString, NixValue } from "./types";
import { isNixPath } from "./types";
const canCoerceToString = (v: NixValue): boolean => { const canCoerceToString = (v: NixValue): boolean => {
const forced = force(v); const forced = force(v);
if (isNixString(forced)) return true; if (isNixString(forced)) return true;
if (typeof forced === "object" && forced !== null && !Array.isArray(forced)) { if (forced instanceof Map) {
if ("outPath" in forced || "__toString" in forced) return true; if (forced.has("outPath") || forced.has("__toString")) return true;
} }
return false; return false;
}; };
/**
* Compare two values, similar to Nix's CompareValues.
* Returns: -1 if a < b, 0 if a == b, 1 if a > b
* Throws TypeError for incomparable types.
*/
export const compareValues = (a: NixValue, b: NixValue): -1 | 0 | 1 => { export const compareValues = (a: NixValue, b: NixValue): -1 | 0 | 1 => {
const av = force(a); const av = force(a);
const bv = force(b); const bv = force(b);
@@ -54,32 +44,29 @@ export const compareValues = (a: NixValue, b: NixValue): -1 | 0 | 1 => {
throw new TypeError(`cannot compare ${typeOf(av)} with ${typeOf(bv)}`); throw new TypeError(`cannot compare ${typeOf(av)} with ${typeOf(bv)}`);
} }
// Int and float comparison
if (typeA === "int" || typeA === "float") { if (typeA === "int" || typeA === "float") {
return av! < bv! ? -1 : av === bv ? 0 : 1; return (av as never) < (bv as never) ? -1 : av === bv ? 0 : 1;
} }
// String comparison (handles both plain strings and StringWithContext)
if (typeA === "string") { if (typeA === "string") {
const strA = getStringValue(av as NixString); const strA = getStringValue(av as NixString);
const strB = getStringValue(bv as NixString); const strB = getStringValue(bv as NixString);
return strA < strB ? -1 : strA > strB ? 1 : 0; return strA < strB ? -1 : strA > strB ? 1 : 0;
} }
// Path comparison
if (typeA === "path") { if (typeA === "path") {
const aPath = av as NixPath; const aPath = av as NixPath;
const bPath = bv as NixPath; const bPath = bv as NixPath;
return aPath.value < bPath.value ? -1 : aPath.value > bPath.value ? 1 : 0; return aPath.value < bPath.value ? -1 : aPath.value > bPath.value ? 1 : 0;
} }
// List comparison (lexicographic)
if (typeA === "list") { if (typeA === "list") {
const aList = av as NixList; const aList = av as NixList;
const bList = bv as NixList; const bList = bv as NixList;
for (let i = 0; ; i++) { for (let i = 0; ; i++) {
// Equal if same length, else aList > bList
if (i === bList.length) { if (i === bList.length) {
return i === aList.length ? 0 : 1; // Equal if same length, else aList > bList return i === aList.length ? 0 : 1;
} else if (i === aList.length) { } else if (i === aList.length) {
return -1; // aList < bList return -1; // aList < bList
} else if (!op.eq(aList[i], bList[i])) { } else if (!op.eq(aList[i], bList[i])) {
@@ -94,10 +81,6 @@ export const compareValues = (a: NixValue, b: NixValue): -1 | 0 | 1 => {
); );
}; };
/**
* Operator object exported as Nix.op
* All operators referenced by codegen (e.g., Nix.op.add, Nix.op.eq)
*/
export const op = { export const op = {
add: (a: NixValue, b: NixValue): bigint | number | NixString | NixPath => { add: (a: NixValue, b: NixValue): bigint | number | NixString | NixPath => {
const av = force(a); const av = force(a);
@@ -109,15 +92,14 @@ export const op = {
const strB = getStringValue(bv); const strB = getStringValue(bv);
const ctxB = getStringContext(bv); const ctxB = getStringContext(bv);
// Lix constraint: cannot append string with store context to path
if (ctxB.size > 0) { if (ctxB.size > 0) {
throw new TypeError("a string that refers to a store path cannot be appended to a path"); throw new TypeError("a string that refers to a store path cannot be appended to a path");
} }
// Concatenate paths
return mkPath(av.value + strB); return mkPath(av.value + strB);
} }
// FIXME: handle corepkgs
// path + path: concatenate // path + path: concatenate
if (isNixPath(bv)) { if (isNixPath(bv)) {
return mkPath(av.value + bv.value); return mkPath(av.value + bv.value);
@@ -138,6 +120,7 @@ export const op = {
// String concatenation // String concatenation
if (isNixString(av) && isNixString(bv)) { if (isNixString(av) && isNixString(bv)) {
// Merge string context
const strA = getStringValue(av); const strA = getStringValue(av);
const strB = getStringValue(bv); const strB = getStringValue(bv);
const ctxA = getStringContext(av); const ctxA = getStringContext(av);
@@ -162,19 +145,19 @@ export const op = {
return mkStringWithContext(result, context); return mkStringWithContext(result, context);
} }
// Numeric addition // Perform numeric addition otherwise
const [numA, numB] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [numA, numB] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (numA as any) + (numB as any); return (numA as never) + (numB as never);
}, },
sub: (a: NixValue, b: NixValue): bigint | number => { sub: (a: NixValue, b: NixValue): bigint | number => {
const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (av as any) - (bv as any); return (av as never) - (bv as never);
}, },
mul: (a: NixValue, b: NixValue): bigint | number => { mul: (a: NixValue, b: NixValue): bigint | number => {
const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b)); const [av, bv] = coerceNumeric(forceNumeric(a), forceNumeric(b));
return (av as any) * (bv as any); return (av as never) * (bv as never);
}, },
div: (a: NixValue, b: NixValue): bigint | number => { div: (a: NixValue, b: NixValue): bigint | number => {
@@ -184,7 +167,7 @@ export const op = {
throw new RangeError("Division by zero"); throw new RangeError("Division by zero");
} }
return (av as any) / (bv as any); return (av as never) / (bv as never);
}, },
eq: (a: NixValue, b: NixValue): boolean => { eq: (a: NixValue, b: NixValue): boolean => {
@@ -202,7 +185,6 @@ export const op = {
return av === Number(bv); return av === Number(bv);
} }
// Get type names for comparison (skip if already handled above)
const typeA = typeOf(av); const typeA = typeOf(av);
const typeB = typeOf(bv); const typeB = typeOf(bv);
@@ -223,10 +205,12 @@ export const op = {
return (av as NixPath).value === (bv as NixPath).value; return (av as NixPath).value === (bv as NixPath).value;
} }
if (Array.isArray(av) && Array.isArray(bv)) { if (typeA === "list") {
if (av.length !== bv.length) return false; const aList = av as NixList;
for (let i = 0; i < av.length; i++) { const bList = bv as NixList;
if (!op.eq(av[i], bv[i])) return false; if (aList.length !== bList.length) return false;
for (let i = 0; i < aList.length; i++) {
if (!op.eq(aList[i], bList[i])) return false;
} }
return true; return true;
} }
@@ -235,37 +219,30 @@ export const op = {
const attrsA = av as NixAttrs; const attrsA = av as NixAttrs;
const attrsB = bv as NixAttrs; const attrsB = bv as NixAttrs;
// Derivation comparison: compare outPaths only if (attrsA.has("type") && attrsB.has("type")) {
// Safe to force 'type' because it's always a string literal, never a computed value const typeValA = force(attrsA.get("type") as NixValue);
if ("type" in attrsA && "type" in attrsB) { const typeValB = force(attrsB.get("type") as NixValue);
const typeValA = force(attrsA.type);
const typeValB = force(attrsB.type);
if (typeValA === "derivation" && typeValB === "derivation") { if (typeValA === "derivation" && typeValB === "derivation") {
if ("outPath" in attrsA && "outPath" in attrsB) { if (attrsA.has("outPath") && attrsB.has("outPath")) {
return op.eq(attrsA.outPath, attrsB.outPath); return op.eq(attrsA.get("outPath") as NixValue, attrsB.get("outPath") as NixValue);
} }
} }
} }
// Otherwise, compare attributes one by one const keysA = Array.from(attrsA.keys()).sort();
const keysA = Object.keys(attrsA).sort(); const keysB = Array.from(attrsB.keys()).sort();
const keysB = Object.keys(attrsB).sort();
if (keysA.length !== keysB.length) return false; if (keysA.length !== keysB.length) return false;
for (let i = 0; i < keysA.length; i++) { for (let i = 0; i < keysA.length; i++) {
if (keysA[i] !== keysB[i]) return false; if (keysA[i] !== keysB[i]) return false;
if (!op.eq(attrsA[keysA[i]], attrsB[keysB[i]])) return false; if (!op.eq(attrsA.get(keysA[i]) as NixValue, attrsB.get(keysB[i]) as NixValue)) return false;
} }
return true; return true;
} }
// Functions are incomparable // Other types are incomparable
if (typeof av === "function") {
return false;
}
return false; return false;
}, },
neq: (a: NixValue, b: NixValue): boolean => { neq: (a: NixValue, b: NixValue): boolean => {
@@ -284,11 +261,25 @@ export const op = {
return compareValues(a, b) >= 0; return compareValues(a, b) >= 0;
}, },
bnot: (a: NixValue): boolean => !force(a), bnot: (a: NixValue): boolean => !forceBool(a),
concat: (a: NixValue, b: NixValue): NixList => { concat: (a: NixValue, b: NixValue): NixList => {
return Array.prototype.concat.call(forceList(a), forceList(b)); return forceList(a).concat(forceList(b));
}, },
update: (a: NixValue, b: NixValue): NixAttrs => ({ ...forceAttrs(a), ...forceAttrs(b) }), update: (a: NixValue, b: NixValue): NixAttrs => {
const mapA = forceAttrs(a);
const mapB = forceAttrs(b);
if (mapA.size === 0) {
return mapB;
}
if (mapB.size === 0) {
return mapA;
}
const result: NixAttrs = new Map(mapA);
for (const [k, v] of mapB) {
result.set(k, v);
}
return result;
},
}; };

View File

@@ -1,4 +1,4 @@
import { IS_PATH, type NixPath } from "./types"; import { NixPath } from "./types";
const canonicalizePath = (path: string): string => { const canonicalizePath = (path: string): string => {
const parts: string[] = []; const parts: string[] = [];
@@ -14,13 +14,11 @@ const canonicalizePath = (path: string): string => {
const component = path.slice(i, j); const component = path.slice(i, j);
i = j; i = j;
if (component === ".") { if (component === "..") {
continue;
} else if (component === "..") {
if (parts.length > 0) { if (parts.length > 0) {
parts.pop(); parts.pop();
} }
} else { } else if (component !== ".") {
parts.push(component); parts.push(component);
} }
} }
@@ -28,11 +26,11 @@ const canonicalizePath = (path: string): string => {
if (parts.length === 0) { if (parts.length === 0) {
return "/"; return "/";
} }
return "/" + parts.join("/"); return `/${parts.join("/")}`;
}; };
export const mkPath = (value: string): NixPath => { export const mkPath = (value: string): NixPath => {
return { [IS_PATH]: true, value: canonicalizePath(value) }; return new NixPath(canonicalizePath(value));
}; };
export const getPathValue = (p: NixPath): string => { export const getPathValue = (p: NixPath): string => {

View File

@@ -1,7 +1,7 @@
import { isThunk, IS_CYCLE } from "./thunk"; import { getPrimopMetadata, isPrimop } from "./builtins/index";
import { isStringWithContext } from "./string-context"; import { isStringWithContext } from "./string-context";
import { IS_CYCLE, isThunk } from "./thunk";
import { isNixPath, type NixValue } from "./types"; import { isNixPath, type NixValue } from "./types";
import { is_primop, get_primop_metadata } from "./builtins/index";
export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet()): string => { export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet()): string => {
if (isThunk(value)) { if (isThunk(value)) {
@@ -29,8 +29,8 @@ export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet(
} }
if (typeof value === "function") { if (typeof value === "function") {
if (is_primop(value)) { if (isPrimop(value)) {
const meta = get_primop_metadata(value); const meta = getPrimopMetadata(value);
if (meta && meta.applied > 0) { if (meta && meta.applied > 0) {
return "<PRIMOP-APP>"; return "<PRIMOP-APP>";
} }
@@ -39,36 +39,40 @@ export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet(
return "<LAMBDA>"; return "<LAMBDA>";
} }
if (typeof value === "object") { if (IS_CYCLE in value) {
if (IS_CYCLE in value && (value as any)[IS_CYCLE] === true) { return "«repeated»";
return "«repeated»";
}
if (seen.has(value)) {
return "«repeated»";
}
seen.add(value);
if (isNixPath(value)) {
return value.value;
}
if (isStringWithContext(value)) {
return printString(value.value);
}
if (Array.isArray(value)) {
const items = value.map((v) => printValue(v, seen)).join(" ");
return `[ ${items} ]`;
}
const entries = Object.entries(value)
.map(([k, v]) => `${printSymbol(k)} = ${printValue(v, seen)};`)
.join(" ");
return `{${entries ? ` ${entries} ` : " "}}`;
} }
return "<unknown>"; if (isNixPath(value)) {
return value.value;
}
if (isStringWithContext(value)) {
return printString(value.value);
}
if (Array.isArray(value)) {
if (value.length > 0) {
if (seen.has(value)) {
return "«repeated»";
}
seen.add(value);
}
const items = value.map((v) => printValue(v, seen)).join(" ");
return `[ ${items} ]`;
}
if (seen.has(value)) {
return "«repeated»";
}
if (value.size > 0) {
seen.add(value);
}
const entries = [...value.entries()]
.map(([k, v]) => `${printSymbol(k)} = ${printValue(v, seen)};`)
.join(" ");
return `{${entries ? ` ${entries} ` : " "}}`;
}; };
const printString = (s: string): string => { const printString = (s: string): string => {
@@ -94,7 +98,7 @@ const printString = (s: string): string => {
result += c; result += c;
} }
} }
return result + '"'; return `${result}"`;
}; };
const SYMBOL_REGEX = /^[a-zA-Z_][a-zA-Z0-9_'-]*$/; const SYMBOL_REGEX = /^[a-zA-Z_][a-zA-Z0-9_'-]*$/;

View File

@@ -1,29 +1,4 @@
/** import type { NixStrictValue } from "./types";
* String Context System for Nix
*
* String context tracks references to store paths and derivations within strings.
* This is critical for Nix's dependency tracking - when a string containing a
* store path is used in a derivation, that store path becomes a build dependency.
*
* Context Elements (encoded as strings):
* - Opaque: Plain store path reference
* Format: "/nix/store/..."
* Example: "/nix/store/abc123-hello"
*
* - DrvDeep: Derivation with all outputs
* Format: "=/nix/store/...drv"
* Example: "=/nix/store/xyz789-hello.drv"
* Meaning: All outputs of this derivation and its closure
*
* - Built: Specific derivation output
* Format: "!<output>!/nix/store/...drv"
* Example: "!out!/nix/store/xyz789-hello.drv"
* Meaning: Specific output (e.g., "out", "dev", "lib") of this derivation
*
* This implementation matches Lix's NixStringContext system.
*/
import { NixStrictValue } from "./types";
export const HAS_CONTEXT = Symbol("HAS_CONTEXT"); export const HAS_CONTEXT = Symbol("HAS_CONTEXT");
@@ -47,18 +22,22 @@ export type StringContextElem = StringContextOpaque | StringContextDrvDeep | Str
export type NixStringContext = Set<string>; export type NixStringContext = Set<string>;
export interface StringWithContext { export class StringWithContext {
readonly [HAS_CONTEXT]: true; readonly [HAS_CONTEXT] = true as const;
value: string; value: string;
context: NixStringContext; context: NixStringContext;
constructor(value: string, context: NixStringContext) {
this.value = value;
this.context = context;
}
} }
export const isStringWithContext = (v: NixStrictValue): v is StringWithContext => { export const isStringWithContext = (v: NixStrictValue): v is StringWithContext => {
return typeof v === "object" && v !== null && HAS_CONTEXT in v; return v instanceof StringWithContext;
}; };
export const mkStringWithContext = (value: string, context: NixStringContext): StringWithContext => { export const mkStringWithContext = (value: string, context: NixStringContext): StringWithContext => {
return { [HAS_CONTEXT]: true, value, context }; return new StringWithContext(value, context);
}; };
export const mkPlainString = (value: string): string => value; export const mkPlainString = (value: string): string => value;
@@ -70,11 +49,12 @@ export const getStringValue = (s: string | StringWithContext): string => {
return s; return s;
}; };
const emptyContext: NixStringContext = new Set();
export const getStringContext = (s: string | StringWithContext): NixStringContext => { export const getStringContext = (s: string | StringWithContext): NixStringContext => {
if (isStringWithContext(s)) { if (isStringWithContext(s)) {
return s.context; return s.context;
} }
return new Set(); return emptyContext;
}; };
export const mergeContexts = (...contexts: NixStringContext[]): NixStringContext => { export const mergeContexts = (...contexts: NixStringContext[]): NixStringContext => {
@@ -171,68 +151,3 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
return result; return result;
}; };
/**
* Extract input derivations and source paths from context
*
* IMPORTANT: Used by derivation builder to determine build dependencies.
*
* Returns:
* - inputDrvs: Map of derivation paths to their required output names
* - inputSrcs: Set of plain store paths (opaque) and drvDeep references
*
* Context type handling:
* - Opaque: Added to inputSrcs
* - DrvDeep: Computes FS closure (like Nix's computeFSClosure) - adds all paths
* in the dependency graph to inputSrcs, and all derivations with their
* outputs to inputDrvs
* - Built: Added to inputDrvs with specific output name
*/
export const extractInputDrvsAndSrcs = (
context: NixStringContext,
): { inputDrvs: Map<string, Set<string>>; inputSrcs: Set<string> } => {
const inputDrvs = new Map<string, Set<string>>();
const inputSrcs = new Set<string>();
for (const encoded of context) {
const elem = decodeContextElem(encoded);
switch (elem.type) {
case "opaque":
inputSrcs.add(elem.path);
break;
case "drvDeep": {
const closure: {
input_drvs: [string, string[]][];
input_srcs: string[];
} = Deno.core.ops.op_compute_fs_closure(elem.drvPath);
for (const src of closure.input_srcs) {
inputSrcs.add(src);
}
for (const [drvPath, outputs] of closure.input_drvs) {
let existingOutputs = inputDrvs.get(drvPath);
if (!existingOutputs) {
existingOutputs = new Set<string>();
inputDrvs.set(drvPath, existingOutputs);
}
for (const output of outputs) {
existingOutputs.add(output);
}
}
break;
}
case "built": {
let outputs = inputDrvs.get(elem.drvPath);
if (!outputs) {
outputs = new Set<string>();
inputDrvs.set(elem.drvPath, outputs);
}
outputs.add(elem.output);
break;
}
}
}
return { inputDrvs, inputSrcs };
};

View File

@@ -1,17 +1,8 @@
/**
* Lazy evaluation system for nix-js
* Implements thunks for lazy evaluation of Nix expressions
*/
import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
import { HAS_CONTEXT } from "./string-context";
import { IS_PATH } from "./types";
import { isAttrs, isList } from "./builtins/type-check"; import { isAttrs, isList } from "./builtins/type-check";
import { StringWithContext } from "./string-context";
import type { NixAttrs, NixStrictValue, NixValue } from "./types";
import { NixPath } from "./types";
/**
* Symbol used to mark objects as thunks
* This is exported to Rust via Nix.IS_THUNK
*/
export const IS_THUNK = Symbol("is_thunk"); export const IS_THUNK = Symbol("is_thunk");
const forceStack: NixThunk[] = []; const forceStack: NixThunk[] = [];
@@ -30,8 +21,7 @@ export const DEBUG_THUNKS = { enabled: true };
* - Evaluating (blackhole): func is undefined, result is undefined * - Evaluating (blackhole): func is undefined, result is undefined
* - Evaluated: func is undefined, result is defined * - Evaluated: func is undefined, result is defined
*/ */
export class NixThunk implements NixThunkInterface { export class NixThunk {
[key: symbol]: any;
readonly [IS_THUNK] = true as const; readonly [IS_THUNK] = true as const;
func: (() => NixValue) | undefined; func: (() => NixValue) | undefined;
result: NixStrictValue | undefined; result: NixStrictValue | undefined;
@@ -51,13 +41,8 @@ export class NixThunk implements NixThunkInterface {
} }
} }
/** export const isThunk = (value: NixValue): value is NixThunk => {
* Type guard to check if a value is a thunk return value instanceof NixThunk;
* @param value - Value to check
* @returns true if value is a NixThunk
*/
export const isThunk = (value: NixValue): value is NixThunkInterface => {
return value !== null && typeof value === "object" && IS_THUNK in value && value[IS_THUNK] === true;
}; };
/** /**
@@ -96,7 +81,7 @@ export const force = (value: NixValue): NixStrictValue => {
} }
const thunk = value as NixThunk; const thunk = value as NixThunk;
const func = thunk.func!; const func = thunk.func as () => NixValue;
thunk.func = undefined; thunk.func = undefined;
if (DEBUG_THUNKS.enabled) { if (DEBUG_THUNKS.enabled) {
@@ -126,25 +111,12 @@ export const force = (value: NixValue): NixStrictValue => {
} }
}; };
/** export const createThunk = (func: () => NixValue, label?: string): NixThunk => {
* Create a new thunk from a function
* @param func - Function that produces a value when called
* @param label - Optional label for debugging
* @returns A new NixThunk wrapping the function
*/
export const createThunk = (func: () => NixValue, label?: string): NixThunkInterface => {
return new NixThunk(func, label); return new NixThunk(func, label);
}; };
/**
* Symbol to mark cyclic references detected during deep forcing
*/
export const IS_CYCLE = Symbol("is_cycle"); export const IS_CYCLE = Symbol("is_cycle");
export const CYCLE_MARKER = { [IS_CYCLE]: true as const };
/**
* Marker object for cyclic references
*/
export const CYCLE_MARKER = { [IS_CYCLE]: true };
/** /**
* Deeply force a value, handling cycles by returning a special marker. * Deeply force a value, handling cycles by returning a special marker.
@@ -160,16 +132,13 @@ export const forceDeep = (value: NixValue, seen: WeakSet<object> = new WeakSet()
} }
if (seen.has(forced)) { if (seen.has(forced)) {
if (Array.isArray(forced)) {
return [CYCLE_MARKER];
}
return CYCLE_MARKER; return CYCLE_MARKER;
} }
if (isAttrs(forced) || isList(forced)) { if ((isAttrs(forced) && forced.size > 0) || (isList(forced) && forced.length > 0)) {
seen.add(forced); seen.add(forced);
} }
if (HAS_CONTEXT in forced || IS_PATH in forced) { if (forced instanceof StringWithContext || forced instanceof NixPath) {
return forced; return forced;
} }
@@ -177,10 +146,10 @@ export const forceDeep = (value: NixValue, seen: WeakSet<object> = new WeakSet()
return forced.map((item) => forceDeep(item, seen)); return forced.map((item) => forceDeep(item, seen));
} }
if (typeof forced === "object") { if (forced instanceof Map) {
const result: Record<string, NixValue> = {}; const result: NixAttrs = new Map();
for (const [key, val] of Object.entries(forced)) { for (const [key, val] of forced) {
result[key] = forceDeep(val, seen); result.set(key, forceDeep(val, seen));
} }
return result; return result;
} }
@@ -207,10 +176,10 @@ export const forceShallow = (value: NixValue): NixStrictValue => {
} }
if (isAttrs(forced)) { if (isAttrs(forced)) {
const result: Record<string, NixValue> = {}; const result: NixAttrs = new Map();
for (const [key, val] of Object.entries(forced)) { for (const [key, val] of forced) {
const forcedVal = force(val); const forcedVal = force(val as NixValue);
result[key] = forcedVal === forced ? CYCLE_MARKER : forcedVal; result.set(key, forcedVal === forced ? CYCLE_MARKER : forcedVal);
} }
return result; return result;
} }

View File

@@ -1,27 +1,18 @@
/** import { isAttrs, isFunction, typeOf } from "./builtins/type-check";
* Type assertion helpers for runtime type checking import { force } from "./thunk";
* These functions force evaluation and verify the type, throwing errors on mismatch
*/
import type { import type {
NixValue,
NixList,
NixAttrs, NixAttrs,
NixFloat,
NixFunction, NixFunction,
NixInt, NixInt,
NixFloat, NixList,
NixNumber, NixNumber,
NixString,
NixPath, NixPath,
NixString,
NixValue,
} from "./types"; } from "./types";
import { isStringWithContext, isNixPath } from "./types"; import { isNixPath, isStringWithContext } from "./types";
import { force } from "./thunk";
import { isAttrs, isFunction, typeOf } from "./builtins/type-check";
/**
* Force a value and assert it's a list
* @throws TypeError if value is not a list after forcing
*/
export const forceList = (value: NixValue): NixList => { export const forceList = (value: NixValue): NixList => {
const forced = force(value); const forced = force(value);
if (!Array.isArray(forced)) { if (!Array.isArray(forced)) {
@@ -30,27 +21,19 @@ export const forceList = (value: NixValue): NixList => {
return forced; return forced;
}; };
/**
* Force a value and assert it's a function or functor
* @throws TypeError if value is not a function or functor after forcing
*/
export const forceFunction = (value: NixValue): NixFunction => { export const forceFunction = (value: NixValue): NixFunction => {
const forced = force(value); const forced = force(value);
if (isFunction(forced)) { if (isFunction(forced)) {
return forced; return forced;
} }
if (typeof forced === "object" && !Array.isArray(forced) && forced !== null && "__functor" in forced) { if (forced instanceof Map && forced.has("__functor")) {
const functorSet = forced as NixAttrs; const functorSet = forced as NixAttrs;
const functor = forceFunction(functorSet.__functor); const functor = forceFunction(functorSet.get("__functor") as NixValue);
return (arg: NixValue) => forceFunction(functor(functorSet))(arg); return (arg: NixValue) => forceFunction(functor(functorSet))(arg);
} }
throw new TypeError(`Expected function, got ${typeOf(forced)}`); throw new TypeError(`Expected function, got ${typeOf(forced)}`);
}; };
/**
* Force a value and assert it's an attribute set
* @throws TypeError if value is not an attribute set after forcing
*/
export const forceAttrs = (value: NixValue): NixAttrs => { export const forceAttrs = (value: NixValue): NixAttrs => {
const forced = force(value); const forced = force(value);
if (!isAttrs(forced)) { if (!isAttrs(forced)) {
@@ -59,10 +42,6 @@ export const forceAttrs = (value: NixValue): NixAttrs => {
return forced; return forced;
}; };
/**
* Force a value and assert it's a string (plain or with context)
* @throws TypeError if value is not a string after forcing
*/
export const forceStringValue = (value: NixValue): string => { export const forceStringValue = (value: NixValue): string => {
const forced = force(value); const forced = force(value);
if (typeof forced === "string") { if (typeof forced === "string") {
@@ -74,10 +53,6 @@ export const forceStringValue = (value: NixValue): string => {
throw new TypeError(`Expected string, got ${typeOf(forced)}`); throw new TypeError(`Expected string, got ${typeOf(forced)}`);
}; };
/**
* Force a value and assert it's a string, returning NixString (preserving context)
* @throws TypeError if value is not a string after forcing
*/
export const forceString = (value: NixValue): NixString => { export const forceString = (value: NixValue): NixString => {
const forced = force(value); const forced = force(value);
if (typeof forced === "string") { if (typeof forced === "string") {
@@ -100,10 +75,6 @@ export const forceStringNoCtx = (value: NixValue): string => {
throw new TypeError(`Expected string, got ${typeOf(forced)}`); throw new TypeError(`Expected string, got ${typeOf(forced)}`);
}; };
/**
* Force a value and assert it's a boolean
* @throws TypeError if value is not a boolean after forcing
*/
export const forceBool = (value: NixValue): boolean => { export const forceBool = (value: NixValue): boolean => {
const forced = force(value); const forced = force(value);
if (typeof forced !== "boolean") { if (typeof forced !== "boolean") {
@@ -112,10 +83,6 @@ export const forceBool = (value: NixValue): boolean => {
return forced; return forced;
}; };
/**
* Force a value and extract int value
* @throws TypeError if value is not an int
*/
export const forceInt = (value: NixValue): NixInt => { export const forceInt = (value: NixValue): NixInt => {
const forced = force(value); const forced = force(value);
if (typeof forced === "bigint") { if (typeof forced === "bigint") {
@@ -124,10 +91,6 @@ export const forceInt = (value: NixValue): NixInt => {
throw new TypeError(`Expected int, got ${typeOf(forced)}`); throw new TypeError(`Expected int, got ${typeOf(forced)}`);
}; };
/**
* Force a value and extract float value
* @throws TypeError if value is not a float
*/
export const forceFloat = (value: NixValue): NixFloat => { export const forceFloat = (value: NixValue): NixFloat => {
const forced = force(value); const forced = force(value);
if (typeof forced === "number") { if (typeof forced === "number") {
@@ -136,10 +99,6 @@ export const forceFloat = (value: NixValue): NixFloat => {
throw new TypeError(`Expected float, got ${typeOf(forced)}`); throw new TypeError(`Expected float, got ${typeOf(forced)}`);
}; };
/**
* Force a value and extract numeric value (int or float)
* @throws TypeError if value is not a numeric type
*/
export const forceNumeric = (value: NixValue): NixNumber => { export const forceNumeric = (value: NixValue): NixNumber => {
const forced = force(value); const forced = force(value);
if (typeof forced === "bigint" || typeof forced === "number") { if (typeof forced === "bigint" || typeof forced === "number") {
@@ -148,28 +107,17 @@ export const forceNumeric = (value: NixValue): NixNumber => {
throw new TypeError(`Expected numeric type, got ${typeOf(forced)}`); throw new TypeError(`Expected numeric type, got ${typeOf(forced)}`);
}; };
/**
* Coerce two numeric values to a common type for arithmetic
* Rule: If either is float, convert both to float; otherwise keep as bigint
* @returns [a, b] tuple of coerced values
*/
export const coerceNumeric = (a: NixNumber, b: NixNumber): [NixFloat, NixFloat] | [NixInt, NixInt] => { export const coerceNumeric = (a: NixNumber, b: NixNumber): [NixFloat, NixFloat] | [NixInt, NixInt] => {
const aIsInt = typeof a === "bigint"; const aIsInt = typeof a === "bigint";
const bIsInt = typeof b === "bigint"; const bIsInt = typeof b === "bigint";
// If either is float, convert both to float
if (!aIsInt || !bIsInt) { if (!aIsInt || !bIsInt) {
return [Number(a), Number(b)]; return [Number(a), Number(b)];
} }
// Both are integers
return [a, b]; return [a, b];
}; };
/**
* Force a value and assert it's a path
* @throws TypeError if value is not a path after forcing
*/
export const forceNixPath = (value: NixValue): NixPath => { export const forceNixPath = (value: NixValue): NixPath => {
const forced = force(value); const forced = force(value);
if (isNixPath(forced)) { if (isNixPath(forced)) {

View File

@@ -1,27 +1,24 @@
/** import { PRIMOP_METADATA, type PrimopMetadata } from "./builtins";
* Core TypeScript type definitions for nix-js runtime import { HAS_CONTEXT, isStringWithContext, type StringWithContext } from "./string-context";
*/ import { type CYCLE_MARKER, force, type NixThunk } from "./thunk";
import { force, IS_THUNK } from "./thunk";
import { type StringWithContext, HAS_CONTEXT, isStringWithContext, getStringContext } from "./string-context";
import { op } from "./operators";
import { forceAttrs, forceStringNoCtx } from "./type-assert"; import { forceAttrs, forceStringNoCtx } from "./type-assert";
import { isString, typeOf } from "./builtins/type-check";
export { HAS_CONTEXT, isStringWithContext }; export { HAS_CONTEXT, isStringWithContext };
export type { StringWithContext }; export type { StringWithContext };
export const IS_PATH = Symbol("IS_PATH"); export const IS_PATH = Symbol("IS_PATH");
export interface NixPath { export class NixPath {
readonly [IS_PATH]: true; readonly [IS_PATH] = true as const;
value: string; value: string;
constructor(value: string) {
this.value = value;
}
} }
export const isNixPath = (v: NixStrictValue): v is NixPath => { export const isNixPath = (v: NixStrictValue): v is NixPath => {
return typeof v === "object" && v !== null && IS_PATH in v; return v instanceof NixPath;
}; };
// Nix primitive types
export type NixInt = bigint; export type NixInt = bigint;
export type NixFloat = number; export type NixFloat = number;
export type NixNumber = NixInt | NixFloat; export type NixNumber = NixInt | NixFloat;
@@ -29,18 +26,20 @@ export type NixBool = boolean;
export type NixString = string | StringWithContext; export type NixString = string | StringWithContext;
export type NixNull = null; export type NixNull = null;
// Nix composite types export const ATTR_POSITIONS = Symbol("attrPositions");
export type NixList = NixValue[]; export type NixList = NixValue[];
// FIXME: reject contextful string export type NixAttrs = Map<string, NixValue> & { [ATTR_POSITIONS]?: Map<string, number> };
export type NixAttrs = { [key: string]: NixValue }; export type NixFunction = ((arg: NixValue) => NixValue) & {
export type NixFunction = ((arg: NixValue) => NixValue) & { args?: NixArgs }; args?: NixArgs;
[PRIMOP_METADATA]?: PrimopMetadata;
};
export class NixArgs { export class NixArgs {
required: string[]; required: string[];
optional: string[]; optional: string[];
allowed: Set<string>; allowed: Set<string>;
ellipsis: boolean; ellipsis: boolean;
positions: Record<string, string>; positions: Map<string, number>;
constructor(required: string[], optional: string[], positions: Record<string, string>, ellipsis: boolean) { constructor(required: string[], optional: string[], positions: Map<string, number>, ellipsis: boolean) {
this.required = required; this.required = required;
this.optional = optional; this.optional = optional;
this.positions = positions; this.positions = positions;
@@ -51,13 +50,13 @@ export class NixArgs {
const attrs = forceAttrs(arg); const attrs = forceAttrs(arg);
for (const key of this.required) { for (const key of this.required) {
if (!Object.hasOwn(attrs, key)) { if (!attrs.has(key)) {
throw new Error(`Function called without required argument '${key}'`); throw new Error(`Function called without required argument '${key}'`);
} }
} }
if (!this.ellipsis) { if (!this.ellipsis) {
for (const key in attrs) { for (const key of attrs.keys()) {
if (!this.allowed.has(key)) { if (!this.allowed.has(key)) {
throw new Error(`Function called with unexpected argument '${key}'`); throw new Error(`Function called with unexpected argument '${key}'`);
} }
@@ -69,33 +68,18 @@ export const mkFunction = (
f: (arg: NixValue) => NixValue, f: (arg: NixValue) => NixValue,
required: string[], required: string[],
optional: string[], optional: string[],
positions: Record<string, string>, positions: Map<string, number>,
ellipsis: boolean, ellipsis: boolean,
): NixFunction => { ): NixFunction => {
const func = f as NixFunction; const func: NixFunction = f;
func.args = new NixArgs(required, optional, positions, ellipsis); func.args = new NixArgs(required, optional, positions, ellipsis);
return func; return func;
}; };
export const mkAttrs = (attrs: NixAttrs, keys: NixValue[], values: NixValue[]): NixAttrs => { export const mkAttrs = (
const len = keys.length;
for (let i = 0; i < len; i++) {
const key = force(keys[i]);
if (key === null) {
continue;
}
const str = forceStringNoCtx(key);
attrs[str] = values[i];
}
return attrs;
};
const ATTR_POSITIONS = Symbol("attrPositions");
export const mkAttrsWithPos = (
attrs: NixAttrs, attrs: NixAttrs,
positions: Record<string, string>, positions: Map<string, number>,
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] }, dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: number[] },
): NixAttrs => { ): NixAttrs => {
if (dyns) { if (dyns) {
const len = dyns.dynKeys.length; const len = dyns.dynKeys.length;
@@ -105,62 +89,27 @@ export const mkAttrsWithPos = (
continue; continue;
} }
const str = forceStringNoCtx(key); const str = forceStringNoCtx(key);
attrs[str] = dyns.dynVals[i]; attrs.set(str, dyns.dynVals[i]);
positions[str] = dyns.dynSpans[i]; positions.set(str, dyns.dynSpans[i]);
} }
} }
if (Object.keys(positions).length > 0) { if (positions.size > 0) {
Object.defineProperty(attrs, ATTR_POSITIONS, { attrs[ATTR_POSITIONS] = positions;
value: positions,
enumerable: false,
writable: false,
});
} }
return attrs; return attrs;
}; };
export { ATTR_POSITIONS };
/**
* Interface for lazy thunk values
* Thunks delay evaluation until forced
*/
export interface NixThunkInterface {
readonly [IS_THUNK]: true;
func: (() => NixValue) | undefined;
result: NixStrictValue | undefined;
}
// Union of all Nix primitive types
export type NixPrimitive = NixNull | NixBool | NixInt | NixFloat | NixString; export type NixPrimitive = NixNull | NixBool | NixInt | NixFloat | NixString;
export type NixValue =
| NixPrimitive
| NixPath
| NixList
| NixAttrs
| NixFunction
| NixThunk
| typeof CYCLE_MARKER;
export type NixStrictValue = Exclude<NixValue, NixThunk>;
/**
* NixValue: Union type representing any possible Nix value
* This is the core type used throughout the runtime
*/
export type NixValue = NixPrimitive | NixPath | NixList | NixAttrs | NixFunction | NixThunkInterface;
export type NixStrictValue = Exclude<NixValue, NixThunkInterface>;
/**
* CatchableError: Error type thrown by `builtins.throw`
* This can be caught by `builtins.tryEval`
*/
export class CatchableError extends Error {} export class CatchableError extends Error {}
// Operator function signatures
export type BinaryOp<T = NixValue, U = NixValue, R = NixValue> = (a: T, b: U) => R;
export type UnaryOp<T = NixValue, R = NixValue> = (a: T) => R;
/**
* Curried function types - All Nix builtins must be curried!
*
* Examples:
* - add: Curried2<number, number, number> = (a) => (b) => a + b
* - map: Curried2<NixFunction, NixList, NixList> = (f) => (list) => list.map(f)
*/
export type Curried2<A, B, R> = (a: A) => (b: B) => R;
export type Curried3<A, B, C, R> = (a: A) => (b: B) => (c: C) => R;
export type Curried4<A, B, C, D, R> = (a: A) => (b: B) => (c: C) => (d: D) => R;

View File

@@ -1,36 +1,116 @@
import type { NixRuntime } from ".."; import type { NixRuntime } from "..";
import type { FetchTarballResult, FetchUrlResult, FetchGitResult, FetchHgResult } from "../builtins/io"; import type { builtins } from "../builtins";
import type { FetchGitResult, FetchTarballResult, FetchUrlResult } from "../builtins/io";
import type {
assert,
call,
concatStringsWithContext,
hasAttr,
lookupWith,
mkPos,
resolvePath,
select,
selectWithDefault,
} from "../helpers";
import type { op } from "../operators";
import type { createThunk, force } from "../thunk";
import type { forceBool } from "../type-assert";
import type { mkAttrs, mkFunction, NixAttrs, NixStrictValue } from "../types";
declare global { declare global {
var Nix: NixRuntime; var Nix: NixRuntime;
var $t: typeof createThunk;
var $f: typeof force;
var $fb: typeof forceBool;
var $a: typeof assert;
var $c: typeof call;
var $h: typeof hasAttr;
var $s: typeof select;
var $sd: typeof selectWithDefault;
var $l: typeof lookupWith;
var $r: typeof resolvePath;
var $cs: typeof concatStringsWithContext;
var $ma: typeof mkAttrs;
var $mf: typeof mkFunction;
var $mp: typeof mkPos;
var $oa: typeof op.add;
var $os: typeof op.sub;
var $om: typeof op.mul;
var $od: typeof op.div;
var $oe: typeof op.eq;
var $ol: typeof op.lt;
var $og: typeof op.gt;
var $oc: typeof op.concat;
var $ou: typeof op.update;
var $b: typeof builtins;
var $e: NixAttrs;
var $gb: typeof Nix.getReplBinding;
namespace Deno { namespace Deno {
namespace core { namespace core {
namespace ops { namespace ops {
function op_import(path: string): [Uint8Array, string];
function op_scoped_import(path: string, scopeKeys: string[]): [Uint8Array, string];
function op_resolve_path(currentDir: string, path: string): string; function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_scoped_import(path: string, scopeKeys: string[]): string;
function op_read_file(path: string): string; function op_read_file(path: string): string;
function op_read_file_type(path: string): string; function op_read_file_type(path: string): string;
function op_read_dir(path: string): Record<string, string>; function op_read_dir(path: string): Map<string, string>;
function op_path_exists(path: string): boolean; function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string; function op_walk_dir(path: string): [string, string][];
function op_make_placeholder(output: string): string; function op_make_placeholder(output: string): string;
function op_decode_span(span: string): { function op_store_path(path: string): string;
file: string | null;
line: number | null; function op_convert_hash(hash: string, hashAlgo: string | null, toHashFormat: string): string;
column: number | null; function op_hash_string(algo: string, data: string): string;
}; function op_hash_file(algo: string, path: string): string;
function op_make_store_path(ty: string, hash_hex: string, name: string): string; function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
function op_parse_hash(hash_str: string, algo: string | null): { hex: string; algo: string };
function op_make_fixed_output_path( function op_add_path(
hash_algo: string, path: string,
hash: string, name: string | null,
hash_mode: string, recursive: boolean,
name: string, sha256: string | null,
): string; ): string;
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
includePaths: string[],
): string;
function op_decode_span(span: number): NixAttrs;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): NixStrictValue;
function op_from_toml(toml: string): NixStrictValue;
function op_to_xml(e: NixValue): [string, string[]];
function op_finalize_derivation(
name: string,
builder: string,
platform: string,
outputs: string[],
args: string[],
env: [string, string][],
context: string[],
fixedOutput: { hashAlgo: string; hash: string; hashMode: string } | null,
): { drvPath: string; outputs: [string, string][] };
function op_fetch_url( function op_fetch_url(
url: string, url: string,
expected_hash: string | null, expectedHash: string | null,
name: string | null, name: string | null,
executable: boolean, executable: boolean,
): FetchUrlResult; ): FetchUrlResult;
@@ -45,37 +125,9 @@ declare global {
rev: string | null, rev: string | null,
shallow: boolean, shallow: boolean,
submodules: boolean, submodules: boolean,
all_refs: boolean, allRefs: boolean,
name: string | null, name: string | null,
): FetchGitResult; ): FetchGitResult;
function op_add_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_write_derivation(drv_name: string, aterm: string, references: string[]): string;
function op_read_derivation_outputs(drv_path: string): string[];
function op_compute_fs_closure(drv_path: string): {
input_drvs: [string, string[]][];
input_srcs: string[];
};
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
include_paths: string[],
): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): any;
function op_from_toml(toml: string): any;
} }
} }
} }

617
nix-js/runtime-ts/src/vm.ts Normal file
View File

@@ -0,0 +1,617 @@
import {
assert,
call,
concatStringsWithContext,
hasAttr,
lookupWith,
mkPos,
resolvePath,
select,
selectWithDefault,
} from "./helpers";
import { op } from "./operators";
import { NixThunk } from "./thunk";
import { forceBool } from "./type-assert";
import { mkAttrs, NixArgs, type NixAttrs, type NixFunction, type NixValue } from "./types";
import { builtins } from "./builtins";
/**
 * Bytecode opcodes interpreted by `execFrame` below.
 *
 * NOTE(review): the numeric values appear to be a serialization contract with
 * the bytecode producer (the `op_import` op returns a `Uint8Array` program) —
 * presumably the Rust-side compiler. Do not renumber without updating the
 * encoder; confirm which side is authoritative. The values are grouped per
 * hex "decade" (0x19 jumps to 0x20, 0x29 to 0x30, ...), so the gaps between
 * groups are intentional, not missing opcodes.
 */
enum Op {
  // Literals / constants
  PushConst = 0x01, // u32 index into the module constant pool
  PushString = 0x02, // u32 index into the module string table
  PushNull = 0x03,
  PushTrue = 0x04,
  PushFalse = 0x05,
  // Local-variable access (scope chain)
  LoadLocal = 0x06, // u32 slot in the current frame
  LoadOuter = 0x07, // u8 scope depth + u32 slot in that enclosing frame
  StoreLocal = 0x08, // u32 slot; pops the value to store
  AllocLocals = 0x09, // u32 count of null-initialized slots to append
  // Thunks, closures, calls
  MakeThunk = 0x0A, // u32 body pc + u32 debug-label string index
  MakeClosure = 0x0B, // u32 body pc + u32 local-slot count
  MakePatternClosure = 0x0C, // closure with a formal-args pattern (see handler)
  Call = 0x0D, // u32 span id for error reporting
  CallNoSpan = 0x0E,
  // Attribute sets
  MakeAttrs = 0x0F,
  MakeAttrsDyn = 0x10,
  MakeEmptyAttrs = 0x11, // pushes the shared empty attrs singleton
  Select = 0x12,
  SelectDefault = 0x13,
  HasAttr = 0x14,
  MakeList = 0x15,
  // Binary / unary operators (dispatch to `op.*`)
  OpAdd = 0x16,
  OpSub = 0x17,
  OpMul = 0x18,
  OpDiv = 0x19,
  OpEq = 0x20,
  OpNeq = 0x21,
  OpLt = 0x22,
  OpGt = 0x23,
  OpLeq = 0x24,
  OpGeq = 0x25,
  OpConcat = 0x26,
  OpUpdate = 0x27,
  OpNeg = 0x28,
  OpNot = 0x29,
  // Control flow
  ForceBool = 0x30,
  JumpIfFalse = 0x31,
  JumpIfTrue = 0x32,
  Jump = 0x33,
  ConcatStrings = 0x34,
  ResolvePath = 0x35,
  Assert = 0x36,
  // `with` scopes
  PushWith = 0x37,
  PopWith = 0x38,
  WithLookup = 0x39,
  // Builtins / environment
  LoadBuiltins = 0x40,
  LoadBuiltin = 0x41,
  MkPos = 0x43,
  LoadReplBinding = 0x44,
  LoadScopedBinding = 0x45,
  Return = 0x46,
}
/**
 * One frame of local variables plus a link to the lexically enclosing frame.
 * `LoadOuter` walks `parent` links to reach outer scopes; `parent` is null at
 * the root frame created by `execBytecode`.
 */
interface ScopeChain {
  locals: NixValue[];
  parent: ScopeChain | null;
}
/**
 * Linked stack of `with` environments, innermost first.
 * `env` is the value brought into scope by a `with` expression — presumably
 * consulted by the WithLookup opcode (its handler is outside this view;
 * confirm). `last` links to the next-outer `with`, null when none.
 */
interface WithScope {
  env: NixValue;
  last: WithScope | null;
}
// Module string table referenced by PushString / MakeThunk and friends.
// NOTE(review): never populated in this file — presumably filled by the
// bytecode loader before execution; confirm against the caller.
const strings: string[] = [];
// Constant pool read by PushConst; same loading caveat as `strings`.
const constants: NixValue[] = [];
// Shared singleton pushed by MakeEmptyAttrs for every empty attr set.
// NOTE(review): assumes attr sets are never mutated after construction —
// any in-place mutation would leak across all empty-attrs uses; verify.
const $e: NixAttrs = new Map();
/** Read a little-endian unsigned 16-bit integer from `code` at `offset`. */
function readU16(code: Uint8Array, offset: number): number {
  const lo = code[offset];
  const hi = code[offset + 1];
  return lo + hi * 0x100;
}
/**
 * Read a little-endian unsigned 32-bit integer from `code` at `offset`.
 * The final `>>> 0` reinterprets the signed bitwise result as unsigned.
 */
function readU32(code: Uint8Array, offset: number): number {
  let acc = 0;
  // Fold the four bytes high-to-low so each shift makes room for the next.
  for (let i = 3; i >= 0; i--) {
    acc = (acc << 8) | code[offset + i];
  }
  return acc >>> 0;
}
/** Read a little-endian signed 32-bit integer from `code` at `offset`. */
function readI32(code: Uint8Array, offset: number): number {
  const low = code[offset] | (code[offset + 1] << 8) | (code[offset + 2] << 16);
  // Shifting the high byte into bit 31 makes the `|` result carry the sign.
  return low | (code[offset + 3] << 24);
}
/**
 * Run a top-level bytecode program from pc 0 with a fresh, empty root scope
 * and no `with` environments or scoped bindings.
 */
export function execBytecode(code: Uint8Array, currentDir: string): NixValue {
  const rootScope: ScopeChain = { locals: [], parent: null };
  return execFrame(code, 0, rootScope, currentDir, null, null);
}
/**
 * Run a bytecode program like `execBytecode`, additionally passing `scopeMap`
 * through to the frame for scoped-binding lookups.
 */
export function execBytecodeScoped(
  code: Uint8Array,
  currentDir: string,
  scopeMap: NixAttrs,
): NixValue {
  const rootScope: ScopeChain = { locals: [], parent: null };
  return execFrame(code, 0, rootScope, currentDir, null, scopeMap);
}
function execFrame(
code: Uint8Array,
startPc: number,
chain: ScopeChain,
currentDir: string,
withScope: WithScope | null,
scopeMap: NixAttrs | null,
): NixValue {
const locals = chain.locals;
const stack: NixValue[] = [];
let pc = startPc;
for (;;) {
const opcode = code[pc++];
switch (opcode) {
case Op.PushConst: {
const idx = readU32(code, pc);
pc += 4;
stack.push(constants[idx]);
break;
}
case Op.PushString: {
const idx = readU32(code, pc);
pc += 4;
stack.push(strings[idx]);
break;
}
case Op.PushNull:
stack.push(null);
break;
case Op.PushTrue:
stack.push(true);
break;
case Op.PushFalse:
stack.push(false);
break;
case Op.LoadLocal: {
const idx = readU32(code, pc);
pc += 4;
stack.push(locals[idx]);
break;
}
case Op.LoadOuter: {
const layer = code[pc++];
const idx = readU32(code, pc);
pc += 4;
let c: ScopeChain = chain;
for (let i = 0; i < layer; i++) c = c.parent!;
stack.push(c.locals[idx]);
break;
}
case Op.StoreLocal: {
const idx = readU32(code, pc);
pc += 4;
locals[idx] = stack.pop()!;
break;
}
case Op.AllocLocals: {
const n = readU32(code, pc);
pc += 4;
for (let i = 0; i < n; i++) locals.push(null);
break;
}
case Op.MakeThunk: {
const bodyPc = readU32(code, pc);
pc += 4;
const labelIdx = readU32(code, pc);
pc += 4;
const label = strings[labelIdx];
const scopeChain = chain;
const scopeCode = code;
const scopeDir = currentDir;
const scopeWith = withScope;
stack.push(
new NixThunk(
() => execFrame(scopeCode, bodyPc, scopeChain, scopeDir, scopeWith, null),
label,
),
);
break;
}
case Op.MakeClosure: {
const bodyPc = readU32(code, pc);
pc += 4;
const nSlots = readU32(code, pc);
pc += 4;
const closureChain = chain;
const closureCode = code;
const closureDir = currentDir;
const closureWith = withScope;
const func: NixFunction = (arg: NixValue) => {
const innerLocals = new Array<NixValue>(1 + nSlots).fill(null);
innerLocals[0] = arg;
const innerChain: ScopeChain = { locals: innerLocals, parent: closureChain };
return execFrame(closureCode, bodyPc, innerChain, closureDir, closureWith, null);
};
stack.push(func);
break;
}
case Op.MakePatternClosure: {
const bodyPc = readU32(code, pc);
pc += 4;
const nSlots = readU32(code, pc);
pc += 4;
const nRequired = readU16(code, pc);
pc += 2;
const nOptional = readU16(code, pc);
pc += 2;
const hasEllipsis = code[pc++] !== 0;
const required: string[] = [];
for (let i = 0; i < nRequired; i++) {
required.push(strings[readU32(code, pc)]);
pc += 4;
}
const optional: string[] = [];
for (let i = 0; i < nOptional; i++) {
optional.push(strings[readU32(code, pc)]);
pc += 4;
}
const positions = new Map<string, number>();
const nTotal = nRequired + nOptional;
for (let i = 0; i < nTotal; i++) {
const nameIdx = readU32(code, pc);
pc += 4;
const spanId = readU32(code, pc);
pc += 4;
positions.set(strings[nameIdx], spanId);
}
const closureChain = chain;
const closureCode = code;
const closureDir = currentDir;
const closureWith = withScope;
const func: NixFunction = (arg: NixValue) => {
const innerLocals = new Array<NixValue>(1 + nSlots).fill(null);
innerLocals[0] = arg;
const innerChain: ScopeChain = { locals: innerLocals, parent: closureChain };
return execFrame(closureCode, bodyPc, innerChain, closureDir, closureWith, null);
};
func.args = new NixArgs(required, optional, positions, hasEllipsis);
stack.push(func);
break;
}
case Op.Call: {
const spanId = readU32(code, pc);
pc += 4;
const arg = stack.pop()!;
const func = stack.pop()!;
stack.push(call(func, arg, spanId));
break;
}
case Op.CallNoSpan: {
const arg = stack.pop()!;
const func = stack.pop()!;
stack.push(call(func, arg));
break;
}
case Op.MakeAttrs: {
const n = readU32(code, pc);
pc += 4;
const spanValues: number[] = [];
for (let i = 0; i < n; i++) {
spanValues.push(stack.pop() as number);
}
spanValues.reverse();
const map: NixAttrs = new Map();
const posMap = new Map<string, number>();
const pairs: [string, NixValue][] = [];
for (let i = 0; i < n; i++) {
const val = stack.pop()!;
const key = stack.pop() as string;
pairs.push([key, val]);
}
pairs.reverse();
for (let i = 0; i < n; i++) {
map.set(pairs[i][0], pairs[i][1]);
posMap.set(pairs[i][0], spanValues[i]);
}
stack.push(mkAttrs(map, posMap));
break;
}
case Op.MakeAttrsDyn: {
const nStatic = readU32(code, pc);
pc += 4;
const nDyn = readU32(code, pc);
pc += 4;
const dynTriples: [NixValue, NixValue, number][] = [];
for (let i = 0; i < nDyn; i++) {
const dynSpan = stack.pop() as number;
const dynVal = stack.pop()!;
const dynKey = stack.pop()!;
dynTriples.push([dynKey, dynVal, dynSpan]);
}
dynTriples.reverse();
const spanValues: number[] = [];
for (let i = 0; i < nStatic; i++) {
spanValues.push(stack.pop() as number);
}
spanValues.reverse();
const map: NixAttrs = new Map();
const posMap = new Map<string, number>();
const pairs: [string, NixValue][] = [];
for (let i = 0; i < nStatic; i++) {
const val = stack.pop()!;
const key = stack.pop() as string;
pairs.push([key, val]);
}
pairs.reverse();
for (let i = 0; i < nStatic; i++) {
map.set(pairs[i][0], pairs[i][1]);
posMap.set(pairs[i][0], spanValues[i]);
}
const dynKeys: NixValue[] = [];
const dynVals: NixValue[] = [];
const dynSpans: number[] = [];
for (const [k, v, s] of dynTriples) {
dynKeys.push(k);
dynVals.push(v);
dynSpans.push(s);
}
stack.push(mkAttrs(map, posMap, { dynKeys, dynVals, dynSpans }));
break;
}
case Op.MakeEmptyAttrs:
stack.push($e);
break;
case Op.Select: {
const nKeys = readU16(code, pc);
pc += 2;
const spanId = readU32(code, pc);
pc += 4;
const keys: NixValue[] = [];
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
keys.reverse();
const obj = stack.pop()!;
stack.push(select(obj, keys, spanId));
break;
}
case Op.SelectDefault: {
const nKeys = readU16(code, pc);
pc += 2;
const spanId = readU32(code, pc);
pc += 4;
const defaultVal = stack.pop()!;
const keys: NixValue[] = [];
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
keys.reverse();
const obj = stack.pop()!;
stack.push(selectWithDefault(obj, keys, defaultVal, spanId));
break;
}
case Op.HasAttr: {
const nKeys = readU16(code, pc);
pc += 2;
const keys: NixValue[] = [];
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
keys.reverse();
const obj = stack.pop()!;
stack.push(hasAttr(obj, keys));
break;
}
case Op.MakeList: {
const count = readU32(code, pc);
pc += 4;
const items: NixValue[] = new Array(count);
for (let i = count - 1; i >= 0; i--) {
items[i] = stack.pop()!;
}
stack.push(items);
break;
}
case Op.OpAdd: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.add(a, b));
break;
}
case Op.OpSub: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.sub(a, b));
break;
}
case Op.OpMul: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.mul(a, b));
break;
}
case Op.OpDiv: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.div(a, b));
break;
}
case Op.OpEq: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.eq(a, b));
break;
}
case Op.OpNeq: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(!op.eq(a, b));
break;
}
case Op.OpLt: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.lt(a, b));
break;
}
case Op.OpGt: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.gt(a, b));
break;
}
case Op.OpLeq: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(!op.gt(a, b));
break;
}
case Op.OpGeq: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(!op.lt(a, b));
break;
}
case Op.OpConcat: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.concat(a, b));
break;
}
case Op.OpUpdate: {
const b = stack.pop()!;
const a = stack.pop()!;
stack.push(op.update(a, b));
break;
}
case Op.OpNeg: {
const a = stack.pop()!;
stack.push(op.sub(0n, a));
break;
}
case Op.OpNot: {
const a = stack.pop()!;
stack.push(!forceBool(a));
break;
}
case Op.ForceBool: {
const val = stack.pop()!;
stack.push(forceBool(val));
break;
}
case Op.JumpIfFalse: {
const offset = readI32(code, pc);
pc += 4;
const val = stack.pop()!;
if (val === false) {
pc += offset;
}
break;
}
case Op.JumpIfTrue: {
const offset = readI32(code, pc);
pc += 4;
const val = stack.pop()!;
if (val === true) {
pc += offset;
}
break;
}
case Op.Jump: {
const offset = readI32(code, pc);
pc += 4;
pc += offset;
break;
}
case Op.ConcatStrings: {
const nParts = readU16(code, pc);
pc += 2;
const forceString = code[pc++] !== 0;
const parts: NixValue[] = new Array(nParts);
for (let i = nParts - 1; i >= 0; i--) {
parts[i] = stack.pop()!;
}
stack.push(concatStringsWithContext(parts, forceString));
break;
}
case Op.ResolvePath: {
const pathExpr = stack.pop()!;
stack.push(resolvePath(currentDir, pathExpr));
break;
}
case Op.Assert: {
const rawIdx = readU32(code, pc);
pc += 4;
const spanId = readU32(code, pc);
pc += 4;
const expr = stack.pop()!;
const assertion = stack.pop()!;
stack.push(assert(assertion, expr, strings[rawIdx], spanId));
break;
}
case Op.PushWith: {
const namespace = stack.pop()!;
withScope = { env: namespace, last: withScope };
break;
}
case Op.PopWith:
withScope = withScope!.last;
break;
case Op.WithLookup: {
const nameIdx = readU32(code, pc);
pc += 4;
stack.push(lookupWith(strings[nameIdx], withScope!));
break;
}
case Op.LoadBuiltins:
stack.push(builtins);
break;
case Op.LoadBuiltin: {
const idx = readU32(code, pc);
pc += 4;
stack.push(builtins.get(strings[idx])!);
break;
}
case Op.MkPos: {
const spanId = readU32(code, pc);
pc += 4;
stack.push(mkPos(spanId));
break;
}
case Op.LoadReplBinding: {
const idx = readU32(code, pc);
pc += 4;
stack.push(Nix.getReplBinding(strings[idx]));
break;
}
case Op.LoadScopedBinding: {
const idx = readU32(code, pc);
pc += 4;
stack.push(scopeMap!.get(strings[idx])!);
break;
}
case Op.Return:
return stack.pop()!;
default:
throw new Error(`Unknown bytecode opcode: ${opcode ? `0x${opcode.toString(16)}` : "undefined"} at pc=${pc - 1}`);
}
}
}
// Host bridge injected by the embedder (the Rust side); resolves REPL
// bindings by name at run time.
declare const Nix: {
  getReplBinding: (name: string) => NixValue;
};
// Expose the interned string and constant pools so the embedder can load them.
export { strings as vmStrings, constants as vmConstants };

View File

@@ -1,26 +0,0 @@
use anyhow::Result;
use nix_js::{context::Context, error::Source};
use std::process::exit;
/// CLI entry point: evaluate a single Nix expression given as the sole
/// command-line argument, print the resulting value, or render the
/// evaluation error with miette and exit non-zero.
fn main() -> Result<()> {
    nix_js::logging::init_logging();
    let mut args = std::env::args();
    // Expect exactly argv[0] plus one expression argument.
    if args.len() != 2 {
        eprintln!("Usage: {} expr", args.next().unwrap());
        exit(1);
    }
    args.next(); // skip the program name
    let expr = args.next().unwrap();
    let src = Source::new_eval(expr)?;
    match Context::new()?.eval(src) {
        Ok(value) => {
            println!("{value}");
            Ok(())
        }
        Err(err) => {
            // The error is boxed; unbox and pretty-print via miette.
            eprintln!("{:?}", miette::Report::new(*err));
            exit(1);
        }
    }
}

View File

@@ -1,63 +0,0 @@
use anyhow::Result;
use hashbrown::HashSet;
use nix_js::context::Context;
use nix_js::error::Source;
use rustyline::DefaultEditor;
use rustyline::error::ReadlineError;
/// Interactive REPL entry point.
///
/// Each input line is matched against `RE`; a line of the form
/// `ident = expr` adds a binding to the session scope, a bare identifier
/// prefix evaluates against the accumulated scope, and anything else is
/// evaluated shallowly. Ctrl-C clears the line, Ctrl-D exits.
fn main() -> Result<()> {
    nix_js::logging::init_logging();
    let mut rl = DefaultEditor::new()?;
    let mut context = Context::new()?;
    let mut scope = HashSet::new();
    // Capture 1: leading identifier; capture 2: the rest of the line.
    const RE: ere::Regex<3> = ere::compile_regex!("^[ \t]*([a-zA-Z_][a-zA-Z0-9_'-]*)[ \t]*(.*)$");
    loop {
        let readline = rl.readline("nix-js-repl> ");
        match readline {
            Ok(line) => {
                if line.trim().is_empty() {
                    continue;
                }
                let _ = rl.add_history_entry(line.as_str());
                if let Some([Some(_), Some(ident), Some(rest)]) = RE.exec(&line) {
                    if let Some(expr) = rest.strip_prefix('=') {
                        // `ident = expr`: bind `ident` in the REPL scope.
                        let expr = expr.trim_start();
                        if expr.is_empty() {
                            eprintln!("Error: missing expression after '='");
                            continue;
                        }
                        match context.add_binding(ident, expr, &mut scope) {
                            Ok(value) => println!("{} = {}", ident, value),
                            Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                        }
                    } else {
                        // Starts with an identifier but is not an assignment:
                        // evaluate with the REPL bindings in scope.
                        let src = Source::new_repl(line)?;
                        match context.eval_repl(src, &scope) {
                            Ok(value) => println!("{value}"),
                            Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                        }
                    }
                } else {
                    // Arbitrary expression: shallow evaluation only.
                    let src = Source::new_repl(line)?;
                    match context.eval_shallow(src) {
                        Ok(value) => println!("{value}"),
                        Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                    }
                }
            }
            Err(ReadlineError::Interrupted) => {
                // Ctrl-C: drop the current line, keep the session alive.
                println!();
            }
            Err(ReadlineError::Eof) => {
                println!("CTRL-D");
                break;
            }
            Err(err) => {
                eprintln!("Error: {err:?}");
                break;
            }
        }
    }
    Ok(())
}

906
nix-js/src/bytecode.rs Normal file
View File

@@ -0,0 +1,906 @@
use std::ops::Deref;
use std::path::Path;
use hashbrown::HashMap;
use num_enum::TryFromPrimitive;
use rnix::TextRange;
use crate::ir::{ArgId, Attr, BinOpKind, Ir, Param, RawIrRef, SymId, ThunkId, UnOpKind};
/// An interned literal constant.
///
/// Floats are stored as their raw IEEE-754 bit pattern (`f64::to_bits`)
/// so the enum can derive `Hash`/`Eq`, which `f64` itself does not implement.
#[derive(Clone, Hash, Eq, PartialEq)]
pub(crate) enum Constant {
    Int(i64),
    Float(u64),
}
/// The result of compiling one source unit: the raw instruction stream plus
/// the directory the VM uses to resolve relative paths (`Op::ResolvePath`).
pub struct Bytecode {
    pub code: Box<[u8]>,
    pub current_dir: String,
}
/// Services the emitter needs from the surrounding compilation context:
/// interning of strings and constants (returning pool indices), span
/// registration for diagnostics, symbol lookup, and the current directory.
pub(crate) trait BytecodeContext {
    fn intern_string(&mut self, s: &str) -> u32;
    fn intern_constant(&mut self, c: Constant) -> u32;
    fn register_span(&self, range: TextRange) -> u32;
    fn get_sym(&self, id: SymId) -> &str;
    fn get_current_dir(&self) -> &Path;
}
/// VM opcodes. The discriminant values MUST stay in sync with the JS
/// interpreter's dispatch switch.
///
/// NOTE(review): from `OpAdd` onward the values read like decimal codes
/// (0x19 is followed by 0x20, 0x29 by 0x30, ...), and 0x42 is unused —
/// presumably deliberate/reserved; confirm before reusing the gaps.
#[repr(u8)]
#[derive(Clone, Copy, TryFromPrimitive)]
#[allow(clippy::enum_variant_names)]
pub enum Op {
    PushConst = 0x01,
    PushString = 0x02,
    PushNull = 0x03,
    PushTrue = 0x04,
    PushFalse = 0x05,
    LoadLocal = 0x06,
    LoadOuter = 0x07,
    StoreLocal = 0x08,
    AllocLocals = 0x09,
    MakeThunk = 0x0A,
    MakeClosure = 0x0B,
    MakePatternClosure = 0x0C,
    Call = 0x0D,
    CallNoSpan = 0x0E,
    MakeAttrs = 0x0F,
    MakeAttrsDyn = 0x10,
    MakeEmptyAttrs = 0x11,
    Select = 0x12,
    SelectDefault = 0x13,
    HasAttr = 0x14,
    MakeList = 0x15,
    OpAdd = 0x16,
    OpSub = 0x17,
    OpMul = 0x18,
    OpDiv = 0x19,
    OpEq = 0x20,
    OpNeq = 0x21,
    OpLt = 0x22,
    OpGt = 0x23,
    OpLeq = 0x24,
    OpGeq = 0x25,
    OpConcat = 0x26,
    OpUpdate = 0x27,
    OpNeg = 0x28,
    OpNot = 0x29,
    ForceBool = 0x30,
    JumpIfFalse = 0x31,
    JumpIfTrue = 0x32,
    Jump = 0x33,
    ConcatStrings = 0x34,
    ResolvePath = 0x35,
    Assert = 0x36,
    PushWith = 0x37,
    PopWith = 0x38,
    WithLookup = 0x39,
    LoadBuiltins = 0x40,
    LoadBuiltin = 0x41,
    MkPos = 0x43,
    LoadReplBinding = 0x44,
    LoadScopedBinding = 0x45,
    Return = 0x46,
}
/// Per-lexical-scope bookkeeping during emission: the scope's nesting depth,
/// the function argument bound here (which occupies local slot 0 when
/// present), and the local slot assigned to each thunk in this scope.
struct ScopeInfo {
    depth: u16,
    arg_id: Option<ArgId>,
    thunk_map: HashMap<ThunkId, u32>,
}
/// Single-pass bytecode emitter: appends instructions to `code` while
/// tracking the lexical scope stack for local/outer slot resolution.
struct BytecodeEmitter<'a, Ctx: BytecodeContext> {
    ctx: &'a mut Ctx,
    code: Vec<u8>,
    scope_stack: Vec<ScopeInfo>,
}
/// Compile an IR tree into a standalone [`Bytecode`] unit.
///
/// The current directory is captured up front (it is baked into the
/// resulting bytecode for path resolution), then the emitter walks the
/// top-level IR node.
pub(crate) fn compile_bytecode(ir: RawIrRef<'_>, ctx: &mut impl BytecodeContext) -> Bytecode {
    let dir = ctx.get_current_dir().to_string_lossy().into_owned();
    let mut emitter = BytecodeEmitter::new(ctx);
    emitter.emit_toplevel(ir);
    let code = emitter.code.into_boxed_slice();
    Bytecode {
        code,
        current_dir: dir,
    }
}
/// Like [`compile_bytecode`], but drives the scoped-import entry point.
///
/// NOTE(review): this is identical to `compile_bytecode` except for the
/// `emit_toplevel_scoped` call (which itself mirrors `emit_toplevel`);
/// consider collapsing the two paths.
pub(crate) fn compile_bytecode_scoped(
    ir: RawIrRef<'_>,
    ctx: &mut impl BytecodeContext,
) -> Bytecode {
    let current_dir = ctx.get_current_dir().to_string_lossy().to_string();
    let mut emitter = BytecodeEmitter::new(ctx);
    emitter.emit_toplevel_scoped(ir);
    Bytecode {
        code: emitter.code.into_boxed_slice(),
        current_dir,
    }
}
impl<'a, Ctx: BytecodeContext> BytecodeEmitter<'a, Ctx> {
    fn new(ctx: &'a mut Ctx) -> Self {
        Self {
            ctx,
            code: Vec::with_capacity(4096),
            scope_stack: Vec::with_capacity(32),
        }
    }
    /// Append a single opcode byte.
    #[inline]
    fn emit_op(&mut self, op: Op) {
        self.code.push(op as u8);
    }
    #[inline]
    fn emit_u8(&mut self, val: u8) {
        self.code.push(val);
    }
    // Multi-byte immediates are little-endian; the JS VM decodes them the
    // same way.
    #[inline]
    fn emit_u16(&mut self, val: u16) {
        self.code.extend_from_slice(&val.to_le_bytes());
    }
    #[inline]
    fn emit_u32(&mut self, val: u32) {
        self.code.extend_from_slice(&val.to_le_bytes());
    }
    /// Reserve 4 bytes for a signed offset to be patched later; returns the
    /// byte offset of the placeholder within `code`.
    #[inline]
    fn emit_i32_placeholder(&mut self) -> usize {
        let offset = self.code.len();
        self.code.extend_from_slice(&[0u8; 4]);
        offset
    }
    /// Overwrite a previously reserved placeholder with `val`.
    #[inline]
    fn patch_i32(&mut self, offset: usize, val: i32) {
        self.code[offset..offset + 4].copy_from_slice(&val.to_le_bytes());
    }
    /// Emit an unconditional `Jump` whose target is patched later.
    #[inline]
    fn emit_jump_placeholder(&mut self) -> usize {
        self.emit_op(Op::Jump);
        self.emit_i32_placeholder()
    }
    /// Point a pending jump at the current end of `code`. Jump offsets are
    /// relative to the pc *after* the 4-byte operand, hence the `- 4`.
    #[inline]
    fn patch_jump_target(&mut self, placeholder_offset: usize) {
        let current_pos = self.code.len();
        let relative_offset = (current_pos as i32) - (placeholder_offset as i32) - 4;
        self.patch_i32(placeholder_offset, relative_offset);
    }
    /// Depth of the innermost scope (0 when no scope is open).
    fn current_depth(&self) -> u16 {
        self.scope_stack.last().map_or(0, |s| s.depth)
    }
    /// Find the (scope-layer, local-slot) of a thunk, searching from the
    /// innermost scope outward. `layer` is how many frames up the thunk
    /// lives relative to the current scope.
    fn resolve_thunk(&self, id: ThunkId) -> (u16, u32) {
        for scope in self.scope_stack.iter().rev() {
            if let Some(&local_idx) = scope.thunk_map.get(&id) {
                let layer = self.current_depth() - scope.depth;
                return (layer, local_idx);
            }
        }
        panic!("ThunkId {:?} not found in any scope", id);
    }
    /// Same as `resolve_thunk` but for the function argument, which always
    /// occupies local slot 0 of its scope.
    fn resolve_arg(&self, id: ArgId) -> (u16, u32) {
        for scope in self.scope_stack.iter().rev() {
            if scope.arg_id == Some(id) {
                let layer = self.current_depth() - scope.depth;
                return (layer, 0);
            }
        }
        panic!("ArgId {:?} not found in any scope", id);
    }
    /// Emit a load from the current frame (`LoadLocal`) or an enclosing one
    /// (`LoadOuter`, layer truncated to u8).
    fn emit_load(&mut self, layer: u16, local: u32) {
        if layer == 0 {
            self.emit_op(Op::LoadLocal);
            self.emit_u32(local);
        } else {
            self.emit_op(Op::LoadOuter);
            self.emit_u8(layer as u8);
            self.emit_u32(local);
        }
    }
    /// Count the thunks introduced by `with`/`TopLevel` nodes reachable from
    /// `ir` without crossing a function boundary (`Ir::Func` opens its own
    /// frame and is handled by `emit_func`).
    ///
    /// NOTE(review): unlike `collect_all_thunks`, this does not descend into
    /// the *bodies* of the collected thunks, so for a `with` nested inside a
    /// thunk body the count may be smaller than `collect_all_thunks().len()`
    /// — confirm the slot allocation in `emit_toplevel`/`emit_func` is
    /// intentional.
    fn count_with_thunks(&self, ir: RawIrRef<'_>) -> usize {
        match ir.deref() {
            Ir::With { thunks, body, .. } => thunks.len() + self.count_with_thunks(*body),
            Ir::TopLevel { thunks, body } => thunks.len() + self.count_with_thunks(*body),
            Ir::If { cond, consq, alter } => {
                self.count_with_thunks(*cond)
                    + self.count_with_thunks(*consq)
                    + self.count_with_thunks(*alter)
            }
            Ir::BinOp { lhs, rhs, .. } => {
                self.count_with_thunks(*lhs) + self.count_with_thunks(*rhs)
            }
            Ir::UnOp { rhs, .. } => self.count_with_thunks(*rhs),
            Ir::Call { func, arg, .. } => {
                self.count_with_thunks(*func) + self.count_with_thunks(*arg)
            }
            Ir::Assert {
                assertion, expr, ..
            } => self.count_with_thunks(*assertion) + self.count_with_thunks(*expr),
            Ir::Select { expr, default, .. } => {
                self.count_with_thunks(*expr) + default.map_or(0, |d| self.count_with_thunks(d))
            }
            Ir::HasAttr { lhs, .. } => self.count_with_thunks(*lhs),
            Ir::ConcatStrings { parts, .. } => {
                parts.iter().map(|p| self.count_with_thunks(*p)).sum()
            }
            Ir::Path(p) => self.count_with_thunks(*p),
            Ir::List { items } => items.iter().map(|item| self.count_with_thunks(*item)).sum(),
            Ir::AttrSet { stcs, dyns } => {
                stcs.iter()
                    .map(|(_, &(val, _))| self.count_with_thunks(val))
                    .sum::<usize>()
                    + dyns
                        .iter()
                        .map(|&(k, v, _)| self.count_with_thunks(k) + self.count_with_thunks(v))
                        .sum::<usize>()
            }
            _ => 0,
        }
    }
    /// Gather every thunk visible to one frame: the scope's own thunks plus
    /// all `with`/`TopLevel` thunks in `body`, then (via the worklist loop)
    /// any such thunks nested inside the bodies of already-collected thunks.
    fn collect_all_thunks<'ir>(
        &self,
        own_thunks: &[(ThunkId, RawIrRef<'ir>)],
        body: RawIrRef<'ir>,
    ) -> Vec<(ThunkId, RawIrRef<'ir>)> {
        let mut all = Vec::from(own_thunks);
        self.collect_with_thunks_recursive(body, &mut all);
        // Worklist: newly discovered thunk bodies may themselves contain
        // `with` thunks; `all` grows while we scan it.
        let mut i = 0;
        while i < all.len() {
            let thunk_body = all[i].1;
            self.collect_with_thunks_recursive(thunk_body, &mut all);
            i += 1;
        }
        all
    }
    /// Push every thunk declared by `with`/`TopLevel` nodes reachable from
    /// `ir` (not crossing function boundaries) onto `out`. Structural twin
    /// of `count_with_thunks` — keep the match arms in sync.
    fn collect_with_thunks_recursive<'ir>(
        &self,
        ir: RawIrRef<'ir>,
        out: &mut Vec<(ThunkId, RawIrRef<'ir>)>,
    ) {
        match ir.deref() {
            Ir::With { thunks, body, .. } => {
                for &(id, inner) in thunks.iter() {
                    out.push((id, inner));
                }
                self.collect_with_thunks_recursive(*body, out);
            }
            Ir::TopLevel { thunks, body } => {
                for &(id, inner) in thunks.iter() {
                    out.push((id, inner));
                }
                self.collect_with_thunks_recursive(*body, out);
            }
            Ir::If { cond, consq, alter } => {
                self.collect_with_thunks_recursive(*cond, out);
                self.collect_with_thunks_recursive(*consq, out);
                self.collect_with_thunks_recursive(*alter, out);
            }
            Ir::BinOp { lhs, rhs, .. } => {
                self.collect_with_thunks_recursive(*lhs, out);
                self.collect_with_thunks_recursive(*rhs, out);
            }
            Ir::UnOp { rhs, .. } => self.collect_with_thunks_recursive(*rhs, out),
            Ir::Call { func, arg, .. } => {
                self.collect_with_thunks_recursive(*func, out);
                self.collect_with_thunks_recursive(*arg, out);
            }
            Ir::Assert {
                assertion, expr, ..
            } => {
                self.collect_with_thunks_recursive(*assertion, out);
                self.collect_with_thunks_recursive(*expr, out);
            }
            Ir::Select { expr, default, .. } => {
                self.collect_with_thunks_recursive(*expr, out);
                if let Some(d) = default {
                    self.collect_with_thunks_recursive(*d, out);
                }
            }
            Ir::HasAttr { lhs, .. } => self.collect_with_thunks_recursive(*lhs, out),
            Ir::ConcatStrings { parts, .. } => {
                for p in parts.iter() {
                    self.collect_with_thunks_recursive(*p, out);
                }
            }
            Ir::Path(p) => self.collect_with_thunks_recursive(*p, out),
            Ir::List { items } => {
                for item in items.iter() {
                    self.collect_with_thunks_recursive(*item, out);
                }
            }
            Ir::AttrSet { stcs, dyns } => {
                for (_, &(val, _)) in stcs.iter() {
                    self.collect_with_thunks_recursive(val, out);
                }
                for &(key, val, _) in dyns.iter() {
                    self.collect_with_thunks_recursive(key, out);
                    self.collect_with_thunks_recursive(val, out);
                }
            }
            _ => {}
        }
    }
    /// Open a new lexical scope. When `has_arg` is set, local slot 0 is
    /// reserved for the function argument and thunk slots start at 1.
    fn push_scope(&mut self, has_arg: bool, arg_id: Option<ArgId>, thunk_ids: &[ThunkId]) {
        let depth = self.scope_stack.len() as u16;
        let thunk_base = if has_arg { 1u32 } else { 0u32 };
        let thunk_map = thunk_ids
            .iter()
            .enumerate()
            .map(|(i, &id)| (id, thunk_base + i as u32))
            .collect();
        self.scope_stack.push(ScopeInfo {
            depth,
            arg_id,
            thunk_map,
        });
    }
    fn pop_scope(&mut self) {
        self.scope_stack.pop();
    }
    /// Emit the program entry: allocate locals for all thunks visible to the
    /// root frame, materialize the scope's own thunks, evaluate the body,
    /// and `Return`.
    fn emit_toplevel(&mut self, ir: RawIrRef<'_>) {
        match ir.deref() {
            Ir::TopLevel { body, thunks } => {
                // NOTE(review): `total_slots` comes from `count_with_thunks`
                // while slot numbering comes from `collect_all_thunks`; see
                // the note on `count_with_thunks` about deeply nested `with`.
                let with_thunk_count = self.count_with_thunks(*body);
                let total_slots = thunks.len() + with_thunk_count;
                let all_thunks = self.collect_all_thunks(thunks, *body);
                let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();
                self.push_scope(false, None, &thunk_ids);
                if total_slots > 0 {
                    self.emit_op(Op::AllocLocals);
                    self.emit_u32(total_slots as u32);
                }
                self.emit_scope_thunks(thunks);
                self.emit_expr(*body);
                self.emit_op(Op::Return);
                self.pop_scope();
            }
            _ => {
                // Bare expression with no top-level bindings.
                self.push_scope(false, None, &[]);
                self.emit_expr(ir);
                self.emit_op(Op::Return);
                self.pop_scope();
            }
        }
    }
fn emit_toplevel_scoped(&mut self, ir: RawIrRef<'_>) {
match ir.deref() {
Ir::TopLevel { body, thunks } => {
let with_thunk_count = self.count_with_thunks(*body);
let total_slots = thunks.len() + with_thunk_count;
let all_thunks = self.collect_all_thunks(thunks, *body);
let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();
self.push_scope(false, None, &thunk_ids);
if total_slots > 0 {
self.emit_op(Op::AllocLocals);
self.emit_u32(total_slots as u32);
}
self.emit_scope_thunks(thunks);
self.emit_expr(*body);
self.emit_op(Op::Return);
self.pop_scope();
}
_ => {
self.push_scope(false, None, &[]);
self.emit_expr(ir);
self.emit_op(Op::Return);
self.pop_scope();
}
}
}
    /// Materialize each thunk of the current scope: the thunk body is emitted
    /// inline (guarded by a jump so straight-line execution skips it), then a
    /// `MakeThunk` pointing at that entry is stored into the thunk's local
    /// slot. The interned `e{id}` label presumably names the thunk for
    /// debugging — confirm against the VM's MakeThunk handler.
    fn emit_scope_thunks(&mut self, thunks: &[(ThunkId, RawIrRef<'_>)]) {
        for &(id, inner) in thunks {
            let label = format!("e{}", id.0);
            let label_idx = self.ctx.intern_string(&label);
            let skip_patch = self.emit_jump_placeholder();
            let entry_point = self.code.len() as u32;
            self.emit_expr(inner);
            self.emit_op(Op::Return);
            self.patch_jump_target(skip_patch);
            self.emit_op(Op::MakeThunk);
            self.emit_u32(entry_point);
            self.emit_u32(label_idx);
            let (_, local_idx) = self.resolve_thunk(id);
            self.emit_op(Op::StoreLocal);
            self.emit_u32(local_idx);
        }
    }
    /// Emit code that leaves the value of `ir` on top of the VM stack.
    /// Statement order matters: span registration and interning happen in
    /// emission order, and jump offsets are patched relative to positions
    /// captured mid-emission.
    fn emit_expr(&mut self, ir: RawIrRef<'_>) {
        match ir.deref() {
            &Ir::Int(x) => {
                let idx = self.ctx.intern_constant(Constant::Int(x));
                self.emit_op(Op::PushConst);
                self.emit_u32(idx);
            }
            &Ir::Float(x) => {
                // Interned by raw bit pattern; see `Constant::Float`.
                let idx = self.ctx.intern_constant(Constant::Float(x.to_bits()));
                self.emit_op(Op::PushConst);
                self.emit_u32(idx);
            }
            &Ir::Bool(true) => self.emit_op(Op::PushTrue),
            &Ir::Bool(false) => self.emit_op(Op::PushFalse),
            Ir::Null => self.emit_op(Op::PushNull),
            Ir::Str(s) => {
                let idx = self.ctx.intern_string(s.deref());
                self.emit_op(Op::PushString);
                self.emit_u32(idx);
            }
            &Ir::Path(p) => {
                // Evaluate the textual path, then resolve it against the
                // frame's current directory in the VM.
                self.emit_expr(p);
                self.emit_op(Op::ResolvePath);
            }
            &Ir::If { cond, consq, alter } => {
                // cond; JumpIfFalse ->else; consq; Jump ->end; else: alter; end:
                self.emit_expr(cond);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::JumpIfFalse);
                let else_placeholder = self.emit_i32_placeholder();
                let after_jif = self.code.len();
                self.emit_expr(consq);
                self.emit_op(Op::Jump);
                let end_placeholder = self.emit_i32_placeholder();
                let after_jump = self.code.len();
                let else_offset = (after_jump as i32) - (after_jif as i32);
                self.patch_i32(else_placeholder, else_offset);
                self.emit_expr(alter);
                let end_offset = (self.code.len() as i32) - (after_jump as i32);
                self.patch_i32(end_placeholder, end_offset);
            }
            &Ir::BinOp { lhs, rhs, kind } => {
                self.emit_binop(lhs, rhs, kind);
            }
            &Ir::UnOp { rhs, kind } => match kind {
                UnOpKind::Neg => {
                    self.emit_expr(rhs);
                    self.emit_op(Op::OpNeg);
                }
                UnOpKind::Not => {
                    self.emit_expr(rhs);
                    self.emit_op(Op::OpNot);
                }
            },
            &Ir::Func {
                body,
                ref param,
                arg,
                ref thunks,
            } => {
                self.emit_func(arg, thunks, param, body);
            }
            Ir::AttrSet { stcs, dyns } => {
                self.emit_attrset(stcs, dyns);
            }
            Ir::List { items } => {
                // Push every element, then fold them into a list.
                for &item in items.iter() {
                    self.emit_expr(item);
                }
                self.emit_op(Op::MakeList);
                self.emit_u32(items.len() as u32);
            }
            &Ir::Call { func, arg, span } => {
                // Stack order: function below argument (the VM pops arg first).
                self.emit_expr(func);
                self.emit_expr(arg);
                let span_id = self.ctx.register_span(span);
                self.emit_op(Op::Call);
                self.emit_u32(span_id);
            }
            &Ir::Arg(id) => {
                let (layer, local) = self.resolve_arg(id);
                self.emit_load(layer, local);
            }
            &Ir::TopLevel { body, ref thunks } => {
                self.emit_toplevel_inner(body, thunks);
            }
            &Ir::Select {
                expr,
                ref attrpath,
                default,
                span,
            } => {
                self.emit_select(expr, attrpath, default, span);
            }
            &Ir::Thunk(id) => {
                let (layer, local) = self.resolve_thunk(id);
                self.emit_load(layer, local);
            }
            Ir::Builtins => {
                self.emit_op(Op::LoadBuiltins);
            }
            &Ir::Builtin(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadBuiltin);
                self.emit_u32(idx);
            }
            &Ir::ConcatStrings {
                ref parts,
                force_string,
            } => {
                for &part in parts.iter() {
                    self.emit_expr(part);
                }
                self.emit_op(Op::ConcatStrings);
                self.emit_u16(parts.len() as u16);
                self.emit_u8(if force_string { 1 } else { 0 });
            }
            &Ir::HasAttr { lhs, ref rhs } => {
                self.emit_has_attr(lhs, rhs);
            }
            Ir::Assert {
                assertion,
                expr,
                assertion_raw,
                span,
            } => {
                // The raw assertion text is carried for the error message.
                let raw_idx = self.ctx.intern_string(assertion_raw);
                let span_id = self.ctx.register_span(*span);
                self.emit_expr(*assertion);
                self.emit_expr(*expr);
                self.emit_op(Op::Assert);
                self.emit_u32(raw_idx);
                self.emit_u32(span_id);
            }
            &Ir::CurPos(span) => {
                let span_id = self.ctx.register_span(span);
                self.emit_op(Op::MkPos);
                self.emit_u32(span_id);
            }
            &Ir::ReplBinding(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadReplBinding);
                self.emit_u32(idx);
            }
            &Ir::ScopedImportBinding(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadScopedBinding);
                self.emit_u32(idx);
            }
            &Ir::With {
                namespace,
                body,
                ref thunks,
            } => {
                self.emit_with(namespace, body, thunks);
            }
            &Ir::WithLookup(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::WithLookup);
                self.emit_u32(idx);
            }
        }
    }
    /// Emit a binary operation. `&&`, `||` and `->` short-circuit via
    /// conditional jumps; the pipe operators desugar to `CallNoSpan`; the
    /// remaining kinds map 1:1 onto VM opcodes with both operands on the
    /// stack (lhs below rhs).
    fn emit_binop(&mut self, lhs: RawIrRef<'_>, rhs: RawIrRef<'_>, kind: BinOpKind) {
        use BinOpKind::*;
        match kind {
            And => {
                // lhs false => skip rhs and push false.
                self.emit_expr(lhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::JumpIfFalse);
                let skip_placeholder = self.emit_i32_placeholder();
                let after_jif = self.code.len();
                self.emit_expr(rhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::Jump);
                let end_placeholder = self.emit_i32_placeholder();
                let after_jump = self.code.len();
                let false_offset = (after_jump as i32) - (after_jif as i32);
                self.patch_i32(skip_placeholder, false_offset);
                self.emit_op(Op::PushFalse);
                let end_offset = (self.code.len() as i32) - (after_jump as i32);
                self.patch_i32(end_placeholder, end_offset);
            }
            Or => {
                // lhs true => skip rhs and push true.
                self.emit_expr(lhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::JumpIfTrue);
                let skip_placeholder = self.emit_i32_placeholder();
                let after_jit = self.code.len();
                self.emit_expr(rhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::Jump);
                let end_placeholder = self.emit_i32_placeholder();
                let after_jump = self.code.len();
                let true_offset = (after_jump as i32) - (after_jit as i32);
                self.patch_i32(skip_placeholder, true_offset);
                self.emit_op(Op::PushTrue);
                let end_offset = (self.code.len() as i32) - (after_jump as i32);
                self.patch_i32(end_placeholder, end_offset);
            }
            Impl => {
                // `a -> b`: lhs false => result is true without evaluating rhs.
                self.emit_expr(lhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::JumpIfFalse);
                let skip_placeholder = self.emit_i32_placeholder();
                let after_jif = self.code.len();
                self.emit_expr(rhs);
                self.emit_op(Op::ForceBool);
                self.emit_op(Op::Jump);
                let end_placeholder = self.emit_i32_placeholder();
                let after_jump = self.code.len();
                let true_offset = (after_jump as i32) - (after_jif as i32);
                self.patch_i32(skip_placeholder, true_offset);
                self.emit_op(Op::PushTrue);
                let end_offset = (self.code.len() as i32) - (after_jump as i32);
                self.patch_i32(end_placeholder, end_offset);
            }
            PipeL => {
                // CallNoSpan pops arg first, then func: here func=rhs, arg=lhs.
                self.emit_expr(rhs);
                self.emit_expr(lhs);
                self.emit_op(Op::CallNoSpan);
            }
            PipeR => {
                // func=lhs, arg=rhs.
                self.emit_expr(lhs);
                self.emit_expr(rhs);
                self.emit_op(Op::CallNoSpan);
            }
            _ => {
                self.emit_expr(lhs);
                self.emit_expr(rhs);
                self.emit_op(match kind {
                    Add => Op::OpAdd,
                    Sub => Op::OpSub,
                    Mul => Op::OpMul,
                    Div => Op::OpDiv,
                    Eq => Op::OpEq,
                    Neq => Op::OpNeq,
                    Lt => Op::OpLt,
                    Gt => Op::OpGt,
                    Leq => Op::OpLeq,
                    Geq => Op::OpGeq,
                    Con => Op::OpConcat,
                    Upd => Op::OpUpdate,
                    _ => unreachable!(),
                });
            }
        }
    }
    /// Emit a function literal: the body is emitted inline behind a skip
    /// jump, then a `MakeClosure` (plain `x: body`) or `MakePatternClosure`
    /// (`{ a, b ? d, ... }: body`) referencing its entry point.
    fn emit_func(
        &mut self,
        arg: ArgId,
        thunks: &[(ThunkId, RawIrRef<'_>)],
        param: &Option<Param<'_>>,
        body: RawIrRef<'_>,
    ) {
        // Slot layout inside the closure frame: slot 0 = argument, then
        // one slot per thunk visible to the body.
        let with_thunk_count = self.count_with_thunks(body);
        let total_slots = thunks.len() + with_thunk_count;
        let all_thunks = self.collect_all_thunks(thunks, body);
        let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();
        let skip_patch = self.emit_jump_placeholder();
        let entry_point = self.code.len() as u32;
        self.push_scope(true, Some(arg), &thunk_ids);
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
        self.emit_op(Op::Return);
        self.pop_scope();
        self.patch_jump_target(skip_patch);
        if let Some(Param {
            required,
            optional,
            ellipsis,
        }) = param
        {
            self.emit_op(Op::MakePatternClosure);
            self.emit_u32(entry_point);
            self.emit_u32(total_slots as u32);
            self.emit_u16(required.len() as u16);
            self.emit_u16(optional.len() as u16);
            self.emit_u8(if *ellipsis { 1 } else { 0 });
            // First the required names, then the optional names ...
            for &(sym, _) in required.iter() {
                let name = self.ctx.get_sym(sym).to_string();
                let idx = self.ctx.intern_string(&name);
                self.emit_u32(idx);
            }
            for &(sym, _) in optional.iter() {
                let name = self.ctx.get_sym(sym).to_string();
                let idx = self.ctx.intern_string(&name);
                self.emit_u32(idx);
            }
            // ... then a second pass of (name, span) pairs — presumably the
            // VM's position table; must match the MakePatternClosure decoder.
            for &(sym, span) in required.iter().chain(optional.iter()) {
                let name = self.ctx.get_sym(sym).to_string();
                let name_idx = self.ctx.intern_string(&name);
                let span_id = self.ctx.register_span(span);
                self.emit_u32(name_idx);
                self.emit_u32(span_id);
            }
        } else {
            self.emit_op(Op::MakeClosure);
            self.emit_u32(entry_point);
            self.emit_u32(total_slots as u32);
        }
    }
fn emit_attrset(
&mut self,
stcs: &crate::ir::HashMap<'_, SymId, (RawIrRef<'_>, TextRange)>,
dyns: &[(RawIrRef<'_>, RawIrRef<'_>, TextRange)],
) {
if stcs.is_empty() && dyns.is_empty() {
self.emit_op(Op::MakeEmptyAttrs);
return;
}
if !dyns.is_empty() {
for (&sym, &(val, _)) in stcs.iter() {
let key = self.ctx.get_sym(sym).to_string();
let idx = self.ctx.intern_string(&key);
self.emit_op(Op::PushString);
self.emit_u32(idx);
self.emit_expr(val);
}
for (_, &(_, span)) in stcs.iter() {
let span_id = self.ctx.register_span(span);
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
self.emit_op(Op::PushConst);
self.emit_u32(idx);
}
for &(key, val, span) in dyns.iter() {
self.emit_expr(key);
self.emit_expr(val);
let span_id = self.ctx.register_span(span);
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
self.emit_op(Op::PushConst);
self.emit_u32(idx);
}
self.emit_op(Op::MakeAttrsDyn);
self.emit_u32(stcs.len() as u32);
self.emit_u32(dyns.len() as u32);
} else {
for (&sym, &(val, _)) in stcs.iter() {
let key = self.ctx.get_sym(sym).to_string();
let idx = self.ctx.intern_string(&key);
self.emit_op(Op::PushString);
self.emit_u32(idx);
self.emit_expr(val);
}
for (_, &(_, span)) in stcs.iter() {
let span_id = self.ctx.register_span(span);
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
self.emit_op(Op::PushConst);
self.emit_u32(idx);
}
self.emit_op(Op::MakeAttrs);
self.emit_u32(stcs.len() as u32);
}
}
fn emit_select(
&mut self,
expr: RawIrRef<'_>,
attrpath: &[Attr<RawIrRef<'_>>],
default: Option<RawIrRef<'_>>,
span: TextRange,
) {
self.emit_expr(expr);
for attr in attrpath.iter() {
match attr {
Attr::Str(sym, _) => {
let key = self.ctx.get_sym(*sym).to_string();
let idx = self.ctx.intern_string(&key);
self.emit_op(Op::PushString);
self.emit_u32(idx);
}
Attr::Dynamic(expr, _) => {
self.emit_expr(*expr);
}
}
}
if let Some(default) = default {
self.emit_expr(default);
let span_id = self.ctx.register_span(span);
self.emit_op(Op::SelectDefault);
self.emit_u16(attrpath.len() as u16);
self.emit_u32(span_id);
} else {
let span_id = self.ctx.register_span(span);
self.emit_op(Op::Select);
self.emit_u16(attrpath.len() as u16);
self.emit_u32(span_id);
}
}
fn emit_has_attr(&mut self, lhs: RawIrRef<'_>, rhs: &[Attr<RawIrRef<'_>>]) {
self.emit_expr(lhs);
for attr in rhs.iter() {
match attr {
Attr::Str(sym, _) => {
let key = self.ctx.get_sym(*sym).to_string();
let idx = self.ctx.intern_string(&key);
self.emit_op(Op::PushString);
self.emit_u32(idx);
}
Attr::Dynamic(expr, _) => {
self.emit_expr(*expr);
}
}
}
self.emit_op(Op::HasAttr);
self.emit_u16(rhs.len() as u16);
}
    /// Emit a `with namespace; body` expression: push the namespace onto the
    /// VM's with-chain, materialize the thunks the `with` introduced,
    /// evaluate the body, then pop the chain.
    fn emit_with(
        &mut self,
        namespace: RawIrRef<'_>,
        body: RawIrRef<'_>,
        thunks: &[(ThunkId, RawIrRef<'_>)],
    ) {
        self.emit_expr(namespace);
        self.emit_op(Op::PushWith);
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
        self.emit_op(Op::PopWith);
    }
    /// Emit a nested `TopLevel` node inside an existing frame: its thunks go
    /// into already-allocated local slots, so no new scope or `AllocLocals`
    /// is needed here.
    fn emit_toplevel_inner(&mut self, body: RawIrRef<'_>, thunks: &[(ThunkId, RawIrRef<'_>)]) {
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
    }
}

View File

@@ -1,6 +1,9 @@
use std::fmt::{self, Write as _}; use std::fmt::{self, Write as _};
use std::ops::Deref;
use std::path::Path; use std::path::Path;
use rnix::TextRange;
use crate::ir::*; use crate::ir::*;
use crate::value::Symbol; use crate::value::Symbol;
@@ -26,50 +29,23 @@ macro_rules! code {
}; };
} }
pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String { pub(crate) fn compile<const SCOPED: bool>(expr: RawIrRef<'_>, ctx: &impl CodegenContext) -> String {
let mut buf = CodeBuffer::with_capacity(8192); let mut buf = CodeBuffer::with_capacity(8192);
code!(&mut buf, ctx; "(()=>{"); code!(
&mut buf, ctx;
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() { "((" { if SCOPED { "_s" } else { "" } } ")=>{"
code!(&mut buf, ctx; "Nix.DEBUG_THUNKS.enabled=true;"); "const _d="
} quoted(&ctx.get_current_dir().display().to_string())
",_w=null;"
code!(&mut buf, ctx; "return " expr
"Nix.builtins.storeDir=" "})" { if SCOPED { "" } else { "()" } }
quoted(ctx.get_store_dir())
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string())
";const __with=null;return "
expr
"})()");
buf.into_string()
}
pub(crate) fn compile_scoped(expr: &Ir, ctx: &impl CodegenContext) -> String {
let mut buf = CodeBuffer::with_capacity(8192);
code!(&mut buf, ctx; "((__scope)=>{");
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
code!(&mut buf, ctx; "Nix.DEBUG_THUNKS.enabled=true;");
}
code!(&mut buf, ctx;
"Nix.builtins.storeDir="
quoted(ctx.get_store_dir())
";const __currentDir="
quoted(&ctx.get_current_dir().display().to_string())
";return "
expr
"})"
); );
buf.into_string() buf.into_string()
} }
pub(crate) struct CodeBuffer { struct CodeBuffer {
buf: String, buf: String,
} }
@@ -195,29 +171,16 @@ where
impl<Ctx: CodegenContext> Compile<Ctx> for rnix::TextRange { impl<Ctx: CodegenContext> Compile<Ctx> for rnix::TextRange {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
code!( code!(buf, "{}", ctx.register_span(*self));
buf,
"\"{}:{}:{}\"",
ctx.get_current_source_id(),
usize::from(self.start()),
usize::from(self.end())
);
} }
} }
pub(crate) trait CodegenContext { pub(crate) trait CodegenContext {
fn get_ir(&self, id: ExprId) -> &Ir;
fn get_sym(&self, id: SymId) -> Symbol<'_>; fn get_sym(&self, id: SymId) -> Symbol<'_>;
fn get_current_dir(&self) -> &Path; fn get_current_dir(&self) -> &Path;
fn get_store_dir(&self) -> &str; fn get_store_dir(&self) -> &str;
fn get_current_source_id(&self) -> usize; fn get_current_source_id(&self) -> usize;
fn get_current_source(&self) -> crate::error::Source; fn register_span(&self, range: rnix::TextRange) -> usize;
}
impl<Ctx: CodegenContext> Compile<Ctx> for ExprId {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
ctx.get_ir(*self).compile(ctx, buf);
}
} }
impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> { impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
@@ -226,466 +189,430 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for Ir { impl<Ctx: CodegenContext> Compile<Ctx> for RawIrRef<'_> {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
match self { match self.deref() {
Ir::Int(int) => { Ir::Int(int) => {
code!(buf, "{}n", int.inner); code!(buf, "{}n", int);
} }
Ir::Float(float) => { Ir::Float(float) => {
code!(buf, "{}", float.inner); code!(buf, "{}", float);
} }
Ir::Bool(bool) => { Ir::Bool(bool) => {
code!(buf, "{}", bool.inner); code!(buf, "{}", bool);
} }
Ir::Null(_) => { Ir::Null => {
code!(buf, ctx; "null"); code!(buf, ctx; "null");
} }
Ir::Str(s) => { Ir::Str(s) => {
code!(buf, ctx; quoted(&s.val)); code!(buf, ctx; quoted(s));
} }
Ir::Path(p) => { Ir::Path(p) => {
code!(buf, ctx; "Nix.resolvePath(__currentDir," ctx.get_ir(p.expr) ")"); // Nix.resolvePath
code!(buf, ctx; "$r(_d," p ")");
} }
Ir::If(x) => x.compile(ctx, buf), Ir::If { cond, consq, alter } => {
Ir::BinOp(x) => x.compile(ctx, buf), code!(buf, ctx; "$fb(" cond ")?(" consq "):(" alter ")");
Ir::UnOp(x) => x.compile(ctx, buf),
Ir::Func(x) => x.compile(ctx, buf),
Ir::AttrSet(x) => x.compile(ctx, buf),
Ir::List(x) => x.compile(ctx, buf),
Ir::Call(x) => x.compile(ctx, buf),
Ir::Arg(x) => {
code!(buf, "arg{}", x.inner.0);
} }
Ir::TopLevel(x) => x.compile(ctx, buf), &Ir::BinOp { lhs, rhs, kind } => compile_binop(lhs, rhs, kind, ctx, buf),
Ir::Select(x) => x.compile(ctx, buf), &Ir::UnOp { rhs, kind } => compile_unop(rhs, kind, ctx, buf),
&Ir::Thunk(Thunk { inner: expr_id, .. }) => { &Ir::Func {
code!(buf, "expr{}", expr_id.0); body,
} ref param,
Ir::Builtins(_) => { arg,
code!(buf, ctx; "Nix.builtins"); ref thunks,
} } => compile_func(arg, thunks, param, body, ctx, buf),
&Ir::Builtin(Builtin { inner: name, .. }) => { Ir::AttrSet { stcs, dyns } => compile_attrset(stcs, dyns, ctx, buf),
Ir::List { items } => compile_list(items, ctx, buf),
Ir::Call { func, arg, span } => {
code!(buf, ctx; code!(buf, ctx;
"Nix.builtins[" "$c("
ctx.get_sym(name) func
"]" ","
arg
","
span
")"
); );
} }
Ir::ConcatStrings(x) => x.compile(ctx, buf), Ir::Arg(x) => {
Ir::HasAttr(x) => x.compile(ctx, buf), code!(buf, "a{}", x.0);
&Ir::Assert(Assert { }
&Ir::TopLevel { body, ref thunks } => compile_toplevel(body, thunks, ctx, buf),
&Ir::Select {
expr,
ref attrpath,
default,
span,
} => compile_select(expr, attrpath, default, span, ctx, buf),
Ir::Thunk(ThunkId(id)) => {
code!(buf, "e{}", id);
}
Ir::Builtins => {
// Nix.builtins
code!(buf, ctx; "$b");
}
&Ir::Builtin(name) => {
// Nix.builtins
code!(buf, ctx; "$b.get(" ctx.get_sym(name) ")");
}
&Ir::ConcatStrings {
ref parts,
force_string,
} => compile_concat_strings(parts, force_string, ctx, buf),
&Ir::HasAttr { lhs, ref rhs } => compile_has_attr(lhs, rhs, ctx, buf),
Ir::Assert {
assertion, assertion,
expr, expr,
ref assertion_raw, assertion_raw,
span: assert_span, span: assert_span,
}) => { } => {
let assertion_ir = ctx.get_ir(assertion); // Nix.assert
let assertion_span = assertion_ir.span();
code!(buf, ctx; code!(buf, ctx;
"Nix.assert(Nix.withContext(\"while evaluating the condition of the assert statement\"," "$a("
assertion_span assertion
",()=>(" ","
assertion_ir expr
"))," ","
ctx.get_ir(expr) quoted(assertion_raw)
"," ","
quoted(assertion_raw) assert_span
","
assert_span
")" ")"
); );
} }
Ir::CurPos(cur_pos) => { Ir::CurPos(span) => {
code!(buf, ctx; // Nix.mkPos
"Nix.mkPos(" code!(buf, ctx; "$mp(" span ")");
cur_pos.span
")"
);
} }
&Ir::ReplBinding(ReplBinding { inner: name, .. }) => { &Ir::ReplBinding(name) => {
code!(buf, ctx; // Nix.getReplBinding
"Nix.getReplBinding(" code!(buf, ctx; "$gb(" ctx.get_sym(name) ")");
ctx.get_sym(name)
")"
);
} }
&Ir::ScopedImportBinding(ScopedImportBinding { inner: name, .. }) => { &Ir::ScopedImportBinding(name) => {
code!(buf, ctx; code!(buf, ctx; "_s.get(" ctx.get_sym(name) ")");
"__scope["
ctx.get_sym(name)
"]"
);
} }
Ir::WithExpr(x) => x.compile(ctx, buf), &Ir::With {
&Ir::WithLookup(WithLookup { inner: name, .. }) => { namespace,
code!(buf, ctx; body,
"Nix.lookupWith(" ref thunks,
ctx.get_sym(name) } => compile_with(namespace, body, thunks, ctx, buf),
",__with)" &Ir::WithLookup(name) => {
); // Nix.lookupWith
code!(buf, ctx; "$l(" ctx.get_sym(name) ",_w)");
} }
} }
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for If { fn compile_binop<'ir>(
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { lhs: RawIrRef<'ir>,
let &If { rhs: RawIrRef<'ir>,
cond, kind: BinOpKind,
consq, ctx: &impl CodegenContext,
alter, buf: &mut CodeBuffer,
span: _, ) {
} = self; use BinOpKind::*;
let cond_ir = ctx.get_ir(cond); match kind {
let cond_span = cond_ir.span(); Add | Sub | Mul | Div | Eq | Neq | Lt | Gt | Leq | Geq | Con | Upd => {
let op_func = match kind {
Add => "$oa",
Sub => "$os",
Mul => "$om",
Div => "$od",
Eq => "$oe",
Neq => "!$oe",
Lt => "$ol",
Gt => "$og",
Leq => "!$og",
Geq => "!$ol",
Con => "$oc",
Upd => "$ou",
_ => unreachable!(),
};
code!(buf, ctx; code!(
"(Nix.withContext(\"while evaluating a branch condition\"," cond_span ",()=>Nix.forceBool(" cond_ir ")))" buf, ctx;
"?(" consq "):(" alter ")" op_func "(" lhs "," rhs ")"
);
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
use BinOpKind::*;
let lhs = ctx.get_ir(self.lhs);
let rhs = ctx.get_ir(self.rhs);
match self.kind {
Add | Sub | Mul | Div | Eq | Neq | Lt | Gt | Leq | Geq | Con | Upd => {
let op_name = match self.kind {
Add => "+",
Sub => "-",
Mul => "*",
Div => "/",
Eq => "==",
Neq => "!=",
Lt => "<",
Gt => ">",
Leq => "<=",
Geq => ">=",
Con => "++",
Upd => "//",
_ => unreachable!(),
};
let op_func = match self.kind {
Add => "Nix.op.add",
Sub => "Nix.op.sub",
Mul => "Nix.op.mul",
Div => "Nix.op.div",
Eq => "Nix.op.eq",
Neq => "Nix.op.neq",
Lt => "Nix.op.lt",
Gt => "Nix.op.gt",
Leq => "Nix.op.lte",
Geq => "Nix.op.gte",
Con => "Nix.op.concat",
Upd => "Nix.op.update",
_ => unreachable!(),
};
code!(
buf, ctx;
"Nix.withContext(\"while evaluating the " op_name " operator\"," self.span ",()=>(" op_func "(" lhs "," rhs ")))"
);
}
And => {
code!(
buf, ctx;
"Nix.withContext(\"while evaluating the && operator\"," self.span ",()=>(Nix.forceBool(" lhs ")&&Nix.forceBool(" rhs ")))"
);
}
Or => {
code!(
buf, ctx;
"Nix.withContext(\"while evaluating the || operator\"," self.span ",()=>(Nix.forceBool(" lhs ")||Nix.forceBool(" rhs ")))"
);
}
Impl => {
code!(
buf, ctx;
"Nix.withContext(\"while evaluating the -> operator\"," self.span ",()=>(!Nix.forceBool(" lhs ")||Nix.forceBool(" rhs ")))"
);
}
PipeL => {
code!(buf, ctx; "Nix.call(" rhs "," lhs ")");
}
PipeR => {
code!(buf, ctx; "Nix.call(" lhs "," rhs ")");
}
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for UnOp {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
use UnOpKind::*;
let rhs = ctx.get_ir(self.rhs);
match self.kind {
Neg => {
code!(buf, ctx; "Nix.op.sub(0n," rhs ")");
}
Not => {
code!(buf, ctx; "Nix.op.bnot(" ctx.get_ir(self.rhs) ")");
}
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for Func {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().inner.0;
let has_thunks = !self.thunks.is_empty();
if let Some(Param {
required,
optional,
ellipsis,
}) = &self.param
{
code!(buf, "Nix.mkFunction(arg{}=>", id);
if has_thunks {
code!(buf, ctx; "{" self.thunks "return " self.body "}");
} else {
code!(buf, ctx; "(" self.body ")");
}
code!(buf, ctx;
",["
joined(required.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
code!(buf, ctx; ctx.get_sym(sym));
})
"],["
joined(optional.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
code!(buf, ctx; ctx.get_sym(sym));
})
"],{"
joined(required.iter().chain(optional.iter()), ",", |ctx: &Ctx, buf, &(sym, span)| {
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"},"
ellipsis
")"
); );
} else { }
code!(buf, "arg{}=>", id); And => {
if has_thunks { code!(
code!(buf, ctx; "{" self.thunks "return " self.body "}"); buf, ctx;
} else { "$fb(" lhs ")" "&&" "$fb(" rhs ")"
code!(buf, ctx; "(" self.body ")"); );
} }
Or => {
code!(
buf, ctx;
"$fb(" lhs ")" "||" "$fb(" rhs ")"
);
}
Impl => {
code!(
buf, ctx;
"!$fb(" lhs ")" "||" "$fb(" rhs ")"
);
}
PipeL => {
code!(buf, ctx; "$c(" rhs "," lhs ")");
}
PipeR => {
code!(buf, ctx; "$c(" lhs "," rhs ")");
} }
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for Call { fn compile_unop(
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { rhs: RawIrRef<'_>,
kind: UnOpKind,
ctx: &impl CodegenContext,
buf: &mut CodeBuffer,
) {
use UnOpKind::*;
match kind {
Neg => {
// 0 - rhs
code!(buf, ctx; "$os(0n," rhs ")");
}
Not => {
code!(buf, ctx; "!$fb(" rhs ")");
}
}
}
fn compile_func<'ir, Ctx: CodegenContext>(
ArgId(id): ArgId,
thunks: &[(ThunkId, RawIrRef<'ir>)],
param: &Option<Param<'ir>>,
body: RawIrRef<'ir>,
ctx: &Ctx,
buf: &mut CodeBuffer,
) {
let has_thunks = !thunks.is_empty();
if let Some(Param {
required,
optional,
ellipsis,
}) = &param
{
code!(buf, "$mf(a{}=>", id);
if has_thunks {
code!(buf, ctx; "{" thunks "return " body "}");
} else {
code!(buf, ctx; "(" body ")");
}
code!(buf, ctx; code!(buf, ctx;
"Nix.call(" ",["
ctx.get_ir(self.func) joined(required.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
"," code!(buf, ctx; ctx.get_sym(sym));
ctx.get_ir(self.arg) })
"," "],["
self.span joined(optional.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
code!(buf, ctx; ctx.get_sym(sym));
})
"],new Map(["
joined(required.iter().chain(optional.iter()), ",", |ctx: &Ctx, buf, &(sym, span)| {
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
})
"]),"
ellipsis
")" ")"
); );
} else {
code!(buf, "a{}=>", id);
if has_thunks {
code!(buf, ctx; "{" thunks "return " body "}");
} else {
code!(buf, ctx; "(" body ")");
}
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for [(ExprId, ExprId)] { impl<'ir, Ctx: CodegenContext> Compile<Ctx> for [(ThunkId, RawIrRef<'ir>)] {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
if self.is_empty() { if self.is_empty() {
return; return;
} }
for &(slot, inner) in self { code!(
let inner_ir = ctx.get_ir(inner); buf, ctx;
let inner_span = inner_ir.span(); "const "
joined(self.iter(), ",", |ctx: &Ctx, buf, &(slot, inner)| {
code!( code!(buf, ctx; "e" slot.0 "=$t(()=>(" inner ")," "'e" slot.0 "')");
buf, ctx;
"let expr" slot.0 "=Nix.createThunk(()=>(" inner_ir "),"
"\"expr" slot.0 " "
ctx.get_current_source().get_name() ":"
usize::from(inner_span.start()) ":"
usize::from(inner_span.end())
"\");"
);
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for TopLevel {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
if self.thunks.is_empty() {
ctx.get_ir(self.body).compile(ctx, buf);
} else {
let body = ctx.get_ir(self.body);
code!(buf, ctx; "(()=>{" self.thunks "return " body "})()");
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for WithExpr {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
let namespace = ctx.get_ir(self.namespace);
let body = ctx.get_ir(self.body);
let has_thunks = !self.thunks.is_empty();
if has_thunks {
code!(buf, ctx; "((__with)=>{" self.thunks "return " body "})({env:" namespace ",last:__with})");
} else {
code!(buf, ctx; "((__with)=>(" body "))({env:" namespace ",last:__with})");
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for Select {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
if let Some(default) = self.default {
code!(buf, ctx;
"Nix.selectWithDefault("
ctx.get_ir(self.expr)
",["
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
}
})
"],"
ctx.get_ir(default)
","
self.span
")"
);
} else {
code!(buf, ctx;
"Nix.select("
ctx.get_ir(self.expr)
",["
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
}
})
"],"
self.span
")"
);
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
if !self.dyns.is_empty() {
code!(buf, ctx;
"Nix.mkAttrsWithPos({"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(expr, _))| {
let key = ctx.get_sym(sym);
let val = ctx.get_ir(expr);
code!(
buf, ctx;
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
);
})
"},{"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"},{dynKeys:["
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (key, _, _)| {
code!(buf, ctx; ctx.get_ir(*key));
})
"],dynVals:["
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (_, val, _)| {
let val = ctx.get_ir(*val);
code!(
buf, ctx;
"Nix.withContext(\"while evaluating a dynamic attribute\"," val.span() ",()=>(" val "))"
);
})
"],dynSpans:["
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (_, _, attr_span)| {
code!(buf, ctx; attr_span);
})
"]})"
);
} else if !self.stcs.is_empty() {
code!(buf, ctx;
"Nix.mkAttrsWithPos({"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(expr, _))| {
let key = ctx.get_sym(sym);
let val = ctx.get_ir(expr);
code!(
buf, ctx;
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
);
})
"},{"
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; ctx.get_sym(sym) ":" span);
})
"})"
);
} else {
code!(buf, ctx; "{}");
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for List {
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
code!(buf, ctx;
"["
joined(self.items.iter().enumerate(), ",", |ctx: &Ctx, buf, (idx, item)| {
let item = ctx.get_ir(*item);
code!(
buf, ctx;
"Nix.withContext(\"while evaluating list element " idx "\"," item.span() ",()=>(" item "))"
);
}) })
"]" ";"
); );
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for ConcatStrings { fn compile_toplevel<'ir, Ctx: CodegenContext>(
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { body: RawIrRef<'ir>,
code!(buf, ctx; thunks: &[(ThunkId, RawIrRef<'ir>)],
"Nix.concatStringsWithContext([" ctx: &Ctx,
joined(self.parts.iter(), ",", |ctx: &Ctx, buf, part| { buf: &mut CodeBuffer,
let part = ctx.get_ir(*part); ) {
code!( if thunks.is_empty() {
buf, ctx; body.compile(ctx, buf);
"Nix.withContext(\"while evaluating a path segment\"," part.span() ",()=>(" part "))" } else {
); code!(buf, ctx; "(()=>{" thunks "return " body "})()");
})
"]," self.force_string ")"
);
} }
} }
impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr { fn compile_with<'ir>(
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) { namespace: RawIrRef<'ir>,
body: RawIrRef<'ir>,
thunks: &[(ThunkId, RawIrRef<'ir>)],
ctx: &impl CodegenContext,
buf: &mut CodeBuffer,
) {
let has_thunks = !thunks.is_empty();
if has_thunks {
code!(buf, ctx; "((_w)=>{" thunks "return " body "})({env:" namespace ",last:_w})");
} else {
code!(buf, ctx; "((_w)=>(" body "))({env:" namespace ",last:_w})");
}
}
fn compile_select<'ir, Ctx: CodegenContext>(
expr: RawIrRef<'ir>,
attrpath: &[Attr<RawIrRef<'ir>>],
default: Option<RawIrRef<'ir>>,
span: TextRange,
ctx: &Ctx,
buf: &mut CodeBuffer,
) {
if let Some(default) = default {
code!(buf, ctx; code!(buf, ctx;
"Nix.hasAttr(" "$sd("
ctx.get_ir(self.lhs) expr
",[" ",["
joined(self.rhs.iter(), ",", |ctx: &Ctx, buf, attr| { joined(attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr { match attr {
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)), Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)), Attr::Dynamic(expr_id, _) => code!(buf, ctx; *expr_id),
} }
}) })
"])" "],"
default
","
span
")"
);
} else {
code!(buf, ctx;
"$s("
expr
",["
joined(attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr, _) => code!(buf, ctx; expr),
}
})
"],"
span
")"
); );
} }
} }
fn compile_attrset<'ir, Ctx: CodegenContext>(
stcs: &HashMap<'ir, SymId, (RawIrRef<'ir>, TextRange)>,
dyns: &[(RawIrRef<'ir>, RawIrRef<'ir>, TextRange)],
ctx: &Ctx,
buf: &mut CodeBuffer,
) {
if !dyns.is_empty() {
code!(buf, ctx;
"$ma(new Map(["
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(val, _))| {
let key = ctx.get_sym(sym);
code!(
buf, ctx;
"[" key "," val "]"
);
})
"]),new Map(["
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
})
"]),{dynKeys:["
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (key, _, _)| {
code!(buf, ctx; key);
})
"],dynVals:["
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (_, val, _)| {
code!(buf, ctx; val);
})
"],dynSpans:["
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (_, _, attr_span)| {
code!(buf, ctx; attr_span);
})
"]})"
);
} else if !stcs.is_empty() {
code!(buf, ctx;
"$ma(new Map(["
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(val, _))| {
let key = ctx.get_sym(sym);
code!(
buf, ctx;
"[" key "," val "]"
);
})
"]),new Map(["
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
})
"]))"
);
} else {
code!(buf, ctx; "$e");
}
}
fn compile_list<Ctx: CodegenContext>(items: &[RawIrRef<'_>], ctx: &Ctx, buf: &mut CodeBuffer) {
code!(buf, ctx;
"["
joined(items.iter(), ",", |ctx: &Ctx, buf, item| {
code!(buf, ctx; item);
})
"]"
);
}
fn compile_concat_strings<Ctx: CodegenContext>(
parts: &[RawIrRef<'_>],
force_string: bool,
ctx: &Ctx,
buf: &mut CodeBuffer,
) {
code!(buf, ctx;
"$cs(["
joined(parts.iter(), ",", |ctx: &Ctx, buf, part| {
code!(buf, ctx; part);
})
"]," force_string ")"
);
}
fn compile_has_attr<'ir, Ctx: CodegenContext>(
lhs: RawIrRef<'ir>,
rhs: &[Attr<RawIrRef<'ir>>],
ctx: &Ctx,
buf: &mut CodeBuffer,
) {
code!(buf, ctx;
"$h("
lhs
",["
joined(rhs.iter(), ",", |ctx: &Ctx, buf, attr| {
match attr {
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
Attr::Dynamic(expr, _) => code!(buf, ctx; expr),
}
})
"])"
);
}

View File

@@ -1,18 +1,22 @@
use std::cell::UnsafeCell;
use std::hash::BuildHasher;
use std::path::Path; use std::path::Path;
use std::ptr::NonNull;
use hashbrown::{HashMap, HashSet}; use bumpalo::Bump;
use ghost_cell::{GhostCell, GhostToken};
use hashbrown::{DefaultHashBuilder, HashMap, HashSet, HashTable};
use rnix::TextRange; use rnix::TextRange;
use string_interner::DefaultStringInterner; use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, compile, compile_scoped}; use crate::bytecode::{self, Bytecode, BytecodeContext, Constant};
use crate::codegen::{CodegenContext, compile};
use crate::disassembler::{Disassembler, DisassemblerContext};
use crate::downgrade::*; use crate::downgrade::*;
use crate::error::{Error, Result, Source}; use crate::error::{Error, Result, Source};
use crate::ir::{ use crate::ir::{ArgId, Ir, IrKey, IrRef, RawIrRef, SymId, ThunkId, ir_content_eq};
Arg, ArgId, Bool, Builtin, ExprId, Ir, Null, ReplBinding, ScopedImportBinding, SymId, Thunk, #[cfg(feature = "inspector")]
ToIr as _, WithLookup, use crate::runtime::inspector::InspectorServer;
}; use crate::runtime::{ForceMode, Runtime, RuntimeContext};
use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{DaemonStore, Store, StoreConfig}; use crate::store::{DaemonStore, Store, StoreConfig};
use crate::value::{Symbol, Value}; use crate::value::{Symbol, Value};
@@ -47,18 +51,20 @@ fn handle_parse_error<'a>(
pub struct Context { pub struct Context {
ctx: Ctx, ctx: Ctx,
runtime: Runtime<Ctx>, runtime: Runtime<Ctx>,
#[cfg(feature = "inspector")]
_inspector_server: Option<InspectorServer>,
} }
macro_rules! eval { macro_rules! eval_bc {
($name:ident, $wrapper:literal) => { ($name:ident, $mode:expr) => {
pub fn $name(&mut self, source: Source) -> Result<Value> { pub fn $name(&mut self, source: Source) -> Result<Value> {
tracing::info!("Starting evaluation"); tracing::info!("Starting evaluation");
tracing::debug!("Compiling code"); tracing::debug!("Compiling bytecode");
let code = self.compile(source)?; let bytecode = self.ctx.compile_bytecode(source)?;
tracing::debug!("Executing JavaScript"); tracing::debug!("Executing bytecode");
self.runtime.eval(format!($wrapper, code), &mut self.ctx) self.runtime.eval_bytecode(bytecode, &mut self.ctx, $mode)
} }
}; };
} }
@@ -66,30 +72,76 @@ macro_rules! eval {
impl Context { impl Context {
pub fn new() -> Result<Self> { pub fn new() -> Result<Self> {
let ctx = Ctx::new()?; let ctx = Ctx::new()?;
#[cfg(feature = "inspector")]
let runtime = Runtime::new(Default::default())?;
#[cfg(not(feature = "inspector"))]
let runtime = Runtime::new()?; let runtime = Runtime::new()?;
let mut context = Self { ctx, runtime }; let mut context = Self {
context.init_derivation()?; ctx,
runtime,
#[cfg(feature = "inspector")]
_inspector_server: None,
};
context.init()?;
Ok(context) Ok(context)
} }
fn init_derivation(&mut self) -> Result<()> { #[cfg(feature = "inspector")]
pub fn new_with_inspector(addr: std::net::SocketAddr, wait_for_session: bool) -> Result<Self> {
use crate::runtime::InspectorOptions;
let ctx = Ctx::new()?;
let runtime = Runtime::new(InspectorOptions {
enable: true,
wait: wait_for_session,
})?;
let server = crate::runtime::inspector::InspectorServer::new(addr, "nix-js")
.map_err(|e| Error::internal(e.to_string()))?;
server.register_inspector("nix-js".to_string(), runtime.inspector(), wait_for_session);
let mut context = Self {
ctx,
runtime,
_inspector_server: Some(server),
};
context.init()?;
Ok(context)
}
#[cfg(feature = "inspector")]
pub fn wait_for_inspector_disconnect(&mut self) {
self.runtime.wait_for_inspector_disconnect();
}
fn init(&mut self) -> Result<()> {
const DERIVATION_NIX: &str = include_str!("runtime/corepkgs/derivation.nix"); const DERIVATION_NIX: &str = include_str!("runtime/corepkgs/derivation.nix");
let source = Source::new_virtual( let source = Source::new_virtual(
"<nix/derivation-internal.nix>".into(), "<nix/derivation-internal.nix>".into(),
DERIVATION_NIX.to_string(), DERIVATION_NIX.to_string(),
); );
let code = self.ctx.compile(source, None)?; let code = self.ctx.compile(source, None)?;
self.runtime self.runtime.eval(
.eval(format!("Nix.builtins.derivation = {}", code), &mut self.ctx)?; format!(
"Nix.builtins.set('derivation',({}));Nix.builtins.set('storeDir','{}');{}0n",
code,
self.get_store_dir(),
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
"Nix.DEBUG_THUNKS.enabled=true;"
} else {
""
}
),
&mut self.ctx,
)?;
Ok(()) Ok(())
} }
eval!(eval, "Nix.force({})"); eval_bc!(eval, ForceMode::Force);
eval!(eval_shallow, "Nix.forceShallow({})"); eval_bc!(eval_shallow, ForceMode::ForceShallow);
eval!(eval_deep, "Nix.forceDeep({})"); eval_bc!(eval_deep, ForceMode::ForceDeep);
pub fn eval_repl<'a>(&'a mut self, source: Source, scope: &'a HashSet<SymId>) -> Result<Value> { pub fn eval_repl<'a>(&'a mut self, source: Source, scope: &'a HashSet<SymId>) -> Result<Value> {
tracing::info!("Starting evaluation"); tracing::info!("Starting evaluation");
@@ -105,6 +157,18 @@ impl Context {
self.ctx.compile(source, None) self.ctx.compile(source, None)
} }
pub fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
self.ctx.compile_bytecode(source)
}
pub fn disassemble(&self, bytecode: &Bytecode) -> String {
Disassembler::new(bytecode, &self.ctx).disassemble()
}
pub fn disassemble_colored(&self, bytecode: &Bytecode) -> String {
Disassembler::new(bytecode, &self.ctx).disassemble_colored()
}
pub fn get_store_dir(&self) -> &str { pub fn get_store_dir(&self) -> &str {
self.ctx.get_store_dir() self.ctx.get_store_dir()
} }
@@ -130,32 +194,44 @@ impl Context {
} }
} }
pub(crate) struct Ctx { struct Ctx {
irs: Vec<Ir>,
symbols: DefaultStringInterner, symbols: DefaultStringInterner,
global: NonNull<HashMap<SymId, ExprId>>, global: HashMap<SymId, Ir<'static, RawIrRef<'static>>>,
sources: Vec<Source>, sources: Vec<Source>,
store: DaemonStore, store: DaemonStore,
spans: UnsafeCell<Vec<(usize, TextRange)>>,
thunk_count: usize,
global_strings: Vec<String>,
global_string_map: HashMap<String, u32>,
global_constants: Vec<Constant>,
global_constant_map: HashMap<Constant, u32>,
synced_strings: usize,
synced_constants: usize,
}
/// Owns the bump allocator and a read-only reference into it.
///
/// # Safety
/// The `ir` field points into `_bump`'s storage. We use `'static` as a sentinel
/// lifetime because the struct owns the backing memory. The `as_ref` method
/// re-binds the lifetime to `&self`, preventing use-after-free.
struct OwnedIr {
_bump: Bump,
ir: RawIrRef<'static>,
}
impl OwnedIr {
fn as_ref(&self) -> RawIrRef<'_> {
self.ir
}
} }
impl Ctx { impl Ctx {
fn new() -> Result<Self> { fn new() -> Result<Self> {
use crate::ir::{Builtins, ToIr as _};
let mut symbols = DefaultStringInterner::new(); let mut symbols = DefaultStringInterner::new();
let mut irs = Vec::new();
let mut global = HashMap::new(); let mut global = HashMap::new();
irs.push(
Builtins {
span: rnix::TextRange::default(),
}
.to_ir(),
);
let builtins_expr = ExprId(0);
let builtins_sym = symbols.get_or_intern("builtins"); let builtins_sym = symbols.get_or_intern("builtins");
global.insert(builtins_sym, builtins_expr); global.insert(builtins_sym, Ir::Builtins);
let free_globals = [ let free_globals = [
"abort", "abort",
@@ -179,48 +255,19 @@ impl Ctx {
"toString", "toString",
]; ];
let consts = [ let consts = [
( ("true", Ir::Bool(true)),
"true", ("false", Ir::Bool(false)),
Bool { ("null", Ir::Null),
inner: true,
span: rnix::TextRange::default(),
}
.to_ir(),
),
(
"false",
Bool {
inner: false,
span: rnix::TextRange::default(),
}
.to_ir(),
),
(
"null",
Null {
span: rnix::TextRange::default(),
}
.to_ir(),
),
]; ];
for name in free_globals { for name in free_globals {
let name_sym = symbols.get_or_intern(name); let name = symbols.get_or_intern(name);
let id = ExprId(irs.len()); let value = Ir::Builtin(name);
irs.push( global.insert(name, value);
Builtin {
inner: name_sym,
span: rnix::TextRange::default(),
}
.to_ir(),
);
global.insert(name_sym, id);
} }
for (name, value) in consts { for (name, value) in consts {
let name_sym = symbols.get_or_intern(name); let name = symbols.get_or_intern(name);
let id = ExprId(irs.len()); global.insert(name, value);
irs.push(value);
global.insert(name_sym, id);
} }
let config = StoreConfig::from_env(); let config = StoreConfig::from_env();
@@ -228,19 +275,39 @@ impl Ctx {
Ok(Self { Ok(Self {
symbols, symbols,
irs, global,
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(global))) },
sources: Vec::new(), sources: Vec::new(),
store, store,
spans: UnsafeCell::new(Vec::new()),
thunk_count: 0,
global_strings: Vec::new(),
global_string_map: HashMap::new(),
global_constants: Vec::new(),
global_constant_map: HashMap::new(),
synced_strings: 0,
synced_constants: 0,
}) })
} }
fn downgrade_ctx<'a>(&'a mut self, extra_scope: Option<Scope<'a>>) -> DowngradeCtx<'a> { fn downgrade_ctx<'ctx, 'id, 'ir>(
let global_ref = unsafe { self.global.as_ref() }; &'ctx mut self,
DowngradeCtx::new(self, global_ref, extra_scope) bump: &'ir Bump,
token: GhostToken<'id>,
extra_scope: Option<Scope<'ctx>>,
) -> DowngradeCtx<'ctx, 'id, 'ir> {
let source = self.get_current_source();
DowngradeCtx::new(
bump,
token,
&mut self.symbols,
&self.global,
extra_scope,
&mut self.thunk_count,
source,
)
} }
pub(crate) fn get_current_dir(&self) -> &Path { fn get_current_dir(&self) -> &Path {
self.sources self.sources
.last() .last()
.as_ref() .as_ref()
@@ -248,18 +315,18 @@ impl Ctx {
.get_dir() .get_dir()
} }
pub(crate) fn get_current_source(&self) -> Source { fn get_current_source(&self) -> Source {
self.sources self.sources
.last() .last()
.expect("current_source is not set") .expect("current_source is not set")
.clone() .clone()
} }
pub(crate) fn get_source(&self, id: usize) -> Source { fn downgrade<'ctx>(
self.sources.get(id).expect("source not found").clone() &'ctx mut self,
} source: Source,
extra_scope: Option<Scope<'ctx>>,
fn downgrade<'a>(&mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<ExprId> { ) -> Result<OwnedIr> {
tracing::debug!("Parsing Nix expression"); tracing::debug!("Parsing Nix expression");
self.sources.push(source.clone()); self.sources.push(source.clone());
@@ -272,18 +339,29 @@ impl Ctx {
.tree() .tree()
.expr() .expr()
.ok_or_else(|| Error::parse_error("unexpected EOF".into()))?; .ok_or_else(|| Error::parse_error("unexpected EOF".into()))?;
self.downgrade_ctx(extra_scope).downgrade(expr) let bump = Bump::new();
GhostToken::new(|token| {
let ir = self
.downgrade_ctx(&bump, token, extra_scope)
.downgrade_toplevel(expr)?;
let ir = unsafe { std::mem::transmute::<RawIrRef<'_>, RawIrRef<'static>>(ir) };
Ok(OwnedIr { _bump: bump, ir })
})
} }
fn compile<'a>(&'a mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<String> { fn compile<'ctx>(
&'ctx mut self,
source: Source,
extra_scope: Option<Scope<'ctx>>,
) -> Result<String> {
let root = self.downgrade(source, extra_scope)?; let root = self.downgrade(source, extra_scope)?;
tracing::debug!("Generating JavaScript code"); tracing::debug!("Generating JavaScript code");
let code = compile(self.get_ir(root), self); let code = compile::<false>(root.as_ref(), self);
tracing::debug!("Generated code: {}", &code); tracing::debug!("Generated code: {}", &code);
Ok(code) Ok(code)
} }
pub(crate) fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> { fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
let scope = Scope::ScopedImport( let scope = Scope::ScopedImport(
scope scope
.into_iter() .into_iter()
@@ -292,16 +370,33 @@ impl Ctx {
); );
let root = self.downgrade(source, Some(scope))?; let root = self.downgrade(source, Some(scope))?;
tracing::debug!("Generating JavaScript code for scoped import"); tracing::debug!("Generating JavaScript code for scoped import");
let code = compile_scoped(self.get_ir(root), self); let code = compile::<true>(root.as_ref(), self);
tracing::debug!("Generated scoped code: {}", &code); tracing::debug!("Generated scoped code: {}", &code);
Ok(code) Ok(code)
} }
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
let root = self.downgrade(source, None)?;
tracing::debug!("Generating bytecode");
let bytecode = bytecode::compile_bytecode(root.as_ref(), self);
tracing::debug!("Compiled bytecode: {:#04X?}", bytecode.code);
Ok(bytecode)
}
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode> {
let scope = Scope::ScopedImport(
scope
.into_iter()
.map(|k| self.symbols.get_or_intern(k))
.collect(),
);
let root = self.downgrade(source, Some(scope))?;
tracing::debug!("Generating bytecode for scoped import");
Ok(bytecode::compile_bytecode_scoped(root.as_ref(), self))
}
} }
impl CodegenContext for Ctx { impl CodegenContext for Ctx {
fn get_ir(&self, id: ExprId) -> &Ir {
self.irs.get(id.0).expect("ExprId out of bounds")
}
fn get_sym(&self, id: SymId) -> Symbol<'_> { fn get_sym(&self, id: SymId) -> Symbol<'_> {
self.symbols self.symbols
.resolve(id) .resolve(id)
@@ -317,12 +412,49 @@ impl CodegenContext for Ctx {
.checked_sub(1) .checked_sub(1)
.expect("current_source not set") .expect("current_source not set")
} }
fn get_current_source(&self) -> crate::error::Source {
self.sources.last().expect("current_source not set").clone()
}
fn get_store_dir(&self) -> &str { fn get_store_dir(&self) -> &str {
self.store.get_store_dir() self.store.get_store_dir()
} }
fn register_span(&self, range: rnix::TextRange) -> usize {
let spans = unsafe { &mut *self.spans.get() };
let id = spans.len();
spans.push((self.get_current_source_id(), range));
id
}
}
impl BytecodeContext for Ctx {
fn intern_string(&mut self, s: &str) -> u32 {
if let Some(&idx) = self.global_string_map.get(s) {
return idx;
}
let idx = self.global_strings.len() as u32;
self.global_strings.push(s.to_string());
self.global_string_map.insert(s.to_string(), idx);
idx
}
fn intern_constant(&mut self, c: Constant) -> u32 {
if let Some(&idx) = self.global_constant_map.get(&c) {
return idx;
}
let idx = self.global_constants.len() as u32;
self.global_constants.push(c.clone());
self.global_constant_map.insert(c, idx);
idx
}
fn register_span(&self, range: TextRange) -> u32 {
CodegenContext::register_span(self, range) as u32
}
fn get_sym(&self, id: SymId) -> &str {
self.symbols.resolve(id).expect("SymId out of bounds")
}
fn get_current_dir(&self) -> &Path {
Ctx::get_current_dir(self)
}
} }
impl RuntimeContext for Ctx { impl RuntimeContext for Ctx {
@@ -338,156 +470,239 @@ impl RuntimeContext for Ctx {
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> { fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
self.compile_scoped(source, scope) self.compile_scoped(source, scope)
} }
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
self.compile_bytecode(source)
}
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode> {
self.compile_bytecode_scoped(source, scope)
}
fn get_source(&self, id: usize) -> Source { fn get_source(&self, id: usize) -> Source {
self.get_source(id) self.sources.get(id).expect("source not found").clone()
} }
fn get_store(&self) -> &DaemonStore { fn get_store(&self) -> &DaemonStore {
&self.store &self.store
} }
fn get_span(&self, id: usize) -> (usize, TextRange) {
let spans = unsafe { &*self.spans.get() };
spans[id]
}
fn get_unsynced(&mut self) -> (&[String], &[Constant], usize, usize) {
let strings_base = self.synced_strings;
let constants_base = self.synced_constants;
let new_strings = &self.global_strings[strings_base..];
let new_constants = &self.global_constants[constants_base..];
self.synced_strings = self.global_strings.len();
self.synced_constants = self.global_constants.len();
(new_strings, new_constants, strings_base, constants_base)
}
}
impl DisassemblerContext for Ctx {
fn lookup_string(&self, id: u32) -> &str {
self.global_strings
.get(id as usize)
.expect("string not found")
}
fn lookup_constant(&self, id: u32) -> &Constant {
self.global_constants
.get(id as usize)
.expect("constant not found")
}
} }
enum Scope<'ctx> { enum Scope<'ctx> {
Global(&'ctx HashMap<SymId, ExprId>), Global(&'ctx HashMap<SymId, Ir<'static, RawIrRef<'static>>>),
Repl(&'ctx HashSet<SymId>), Repl(&'ctx HashSet<SymId>),
ScopedImport(HashSet<SymId>), ScopedImport(HashSet<SymId>),
Let(HashMap<SymId, ExprId>), Let(HashMap<SymId, ThunkId>),
Param(SymId, ExprId), Param(SymId, ArgId),
} }
struct ScopeGuard<'a, 'ctx> { struct ScopeGuard<'a, 'ctx, 'id, 'ir> {
ctx: &'a mut DowngradeCtx<'ctx>, ctx: &'a mut DowngradeCtx<'ctx, 'id, 'ir>,
} }
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> { impl Drop for ScopeGuard<'_, '_, '_, '_> {
fn drop(&mut self) { fn drop(&mut self) {
self.ctx.scopes.pop(); self.ctx.scopes.pop();
} }
} }
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> { impl<'id, 'ir, 'ctx> ScopeGuard<'_, 'ctx, 'id, 'ir> {
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> { fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx, 'id, 'ir> {
self.ctx self.ctx
} }
} }
pub struct DowngradeCtx<'ctx> { struct ThunkScope<'id, 'ir> {
ctx: &'ctx mut Ctx, bindings: bumpalo::collections::Vec<'ir, (ThunkId, IrRef<'id, 'ir>)>,
irs: Vec<Ir>, cache: HashTable<(IrRef<'id, 'ir>, ThunkId)>,
hasher: DefaultHashBuilder,
}
impl<'id, 'ir> ThunkScope<'id, 'ir> {
fn new_in(bump: &'ir Bump) -> Self {
Self {
bindings: bumpalo::collections::Vec::new_in(bump),
cache: HashTable::new(),
hasher: DefaultHashBuilder::default(),
}
}
fn lookup_cache(&self, key: IrRef<'id, 'ir>, token: &GhostToken<'id>) -> Option<ThunkId> {
let hash = self.hasher.hash_one(IrKey(key, token));
self.cache
.find(hash, |&(ir, _)| ir_content_eq(key, ir, token))
.map(|&(_, id)| id)
}
fn add_binding(&mut self, id: ThunkId, ir: IrRef<'id, 'ir>, token: &GhostToken<'id>) {
self.bindings.push((id, ir));
let hash = self.hasher.hash_one(IrKey(ir, token));
self.cache.insert_unique(hash, (ir, id), |&(ir, _)| {
self.hasher.hash_one(IrKey(ir, token))
});
}
fn extend_bindings(&mut self, iter: impl IntoIterator<Item = (ThunkId, IrRef<'id, 'ir>)>) {
self.bindings.extend(iter);
}
}
struct DowngradeCtx<'ctx, 'id, 'ir> {
bump: &'ir Bump,
token: GhostToken<'id>,
symbols: &'ctx mut DefaultStringInterner,
source: Source,
scopes: Vec<Scope<'ctx>>, scopes: Vec<Scope<'ctx>>,
with_scope_count: usize, with_scope_count: usize,
arg_id: usize, arg_count: usize,
thunk_scopes: Vec<Vec<(ExprId, ExprId)>>, thunk_count: &'ctx mut usize,
thunk_scopes: Vec<ThunkScope<'id, 'ir>>,
} }
impl<'ctx> DowngradeCtx<'ctx> { fn should_thunk<'id>(ir: IrRef<'id, '_>, token: &GhostToken<'id>) -> bool {
fn new( !matches!(
ctx: &'ctx mut Ctx, ir.borrow(token),
global: &'ctx HashMap<SymId, ExprId>, Ir::Builtin(_)
extra_scope: Option<Scope<'ctx>>, | Ir::Builtins
) -> Self {
Self {
scopes: std::iter::once(Scope::Global(global))
.chain(extra_scope)
.collect(),
irs: vec![],
arg_id: 0,
with_scope_count: 0,
thunk_scopes: vec![Vec::new()],
ctx,
}
}
}
impl DowngradeContext for DowngradeCtx<'_> {
fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(expr);
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_arg(&mut self, span: TextRange) -> ExprId {
self.irs.push(
Arg {
inner: ArgId(self.arg_id),
span,
}
.to_ir(),
);
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn get_ir(&self, id: ExprId) -> &Ir {
if id.0 < self.ctx.irs.len() {
self.ctx.irs.get(id.0).expect("unreachable")
} else {
self.irs
.get(id.0 - self.ctx.irs.len())
.expect("ExprId out of bounds")
}
}
fn maybe_thunk(&mut self, id: ExprId) -> ExprId {
let ir = self.get_ir(id);
match ir {
Ir::Builtin(_)
| Ir::Builtins(_)
| Ir::Int(_) | Ir::Int(_)
| Ir::Float(_) | Ir::Float(_)
| Ir::Bool(_) | Ir::Bool(_)
| Ir::Null(_) | Ir::Null
| Ir::Str(_) | Ir::Str(_)
| Ir::Thunk(_) => id, | Ir::Thunk(_)
_ => { )
let span = ir.span(); }
let slot = self.reserve_slots(1).next().expect("reserve_slots failed");
self.replace_ir(slot, Thunk { inner: slot, span }.to_ir()); impl<'ctx, 'id, 'ir> DowngradeCtx<'ctx, 'id, 'ir> {
self.register_thunk(slot, id); fn new(
slot bump: &'ir Bump,
} token: GhostToken<'id>,
symbols: &'ctx mut DefaultStringInterner,
global: &'ctx HashMap<SymId, Ir<'static, RawIrRef<'static>>>,
extra_scope: Option<Scope<'ctx>>,
thunk_count: &'ctx mut usize,
source: Source,
) -> Self {
Self {
bump,
token,
symbols,
source,
scopes: std::iter::once(Scope::Global(global))
.chain(extra_scope)
.collect(),
thunk_count,
arg_count: 0,
with_scope_count: 0,
thunk_scopes: vec![ThunkScope::new_in(bump)],
} }
} }
}
impl<'ctx: 'ir, 'id, 'ir> DowngradeContext<'id, 'ir> for DowngradeCtx<'ctx, 'id, 'ir> {
fn new_expr(&self, expr: Ir<'ir, IrRef<'id, 'ir>>) -> IrRef<'id, 'ir> {
IrRef::new(self.bump.alloc(GhostCell::new(expr)))
}
fn new_arg(&mut self) -> ArgId {
self.arg_count += 1;
ArgId(self.arg_count - 1)
}
fn maybe_thunk(&mut self, ir: IrRef<'id, 'ir>) -> IrRef<'id, 'ir> {
if !should_thunk(ir, &self.token) {
return ir;
}
let cached = self
.thunk_scopes
.last()
.expect("no active cache scope")
.lookup_cache(ir, &self.token);
if let Some(id) = cached {
return IrRef::alloc(self.bump, Ir::Thunk(id));
}
let id = ThunkId(*self.thunk_count);
*self.thunk_count = self.thunk_count.checked_add(1).expect("thunk id overflow");
self.thunk_scopes
.last_mut()
.expect("no active cache scope")
.add_binding(id, ir, &self.token);
IrRef::alloc(self.bump, Ir::Thunk(id))
}
fn new_sym(&mut self, sym: String) -> SymId { fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym) self.symbols.get_or_intern(sym)
} }
fn get_sym(&self, id: SymId) -> Symbol<'_> { fn get_sym(&self, id: SymId) -> Symbol<'_> {
self.ctx.get_sym(id) self.symbols.resolve(id).expect("no symbol found").into()
} }
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId> { fn lookup(&self, sym: SymId, span: TextRange) -> Result<IrRef<'id, 'ir>> {
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
match scope { match scope {
&Scope::Global(global_scope) => { &Scope::Global(global_scope) => {
if let Some(&expr) = global_scope.get(&sym) { if let Some(expr) = global_scope.get(&sym) {
return Ok(expr); let ir = match expr {
Ir::Builtins => Ir::Builtins,
Ir::Builtin(s) => Ir::Builtin(*s),
Ir::Bool(b) => Ir::Bool(*b),
Ir::Null => Ir::Null,
_ => unreachable!("globals should only contain leaf IR nodes"),
};
return Ok(self.new_expr(ir));
} }
} }
&Scope::Repl(repl_bindings) => { &Scope::Repl(repl_bindings) => {
if repl_bindings.contains(&sym) { if repl_bindings.contains(&sym) {
return Ok(self.new_expr(ReplBinding { inner: sym, span }.to_ir())); return Ok(self.new_expr(Ir::ReplBinding(sym)));
} }
} }
Scope::ScopedImport(scoped_bindings) => { Scope::ScopedImport(scoped_bindings) => {
if scoped_bindings.contains(&sym) { if scoped_bindings.contains(&sym) {
return Ok(self.new_expr(ScopedImportBinding { inner: sym, span }.to_ir())); return Ok(self.new_expr(Ir::ScopedImportBinding(sym)));
} }
} }
Scope::Let(let_scope) => { Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) { if let Some(&expr) = let_scope.get(&sym) {
return Ok(self.new_expr(Thunk { inner: expr, span }.to_ir())); return Ok(self.new_expr(Ir::Thunk(expr)));
} }
} }
&Scope::Param(param_sym, expr) => { &Scope::Param(param_sym, id) => {
if param_sym == sym { if param_sym == sym {
return Ok(expr); return Ok(self.new_expr(Ir::Arg(id)));
} }
} }
} }
} }
if self.with_scope_count > 0 { if self.with_scope_count > 0 {
Ok(self.new_expr(WithLookup { inner: sym, span }.to_ir())) Ok(self.new_expr(Ir::WithLookup(sym)))
} else { } else {
Err(Error::downgrade_error( Err(Error::downgrade_error(
format!("'{}' not found", self.get_sym(sym)), format!("'{}' not found", self.get_sym(sym)),
@@ -497,49 +712,37 @@ impl DowngradeContext for DowngradeCtx<'_> {
} }
} }
fn replace_ir(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len();
*self.irs.get_mut(local_id).expect("ExprId out of bounds") = expr;
}
fn get_current_source(&self) -> Source { fn get_current_source(&self) -> Source {
self.ctx.get_current_source() self.source.clone()
} }
#[allow(refining_impl_trait)] fn with_let_scope<F, R>(&mut self, keys: &[SymId], f: F) -> Result<R>
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
let range = (start..start + slots).map(ExprId);
let span = rnix::TextRange::default();
// Fill reserved slots with placeholder value
self.irs.extend(
range
.clone()
.map(|slot| Thunk { inner: slot, span }.to_ir()),
);
range
}
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
use crate::ir::TopLevel;
let body = root.downgrade(&mut self)?;
let thunks = self.thunk_scopes.pop().expect("no thunk scope left???");
let span = self.get_ir(body).span();
let top_level = self.new_expr(TopLevel { body, thunks, span }.to_ir());
self.ctx.irs.extend(self.irs);
Ok(top_level)
}
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where where
F: FnOnce(&mut Self) -> R, F: FnOnce(&mut Self) -> Result<(bumpalo::collections::Vec<'ir, IrRef<'id, 'ir>>, R)>,
{ {
self.scopes.push(Scope::Let(bindings)); let base = *self.thunk_count;
let mut guard = ScopeGuard { ctx: self }; *self.thunk_count = self
f(guard.as_ctx()) .thunk_count
.checked_add(keys.len())
.expect("thunk id overflow");
let iter = keys.iter().enumerate().map(|(offset, &key)| {
(
key,
ThunkId(unsafe { base.checked_add(offset).unwrap_unchecked() }),
)
});
self.scopes.push(Scope::Let(iter.collect()));
let (vals, ret) = {
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())?
};
assert_eq!(keys.len(), vals.len());
let scope = self.thunk_scopes.last_mut().expect("no active thunk scope");
scope.extend_bindings((base..base + keys.len()).map(ThunkId).zip(vals));
Ok(ret)
} }
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R fn with_param_scope<F, R>(&mut self, param: SymId, arg: ArgId, f: F) -> R
where where
F: FnOnce(&mut Self) -> R, F: FnOnce(&mut Self) -> R,
{ {
@@ -558,22 +761,41 @@ impl DowngradeContext for DowngradeCtx<'_> {
ret ret
} }
fn with_thunk_scope<F, R>(&mut self, f: F) -> (R, Vec<(ExprId, ExprId)>) fn with_thunk_scope<F, R>(
&mut self,
f: F,
) -> (
R,
bumpalo::collections::Vec<'ir, (ThunkId, IrRef<'id, 'ir>)>,
)
where where
F: FnOnce(&mut Self) -> R, F: FnOnce(&mut Self) -> R,
{ {
self.thunk_scopes.push(Vec::new()); self.thunk_scopes.push(ThunkScope::new_in(self.bump));
let ret = f(self); let ret = f(self);
( (
ret, ret,
self.thunk_scopes.pop().expect("no thunk scope left???"), self.thunk_scopes
.pop()
.expect("no thunk scope left???")
.bindings,
) )
} }
fn register_thunk(&mut self, slot: ExprId, inner: ExprId) { fn bump(&self) -> &'ir bumpalo::Bump {
self.thunk_scopes self.bump
.last_mut() }
.expect("register_thunk without active scope") }
.push((slot, inner));
impl<'id, 'ir, 'ctx: 'ir> DowngradeCtx<'ctx, 'id, 'ir> {
fn downgrade_toplevel(mut self, root: rnix::ast::Expr) -> Result<RawIrRef<'ir>> {
let body = root.downgrade(&mut self)?;
let thunks = self
.thunk_scopes
.pop()
.expect("no thunk scope left???")
.bindings;
let ir = IrRef::alloc(self.bump, Ir::TopLevel { body, thunks });
Ok(ir.freeze(self.token))
} }
} }

142
nix-js/src/derivation.rs Normal file
View File

@@ -0,0 +1,142 @@
use std::collections::{BTreeMap, BTreeSet};
/// One output of a derivation, as serialized into the `.drv` ATerm
/// output tuple `(name, path, hashAlgo, hash)`.
pub struct OutputInfo {
    /// Store path this output will be realized at.
    pub path: String,
    /// Hash algorithm name written into the output tuple; presumably
    /// empty for non-fixed-output derivations — TODO confirm with callers.
    pub hash_algo: String,
    /// Expected content hash written into the output tuple; presumably
    /// empty for non-fixed-output derivations — TODO confirm with callers.
    pub hash: String,
}
/// In-memory representation of a Nix derivation, sufficient to render the
/// `Derive(...)` ATerm (see `generate_aterm`). `BTreeMap`/`BTreeSet` keep
/// every section deterministically ordered, so the rendered ATerm — and any
/// hash computed over it — is stable.
pub struct DerivationData {
    /// Derivation name. Not serialized into the ATerm by the methods below.
    pub name: String,
    /// Output name (e.g. "out") mapped to its path/hash info.
    pub outputs: BTreeMap<String, OutputInfo>,
    /// Input `.drv` store path mapped to the set of its outputs consumed.
    pub input_drvs: BTreeMap<String, BTreeSet<String>>,
    /// Input source store paths.
    pub input_srcs: BTreeSet<String>,
    /// Build platform string (e.g. "x86_64-linux").
    pub platform: String,
    /// Path of the builder executable.
    pub builder: String,
    /// Command-line arguments passed to the builder.
    pub args: Vec<String>,
    /// Environment variables for the build.
    pub env: BTreeMap<String, String>,
}
/// Quote `s` for the ATerm format, backslash-escaping the characters the
/// derivation printer must escape: `"`, `\`, newline, carriage return, tab.
fn escape_string(s: &str) -> String {
    let mut quoted = String::with_capacity(s.len() + 2);
    quoted.push('"');
    for ch in s.chars() {
        // Characters with a fixed escape sequence map to Some(...);
        // everything else passes through verbatim.
        let escaped: Option<&str> = match ch {
            '"' => Some("\\\""),
            '\\' => Some("\\\\"),
            '\n' => Some("\\n"),
            '\r' => Some("\\r"),
            '\t' => Some("\\t"),
            _ => None,
        };
        match escaped {
            Some(seq) => quoted.push_str(seq),
            None => quoted.push(ch),
        }
    }
    quoted.push('"');
    quoted
}
/// Wrap `s` in double quotes without escaping; used for values known not
/// to contain characters needing escapes (store paths, output names).
fn quote_string(s: &str) -> String {
    let mut quoted = String::with_capacity(s.len() + 2);
    quoted.push('"');
    quoted.push_str(s);
    quoted.push('"');
    quoted
}
impl DerivationData {
    /// Render the outputs section: `(name,path,hashAlgo,hash)` tuples,
    /// comma-joined, in `BTreeMap` (i.e. name) order.
    fn format_outputs(&self) -> String {
        self.outputs
            .iter()
            .map(|(name, info)| {
                format!(
                    "({},{},{},{})",
                    quote_string(name),
                    quote_string(&info.path),
                    quote_string(&info.hash_algo),
                    quote_string(&info.hash),
                )
            })
            .collect::<Vec<_>>()
            .join(",")
    }

    /// Assemble the final `Derive(...)` term from a pre-rendered outputs
    /// and input-derivations section; the remaining sections (sources,
    /// platform, builder, args, env) are identical for both ATerm variants.
    fn assemble_derive(&self, outputs: &str, input_drvs: &str) -> String {
        let input_srcs = self
            .input_srcs
            .iter()
            .map(|s| quote_string(s))
            .collect::<Vec<_>>()
            .join(",");
        let args = self
            .args
            .iter()
            .map(|s| escape_string(s))
            .collect::<Vec<_>>()
            .join(",");
        let env = self
            .env
            .iter()
            .map(|(k, v)| format!("({},{})", escape_string(k), escape_string(v)))
            .collect::<Vec<_>>()
            .join(",");
        format!(
            "Derive([{}],[{}],[{}],{},{},[{}],[{}])",
            outputs,
            input_drvs,
            input_srcs,
            quote_string(&self.platform),
            escape_string(&self.builder),
            args,
            env,
        )
    }

    /// Render the full derivation ATerm, keyed by input `.drv` store paths.
    pub fn generate_aterm(&self) -> String {
        let input_drvs = self
            .input_drvs
            .iter()
            .map(|(drv_path, output_names)| {
                // BTreeSet iteration is already sorted.
                let outs: Vec<String> =
                    output_names.iter().map(|s| quote_string(s)).collect();
                format!("({},[{}])", quote_string(drv_path), outs.join(","))
            })
            .collect::<Vec<_>>()
            .join(",");
        self.assemble_derive(&self.format_outputs(), &input_drvs)
    }

    /// Render the ATerm "modulo fixed-output derivations": input `.drv`
    /// paths are replaced by caller-supplied hashes, each mapped to a
    /// comma-separated list of output names (sorted here before quoting).
    pub fn generate_aterm_modulo(&self, input_drv_hashes: &BTreeMap<String, String>) -> String {
        let input_drvs = input_drv_hashes
            .iter()
            .map(|(drv_hash, outputs_csv)| {
                let mut names: Vec<&str> = outputs_csv.split(',').collect();
                names.sort();
                let outs: Vec<String> = names.iter().map(|s| quote_string(s)).collect();
                format!("({},[{}])", quote_string(drv_hash), outs.join(","))
            })
            .collect::<Vec<_>>()
            .join(",");
        self.assemble_derive(&self.format_outputs(), &input_drvs)
    }

    /// All store paths this derivation references: input sources plus
    /// input `.drv` paths, sorted and deduplicated.
    pub fn collect_references(&self) -> Vec<String> {
        // input_srcs is already a sorted set; merging the (also sorted)
        // drv-path keys into a BTreeSet keeps the combined result ordered.
        let mut refs: BTreeSet<String> = self.input_srcs.clone();
        refs.extend(self.input_drvs.keys().cloned());
        refs.into_iter().collect()
    }
}

354
nix-js/src/disassembler.rs Normal file
View File

@@ -0,0 +1,354 @@
use std::fmt::Write;
use colored::Colorize;
use num_enum::TryFromPrimitive;
use crate::bytecode::{Bytecode, Constant, Op};
/// Read-only access to the interned string and constant pools that
/// bytecode operands index into; implemented by the evaluation context.
pub(crate) trait DisassemblerContext {
    /// Resolve an interned string by its pool index.
    fn lookup_string(&self, id: u32) -> &str;
    /// Resolve an interned constant (int/float) by its pool index.
    fn lookup_constant(&self, id: u32) -> &Constant;
}
/// One-shot textual disassembler: walks `code` front to back, emitting one
/// line per instruction (plus continuation lines for long operand lists).
pub(crate) struct Disassembler<'a, Ctx> {
    /// Raw bytecode being decoded.
    code: &'a [u8],
    /// Pool lookups for string/constant operands.
    ctx: &'a Ctx,
    /// Read cursor into `code`, advanced as opcodes and operands are read.
    pos: usize,
}
impl<'a, Ctx: DisassemblerContext> Disassembler<'a, Ctx> {
    /// Create a disassembler positioned at the start of `bytecode`'s
    /// instruction stream.
    pub fn new(bytecode: &'a Bytecode, ctx: &'a Ctx) -> Self {
        Self {
            code: &bytecode.code,
            ctx,
            pos: 0,
        }
    }

    /// Read one byte and advance the cursor. Panics (index out of bounds)
    /// on truncated code.
    fn read_u8(&mut self) -> u8 {
        let b = self.code[self.pos];
        self.pos += 1;
        b
    }

    /// Read a little-endian `u16` operand and advance the cursor.
    fn read_u16(&mut self) -> u16 {
        let bytes = self.code[self.pos..self.pos + 2]
            .try_into()
            .expect("not enough bytes");
        self.pos += 2;
        u16::from_le_bytes(bytes)
    }

    /// Read a little-endian `u32` operand and advance the cursor.
    fn read_u32(&mut self) -> u32 {
        let bytes = self.code[self.pos..self.pos + 4]
            .try_into()
            .expect("not enough bytes");
        self.pos += 4;
        u32::from_le_bytes(bytes)
    }

    /// Read a little-endian `i32` operand (relative jump offsets) and
    /// advance the cursor.
    fn read_i32(&mut self) -> i32 {
        let bytes = self.code[self.pos..self.pos + 4]
            .try_into()
            .expect("not enough bytes");
        self.pos += 4;
        i32::from_le_bytes(bytes)
    }

    /// Disassemble the whole stream as plain text.
    pub fn disassemble(&mut self) -> String {
        self.disassemble_impl(false)
    }

    /// Disassemble the whole stream with ANSI colors (for terminals).
    pub fn disassemble_colored(&mut self) -> String {
        self.disassemble_impl(true)
    }

    /// Width of the address column at `pc`: at least 4 hex digits (to
    /// match the `{:04x}` format), growing once code exceeds 64 KiB.
    ///
    /// Replaces the previous `start_pos.ilog2() >> 4`, which panicked for
    /// `pc == 0` (e.g. a first instruction with more than 4 operand
    /// bytes) and under-padded addresses with 6+ hex digits.
    fn addr_width(pc: usize) -> usize {
        let digits = if pc == 0 { 1 } else { pc.ilog2() as usize / 4 + 1 };
        digits.max(4)
    }

    /// Shared implementation of the plain/colored listing. Each
    /// instruction row is `addr  bytes | MNEMONIC args`; operand bytes
    /// beyond the first group of four spill onto continuation rows.
    fn disassemble_impl(&mut self, color: bool) -> String {
        let mut out = String::new();
        if color {
            let _ = writeln!(out, "{}", "=== Bytecode Disassembly ===".bold().white());
            let _ = writeln!(
                out,
                "{} {}",
                "Length:".white(),
                format!("{} bytes", self.code.len()).cyan()
            );
        } else {
            let _ = writeln!(out, "=== Bytecode Disassembly ===");
            let _ = writeln!(out, "Length: {} bytes", self.code.len());
        }
        while self.pos < self.code.len() {
            let start_pos = self.pos;
            let op_byte = self.read_u8();
            let (mnemonic, args) = self.decode_instruction(op_byte, start_pos);
            let bytes_slice = &self.code[start_pos + 1..self.pos];
            for (i, chunk) in bytes_slice.chunks(4).enumerate() {
                // First group includes the opcode byte; continuation
                // groups pad two columns so the hex stays aligned.
                let bytes_str = {
                    let mut temp = String::new();
                    if i == 0 {
                        let _ = write!(&mut temp, "{:02x}", self.code[start_pos]);
                    } else {
                        let _ = write!(&mut temp, "  ");
                    }
                    for b in chunk.iter() {
                        let _ = write!(&mut temp, " {:02x}", b);
                    }
                    temp
                };
                if i == 0 {
                    if color {
                        let sep = if args.is_empty() { "" } else { " " };
                        let _ = writeln!(
                            out,
                            "{} {:<14} | {}{}{}",
                            format!("{:04x}", start_pos).dimmed(),
                            bytes_str.green(),
                            mnemonic.yellow().bold(),
                            sep,
                            args.cyan()
                        );
                    } else {
                        let op_str = if args.is_empty() {
                            mnemonic.to_string()
                        } else {
                            format!("{} {}", mnemonic, args)
                        };
                        let _ = writeln!(out, "{:04x} {:<14} | {}", start_pos, bytes_str, op_str);
                    }
                } else {
                    // Continuation row: blank out the address column,
                    // matching however wide it rendered on the first row.
                    let pad = Self::addr_width(start_pos);
                    for _ in 0..pad {
                        let _ = write!(out, " ");
                    }
                    if color {
                        let _ = writeln!(out, " {:<14} |", bytes_str.green());
                    } else {
                        let _ = writeln!(out, " {:<14} |", bytes_str);
                    }
                }
            }
        }
        out
    }

    /// Decode the operands of the instruction whose opcode byte is
    /// `op_byte` (located at `current_pc`), advancing the cursor past
    /// them. Returns the mnemonic and a formatted argument string.
    /// Panics on an unknown opcode.
    fn decode_instruction(&mut self, op_byte: u8, current_pc: usize) -> (&'static str, String) {
        let op = Op::try_from_primitive(op_byte).expect("invalid op code");
        match op {
            Op::PushConst => {
                let idx = self.read_u32();
                let val = self.ctx.lookup_constant(idx);
                let val_str = match val {
                    Constant::Int(i) => format!("Int({})", i),
                    Constant::Float(f) => format!("Float(bits: {})", f),
                };
                ("PushConst", format!("@{} ({})", idx, val_str))
            }
            Op::PushString => {
                let idx = self.read_u32();
                let s = self.ctx.lookup_string(idx);
                let len = s.len();
                // Long strings are truncated for readability.
                let mut s_fmt = format!("{:?}", s);
                if s_fmt.len() > 60 {
                    s_fmt.truncate(57);
                    #[allow(clippy::unwrap_used)]
                    write!(s_fmt, "...\" (total {len} bytes)").unwrap();
                }
                ("PushString", format!("@{} {}", idx, s_fmt))
            }
            Op::PushNull => ("PushNull", String::new()),
            Op::PushTrue => ("PushTrue", String::new()),
            Op::PushFalse => ("PushFalse", String::new()),
            Op::LoadLocal => {
                let idx = self.read_u32();
                ("LoadLocal", format!("[{}]", idx))
            }
            Op::LoadOuter => {
                let depth = self.read_u8();
                let idx = self.read_u32();
                ("LoadOuter", format!("depth={} [{}]", depth, idx))
            }
            Op::StoreLocal => {
                let idx = self.read_u32();
                ("StoreLocal", format!("[{}]", idx))
            }
            Op::AllocLocals => {
                let count = self.read_u32();
                ("AllocLocals", format!("count={}", count))
            }
            Op::MakeThunk => {
                let offset = self.read_u32();
                let label_idx = self.read_u32();
                let label = self.ctx.lookup_string(label_idx);
                ("MakeThunk", format!("-> {:04x} label={}", offset, label))
            }
            Op::MakeClosure => {
                let offset = self.read_u32();
                let slots = self.read_u32();
                ("MakeClosure", format!("-> {:04x} slots={}", offset, slots))
            }
            Op::MakePatternClosure => {
                let offset = self.read_u32();
                let slots = self.read_u32();
                let req_count = self.read_u16();
                let opt_count = self.read_u16();
                let ellipsis = self.read_u8() != 0;
                // NOTE: dropped a stray ')' that used to trail "...={}".
                let mut arg_str = format!(
                    "-> {:04x} slots={} req={} opt={} ...={}",
                    offset, slots, req_count, opt_count, ellipsis
                );
                arg_str.push_str(" Args=[");
                for _ in 0..req_count {
                    let idx = self.read_u32();
                    arg_str.push_str(&format!("Req({}) ", self.ctx.lookup_string(idx)));
                }
                for _ in 0..opt_count {
                    let idx = self.read_u32();
                    arg_str.push_str(&format!("Opt({}) ", self.ctx.lookup_string(idx)));
                }
                // Trailing (name, span) pairs are consumed but not shown.
                let total_args = req_count + opt_count;
                for _ in 0..total_args {
                    let _name_idx = self.read_u32();
                    let _span_id = self.read_u32();
                }
                arg_str.push(']');
                ("MakePatternClosure", arg_str)
            }
            Op::Call => {
                let span_id = self.read_u32();
                ("Call", format!("span={}", span_id))
            }
            Op::CallNoSpan => ("CallNoSpan", String::new()),
            Op::MakeAttrs => {
                let count = self.read_u32();
                ("MakeAttrs", format!("size={}", count))
            }
            Op::MakeAttrsDyn => {
                let static_count = self.read_u32();
                let dyn_count = self.read_u32();
                (
                    "MakeAttrsDyn",
                    format!("static={} dyn={}", static_count, dyn_count),
                )
            }
            Op::MakeEmptyAttrs => ("MakeEmptyAttrs", String::new()),
            Op::Select => {
                let path_len = self.read_u16();
                let span_id = self.read_u32();
                ("Select", format!("path_len={} span={}", path_len, span_id))
            }
            Op::SelectDefault => {
                let path_len = self.read_u16();
                let span_id = self.read_u32();
                (
                    "SelectDefault",
                    format!("path_len={} span={}", path_len, span_id),
                )
            }
            Op::HasAttr => {
                let path_len = self.read_u16();
                ("HasAttr", format!("path_len={}", path_len))
            }
            Op::MakeList => {
                let count = self.read_u32();
                ("MakeList", format!("size={}", count))
            }
            Op::OpAdd => ("OpAdd", String::new()),
            Op::OpSub => ("OpSub", String::new()),
            Op::OpMul => ("OpMul", String::new()),
            Op::OpDiv => ("OpDiv", String::new()),
            Op::OpEq => ("OpEq", String::new()),
            Op::OpNeq => ("OpNeq", String::new()),
            Op::OpLt => ("OpLt", String::new()),
            Op::OpGt => ("OpGt", String::new()),
            Op::OpLeq => ("OpLeq", String::new()),
            Op::OpGeq => ("OpGeq", String::new()),
            Op::OpConcat => ("OpConcat", String::new()),
            Op::OpUpdate => ("OpUpdate", String::new()),
            Op::OpNeg => ("OpNeg", String::new()),
            Op::OpNot => ("OpNot", String::new()),
            Op::ForceBool => ("ForceBool", String::new()),
            // Jump offsets are relative to the end of the instruction
            // (opcode byte + 4-byte operand).
            Op::JumpIfFalse => {
                let offset = self.read_i32();
                let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
                (
                    "JumpIfFalse",
                    format!("-> {:04x} offset={}", target, offset),
                )
            }
            Op::JumpIfTrue => {
                let offset = self.read_i32();
                let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
                ("JumpIfTrue", format!("-> {:04x} offset={}", target, offset))
            }
            Op::Jump => {
                let offset = self.read_i32();
                let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
                ("Jump", format!("-> {:04x} offset={}", target, offset))
            }
            Op::ConcatStrings => {
                let count = self.read_u16();
                let force = self.read_u8();
                ("ConcatStrings", format!("count={} force={}", count, force))
            }
            Op::ResolvePath => ("ResolvePath", String::new()),
            Op::Assert => {
                let raw_idx = self.read_u32();
                let span_id = self.read_u32();
                ("Assert", format!("text_id={} span={}", raw_idx, span_id))
            }
            Op::PushWith => ("PushWith", String::new()),
            Op::PopWith => ("PopWith", String::new()),
            Op::WithLookup => {
                let idx = self.read_u32();
                let name = self.ctx.lookup_string(idx);
                ("WithLookup", format!("{:?}", name))
            }
            Op::LoadBuiltins => ("LoadBuiltins", String::new()),
            Op::LoadBuiltin => {
                let idx = self.read_u32();
                let name = self.ctx.lookup_string(idx);
                ("LoadBuiltin", format!("{:?}", name))
            }
            Op::MkPos => {
                let span_id = self.read_u32();
                ("MkPos", format!("id={}", span_id))
            }
            Op::LoadReplBinding => {
                let idx = self.read_u32();
                let name = self.ctx.lookup_string(idx);
                ("LoadReplBinding", format!("{:?}", name))
            }
            Op::LoadScopedBinding => {
                let idx = self.read_u32();
                let name = self.ctx.lookup_string(idx);
                ("LoadScopedBinding", format!("{:?}", name))
            }
            Op::Return => ("Return", String::new()),
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -292,43 +292,32 @@ fn parse_frames(stack: &str, ctx: &impl RuntimeContext) -> Vec<NixStackFrame> {
let mut frames = Vec::new(); let mut frames = Vec::new();
for line in stack.lines() { for line in stack.lines() {
// Format: NIX_STACK_FRAME:source_id:start:end[:extra_data] // Format: NIX_STACK_FRAME:span_id:message
let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else { let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else {
continue; continue;
}; };
let parts: Vec<&str> = rest.splitn(4, ':').collect(); let parts: Vec<&str> = rest.splitn(2, ':').collect();
if parts.len() < 3 { if parts.is_empty() {
continue; continue;
} }
let src = match parts[0].parse() { let span_id: usize = match parts[0].parse() {
Ok(id) => ctx.get_source(id), Ok(id) => id,
Err(_) => continue,
};
let start: u32 = match parts[1].parse() {
Ok(v) => v,
Err(_) => continue,
};
let end: u32 = match parts[2].parse() {
Ok(v) => v,
Err(_) => continue, Err(_) => continue,
}; };
let (source_id, span) = ctx.get_span(span_id);
let src = ctx.get_source(source_id);
let span = rnix::TextRange::new(rnix::TextSize::from(start), rnix::TextSize::from(end)); let message = if parts.len() == 2 {
parts[1].to_string()
let message = { } else {
if parts.len() == 4 { String::new()
parts[3].to_string()
} else {
String::new()
}
}; };
frames.push(NixStackFrame { span, message, src }); frames.push(NixStackFrame { span, message, src });
} }
// Deduplicate consecutive identical frames
frames.dedup_by(|a, b| a.span == b.span && a.message == b.message); frames.dedup_by(|a, b| a.span == b.span && a.message == b.message);
frames frames

View File

@@ -1,8 +1,8 @@
use deno_core::OpState; use deno_core::OpState;
use deno_core::ToV8;
use deno_core::op2; use deno_core::op2;
use nix_compat::nixhash::HashAlgo; use nix_compat::nixhash::HashAlgo;
use nix_compat::nixhash::NixHash; use nix_compat::nixhash::NixHash;
use serde::Serialize;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use crate::runtime::OpStateExt; use crate::runtime::OpStateExt;
@@ -22,19 +22,19 @@ pub use metadata_cache::MetadataCache;
use crate::nar; use crate::nar;
use crate::runtime::NixRuntimeError; use crate::runtime::NixRuntimeError;
#[derive(Serialize)] #[derive(ToV8)]
pub struct FetchUrlResult { pub struct FetchUrlResult {
pub store_path: String, pub store_path: String,
pub hash: String, pub hash: String,
} }
#[derive(Serialize)] #[derive(ToV8)]
pub struct FetchTarballResult { pub struct FetchTarballResult {
pub store_path: String, pub store_path: String,
pub nar_hash: String, pub nar_hash: String,
} }
#[derive(Serialize)] #[derive(ToV8)]
pub struct FetchGitResult { pub struct FetchGitResult {
pub out_path: String, pub out_path: String,
pub rev: String, pub rev: String,
@@ -47,7 +47,6 @@ pub struct FetchGitResult {
} }
#[op2] #[op2]
#[serde]
pub fn op_fetch_url<Ctx: RuntimeContext>( pub fn op_fetch_url<Ctx: RuntimeContext>(
state: &mut OpState, state: &mut OpState,
#[string] url: String, #[string] url: String,
@@ -152,7 +151,6 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
} }
#[op2] #[op2]
#[serde]
pub fn op_fetch_tarball<Ctx: RuntimeContext>( pub fn op_fetch_tarball<Ctx: RuntimeContext>(
state: &mut OpState, state: &mut OpState,
#[string] url: String, #[string] url: String,
@@ -266,7 +264,6 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
} }
#[op2] #[op2]
#[serde]
pub fn op_fetch_git<Ctx: RuntimeContext>( pub fn op_fetch_git<Ctx: RuntimeContext>(
state: &mut OpState, state: &mut OpState,
#[string] url: String, #[string] url: String,

View File

@@ -1,6 +1,7 @@
use reqwest::blocking::Client;
use std::time::Duration; use std::time::Duration;
use reqwest::blocking::Client;
pub struct Downloader { pub struct Downloader {
client: Client, client: Client,
} }

View File

@@ -1,9 +1,10 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use rusqlite::{Connection, OptionalExtension, params}; use rusqlite::{Connection, OptionalExtension, params};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
#[derive(Debug)] #[derive(Debug)]
pub enum CacheError { pub enum CacheError {

View File

@@ -1,46 +1,158 @@
use derive_more::{IsVariant, TryUnwrap, Unwrap}; use std::{
use hashbrown::HashMap; hash::{Hash, Hasher},
ops::Deref,
};
use bumpalo::{Bump, boxed::Box, collections::Vec};
use ghost_cell::{GhostCell, GhostToken};
use rnix::{TextRange, ast}; use rnix::{TextRange, ast};
use string_interner::symbol::SymbolU32; use string_interner::symbol::SymbolU32;
use nix_js_macros::ir; pub type HashMap<'ir, K, V> = hashbrown::HashMap<K, V, hashbrown::DefaultHashBuilder, &'ir Bump>;
ir! { #[repr(transparent)]
Ir, #[derive(Clone, Copy)]
pub struct IrRef<'id, 'ir>(&'ir GhostCell<'id, Ir<'ir, Self>>);
impl<'id, 'ir> IrRef<'id, 'ir> {
pub fn new(ir: &'ir GhostCell<'id, Ir<'ir, Self>>) -> Self {
Self(ir)
}
pub fn alloc(bump: &'ir Bump, ir: Ir<'ir, Self>) -> Self {
Self(bump.alloc(GhostCell::new(ir)))
}
/// Freeze a mutable IR reference into a read-only one, consuming the
/// `GhostToken` to prevent any further mutation.
///
/// # Safety
/// The transmute is sound because:
/// - `GhostCell<'id, T>` is `#[repr(transparent)]` over `T`
/// - `IrRef<'id, 'ir>` is `#[repr(transparent)]` over
/// `&'ir GhostCell<'id, Ir<'ir, Self>>`
/// - `RawIrRef<'ir>` is `#[repr(transparent)]` over `&'ir Ir<'ir, Self>`
/// - `Ir<'ir, Ref>` is `#[repr(C)]` and both ref types are pointer-sized
///
/// Consuming the `GhostToken` guarantees no `borrow_mut` calls can occur
/// afterwards, so the shared `&Ir` references from `RawIrRef::Deref` can
/// never alias with mutable references.
pub fn freeze(self, _token: GhostToken<'id>) -> RawIrRef<'ir> {
unsafe { std::mem::transmute(self) }
}
}
impl<'id, 'ir> Deref for IrRef<'id, 'ir> {
type Target = GhostCell<'id, Ir<'ir, IrRef<'id, 'ir>>>;
fn deref(&self) -> &Self::Target {
self.0
}
}
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct RawIrRef<'ir>(&'ir Ir<'ir, Self>);
impl<'ir> Deref for RawIrRef<'ir> {
type Target = Ir<'ir, RawIrRef<'ir>>;
fn deref(&self) -> &Self::Target {
self.0
}
}
#[repr(C)]
pub enum Ir<'ir, Ref> {
Int(i64), Int(i64),
Float(f64), Float(f64),
Bool(bool), Bool(bool),
Null, Null,
Str { pub val: String }, Str(Box<'ir, String>),
AttrSet { pub stcs: HashMap<SymId, (ExprId, rnix::TextRange)>, pub dyns: Vec<(ExprId, ExprId, rnix::TextRange)> }, AttrSet {
List { pub items: Vec<ExprId> }, stcs: HashMap<'ir, SymId, (Ref, TextRange)>,
dyns: Vec<'ir, (Ref, Ref, TextRange)>,
},
List {
items: Vec<'ir, Ref>,
},
Path(Ref),
ConcatStrings {
parts: Vec<'ir, Ref>,
force_string: bool,
},
HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> }, // OPs
BinOp { pub lhs: ExprId, pub rhs: ExprId, pub kind: BinOpKind }, UnOp {
UnOp { pub rhs: ExprId, pub kind: UnOpKind }, rhs: Ref,
Select { pub expr: ExprId, pub attrpath: Vec<Attr>, pub default: Option<ExprId> }, kind: UnOpKind,
If { pub cond: ExprId, pub consq: ExprId, pub alter: ExprId }, },
Call { pub func: ExprId, pub arg: ExprId }, BinOp {
Assert { pub assertion: ExprId, pub expr: ExprId, pub assertion_raw: String }, lhs: Ref,
ConcatStrings { pub parts: Vec<ExprId>, pub force_string: bool }, rhs: Ref,
Path { pub expr: ExprId }, kind: BinOpKind,
Func { pub body: ExprId, pub param: Option<Param>, pub arg: ExprId, pub thunks: Vec<(ExprId, ExprId)> }, },
TopLevel { pub body: ExprId, pub thunks: Vec<(ExprId, ExprId)> }, HasAttr {
lhs: Ref,
rhs: Vec<'ir, Attr<Ref>>,
},
Select {
expr: Ref,
attrpath: Vec<'ir, Attr<Ref>>,
default: Option<Ref>,
span: TextRange,
},
// Conditionals
If {
cond: Ref,
consq: Ref,
alter: Ref,
},
Assert {
assertion: Ref,
expr: Ref,
assertion_raw: String,
span: TextRange,
},
With {
namespace: Ref,
body: Ref,
thunks: Vec<'ir, (ThunkId, Ref)>,
},
WithLookup(SymId),
// Function related
Func {
body: Ref,
param: Option<Param<'ir>>,
arg: ArgId,
thunks: Vec<'ir, (ThunkId, Ref)>,
},
Arg(ArgId), Arg(ArgId),
Thunk(ExprId), Call {
func: Ref,
arg: Ref,
span: TextRange,
},
// Builtins
Builtins, Builtins,
Builtin(SymId), Builtin(SymId),
CurPos,
// Misc
TopLevel {
body: Ref,
thunks: Vec<'ir, (ThunkId, Ref)>,
},
Thunk(ThunkId),
CurPos(TextRange),
ReplBinding(SymId), ReplBinding(SymId),
ScopedImportBinding(SymId), ScopedImportBinding(SymId),
WithExpr { pub namespace: ExprId, pub body: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
WithLookup(SymId),
} }
#[repr(transparent)] #[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ExprId(pub usize); pub struct ThunkId(pub usize);
pub type SymId = SymbolU32; pub type SymId = SymbolU32;
@@ -50,18 +162,18 @@ pub struct ArgId(pub usize);
/// Represents a key in an attribute path. /// Represents a key in an attribute path.
#[allow(unused)] #[allow(unused)]
#[derive(Debug, TryUnwrap)] #[derive(Debug)]
pub enum Attr { pub enum Attr<Ref> {
/// A dynamic attribute key, which is an expression that must evaluate to a string. /// A dynamic attribute key, which is an expression that must evaluate to a string.
/// Example: `attrs.${key}` /// Example: `attrs.${key}`
Dynamic(ExprId, TextRange), Dynamic(Ref, TextRange),
/// A static attribute key. /// A static attribute key.
/// Example: `attrs.key` /// Example: `attrs.key`
Str(SymId, TextRange), Str(SymId, TextRange),
} }
/// The kinds of binary operations supported in Nix. /// The kinds of binary operations supported in Nix.
#[derive(Clone, Debug)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum BinOpKind { pub enum BinOpKind {
// Arithmetic // Arithmetic
Add, Add,
@@ -118,7 +230,7 @@ impl From<ast::BinOpKind> for BinOpKind {
} }
/// The kinds of unary operations. /// The kinds of unary operations.
#[derive(Clone, Debug)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum UnOpKind { pub enum UnOpKind {
Neg, // Negation (`-`) Neg, // Negation (`-`)
Not, // Logical not (`!`) Not, // Logical not (`!`)
@@ -135,8 +247,437 @@ impl From<ast::UnaryOpKind> for UnOpKind {
/// Describes the parameters of a function. /// Describes the parameters of a function.
#[derive(Debug)] #[derive(Debug)]
pub struct Param { pub struct Param<'ir> {
pub required: Vec<(SymId, TextRange)>, pub required: Vec<'ir, (SymId, TextRange)>,
pub optional: Vec<(SymId, TextRange)>, pub optional: Vec<'ir, (SymId, TextRange)>,
pub ellipsis: bool, pub ellipsis: bool,
} }
#[derive(Clone, Copy)]
/// Content-addressed hash-map key over an IR node. Hashing and equality
/// delegate to `ir_content_hash` / `ir_content_eq`, so two structurally
/// identical nodes (source spans ignored) land in the same table slot.
pub(crate) struct IrKey<'id, 'ir, 'a>(pub IrRef<'id, 'ir>, pub &'a GhostToken<'id>);
impl std::hash::Hash for IrKey<'_, '_, '_> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash by node content, reading through the stored borrow token.
        ir_content_hash(self.0, self.1, state);
    }
}
impl PartialEq for IrKey<'_, '_, '_> {
    fn eq(&self, other: &Self) -> bool {
        // Both keys carry a token of the same brand, so one token suffices
        // to read both nodes.
        ir_content_eq(self.0, other.0, self.1)
    }
}
impl Eq for IrKey<'_, '_, '_> {}
/// Feeds an attribute-path key into `state`, ignoring its source span.
fn attr_content_hash<'id>(
    attr: &Attr<IrRef<'id, '_>>,
    token: &GhostToken<'id>,
    state: &mut impl Hasher,
) {
    // Tag the variant first so Dynamic/Str payloads can never collide.
    core::mem::discriminant(attr).hash(state);
    if let &Attr::Dynamic(expr, _) = attr {
        ir_content_hash(expr, token, state);
    } else if let Attr::Str(sym, _) = attr {
        sym.hash(state);
    }
}
/// Structural equality of two attribute-path keys; spans are irrelevant.
fn attr_content_eq<'id, 'ir>(
    a: &Attr<IrRef<'id, 'ir>>,
    b: &Attr<IrRef<'id, 'ir>>,
    token: &GhostToken<'id>,
) -> bool {
    match (a, b) {
        (&Attr::Dynamic(lhs, _), &Attr::Dynamic(rhs, _)) => ir_content_eq(lhs, rhs, token),
        (Attr::Str(lhs, _), Attr::Str(rhs, _)) => lhs == rhs,
        // A static key never equals a dynamic one.
        (Attr::Dynamic(..), Attr::Str(..)) | (Attr::Str(..), Attr::Dynamic(..)) => false,
    }
}
/// Hashes a function parameter pattern: the required and optional formal
/// names (in order) plus the ellipsis flag. Source spans are ignored.
fn param_content_hash(param: &Param<'_>, state: &mut impl Hasher) {
    // Length-prefix each list so e.g. (["a"], ["b"]) != (["a", "b"], []).
    for formals in [&param.required, &param.optional] {
        formals.len().hash(state);
        for &(sym, _) in formals.iter() {
            sym.hash(state);
        }
    }
    param.ellipsis.hash(state);
}
fn param_content_eq(a: &Param<'_>, b: &Param<'_>) -> bool {
a.ellipsis == b.ellipsis
&& a.required.len() == b.required.len()
&& a.optional.len() == b.optional.len()
&& a.required
.iter()
.zip(b.required.iter())
.all(|((a, _), (b, _))| a == b)
&& a.optional
.iter()
.zip(b.optional.iter())
.all(|((a, _), (b, _))| a == b)
}
/// Hashes a thunk table: length-prefixed `(id, body)` pairs in order.
fn thunks_content_hash<'id>(
    thunks: &[(ThunkId, IrRef<'id, '_>)],
    token: &GhostToken<'id>,
    state: &mut impl Hasher,
) {
    thunks.len().hash(state);
    for (id, body) in thunks.iter().copied() {
        id.hash(state);
        ir_content_hash(body, token, state);
    }
}
/// Pairwise structural equality of two thunk tables.
fn thunks_content_eq<'id, 'ir>(
    a: &[(ThunkId, IrRef<'id, 'ir>)],
    b: &[(ThunkId, IrRef<'id, 'ir>)],
    token: &GhostToken<'id>,
) -> bool {
    if a.len() != b.len() {
        return false;
    }
    std::iter::zip(a, b)
        .all(|(&(x_id, x_ir), &(y_id, y_ir))| x_id == y_id && ir_content_eq(x_ir, y_ir, token))
}
/// Hashes an IR node by content, recursing into children through `token`.
/// Source spans (`TextRange` fields, matched away with `..` or `_`) are
/// deliberately excluded so nodes differing only in position hash alike.
/// Must stay consistent with `ir_content_eq` (equal nodes => equal hashes).
fn ir_content_hash<'id>(ir: IrRef<'id, '_>, token: &GhostToken<'id>, state: &mut impl Hasher) {
    let ir = ir.borrow(token);
    // Variant tag first, so payloads of different variants cannot collide.
    core::mem::discriminant(ir).hash(state);
    match ir {
        Ir::Int(x) => x.hash(state),
        // f64 is not `Hash`; hash its bit pattern instead.
        Ir::Float(x) => x.to_bits().hash(state),
        Ir::Bool(x) => x.hash(state),
        Ir::Null => {}
        Ir::Str(x) => x.hash(state),
        Ir::AttrSet { stcs, dyns } => {
            stcs.len().hash(state);
            // `stcs` is a HashMap with unspecified iteration order: hash each
            // entry separately and fold with a commutative op (wrapping add)
            // to get an order-independent digest.
            let mut combined: u64 = 0;
            for (&key, &(val, _)) in stcs.iter() {
                let mut h = std::hash::DefaultHasher::new();
                key.hash(&mut h);
                ir_content_hash(val, token, &mut h);
                combined = combined.wrapping_add(h.finish());
            }
            combined.hash(state);
            // Dynamic attrs are an ordered Vec, so plain sequential hashing.
            dyns.len().hash(state);
            for &(k, v, _) in dyns.iter() {
                ir_content_hash(k, token, state);
                ir_content_hash(v, token, state);
            }
        }
        Ir::List { items } => {
            items.len().hash(state);
            for &item in items.iter() {
                ir_content_hash(item, token, state);
            }
        }
        Ir::HasAttr { lhs, rhs } => {
            ir_content_hash(*lhs, token, state);
            rhs.len().hash(state);
            for attr in rhs.iter() {
                attr_content_hash(attr, token, state);
            }
        }
        &Ir::BinOp { lhs, rhs, kind } => {
            ir_content_hash(lhs, token, state);
            ir_content_hash(rhs, token, state);
            kind.hash(state);
        }
        &Ir::UnOp { rhs, kind } => {
            ir_content_hash(rhs, token, state);
            kind.hash(state);
        }
        Ir::Select {
            expr,
            attrpath,
            default,
            ..
        } => {
            ir_content_hash(*expr, token, state);
            attrpath.len().hash(state);
            for attr in attrpath.iter() {
                attr_content_hash(attr, token, state);
            }
            // Presence flag keeps `x.a` distinct from `x.a or <something>`.
            default.is_some().hash(state);
            if let Some(d) = default {
                ir_content_hash(*d, token, state);
            }
        }
        &Ir::If { cond, consq, alter } => {
            ir_content_hash(cond, token, state);
            ir_content_hash(consq, token, state);
            ir_content_hash(alter, token, state);
        }
        &Ir::Call { func, arg, .. } => {
            ir_content_hash(func, token, state);
            ir_content_hash(arg, token, state);
        }
        Ir::Assert {
            assertion,
            expr,
            assertion_raw,
            ..
        } => {
            ir_content_hash(*assertion, token, state);
            ir_content_hash(*expr, token, state);
            // The raw source text of the assertion participates in identity.
            assertion_raw.hash(state);
        }
        Ir::ConcatStrings {
            force_string,
            parts,
        } => {
            force_string.hash(state);
            parts.len().hash(state);
            for &part in parts.iter() {
                ir_content_hash(part, token, state);
            }
        }
        &Ir::Path(expr) => ir_content_hash(expr, token, state),
        Ir::Func {
            body,
            arg,
            param,
            thunks,
        } => {
            ir_content_hash(*body, token, state);
            arg.hash(state);
            param.is_some().hash(state);
            if let Some(p) = param {
                param_content_hash(p, state);
            }
            thunks_content_hash(thunks, token, state);
        }
        Ir::TopLevel { body, thunks } => {
            ir_content_hash(*body, token, state);
            thunks_content_hash(thunks, token, state);
        }
        // Leaf variants: hash the identifier/symbol payload directly.
        Ir::Arg(x) => x.hash(state),
        Ir::Thunk(x) => x.hash(state),
        Ir::Builtins => {}
        Ir::Builtin(x) => x.hash(state),
        Ir::CurPos(x) => x.hash(state),
        Ir::ReplBinding(x) => x.hash(state),
        Ir::ScopedImportBinding(x) => x.hash(state),
        &Ir::With {
            namespace,
            body,
            ref thunks,
        } => {
            ir_content_hash(namespace, token, state);
            ir_content_hash(body, token, state);
            thunks_content_hash(thunks, token, state);
        }
        Ir::WithLookup(x) => x.hash(state),
    }
}
/// Structural equality of two IR nodes, ignoring source spans.
/// Recurses into children through the shared `GhostToken`; must stay
/// consistent with `ir_content_hash` (equal here => equal hashes there).
pub(crate) fn ir_content_eq<'id, 'ir>(
    a: IrRef<'id, 'ir>,
    b: IrRef<'id, 'ir>,
    token: &GhostToken<'id>,
) -> bool {
    // Fast path: same allocation means same node.
    std::ptr::eq(a.0, b.0)
        || match (a.borrow(token), b.borrow(token)) {
            (Ir::Int(a), Ir::Int(b)) => a == b,
            // Bit-pattern comparison: NaN == NaN here, matching the
            // to_bits() hashing in `ir_content_hash`.
            (Ir::Float(a), Ir::Float(b)) => a.to_bits() == b.to_bits(),
            (Ir::Bool(a), Ir::Bool(b)) => a == b,
            (Ir::Null, Ir::Null) => true,
            (Ir::Str(a), Ir::Str(b)) => **a == **b,
            (
                Ir::AttrSet {
                    stcs: a_stcs,
                    dyns: a_dyns,
                },
                Ir::AttrSet {
                    stcs: b_stcs,
                    dyns: b_dyns,
                },
            ) => {
                // Static attrs compared key-wise (map order is irrelevant);
                // dynamic attrs compared positionally, as they are a Vec.
                a_stcs.len() == b_stcs.len()
                    && a_dyns.len() == b_dyns.len()
                    && a_stcs.iter().all(|(&k, &(av, _))| {
                        b_stcs
                            .get(&k)
                            .is_some_and(|&(bv, _)| ir_content_eq(av, bv, token))
                    })
                    && a_dyns
                        .iter()
                        .zip(b_dyns.iter())
                        .all(|(&(ak, av, _), &(bk, bv, _))| {
                            ir_content_eq(ak, bk, token) && ir_content_eq(av, bv, token)
                        })
            }
            (Ir::List { items: a }, Ir::List { items: b }) => {
                a.len() == b.len()
                    && a.iter()
                        .zip(b.iter())
                        .all(|(&a, &b)| ir_content_eq(a, b, token))
            }
            (Ir::HasAttr { lhs: al, rhs: ar }, Ir::HasAttr { lhs: bl, rhs: br }) => {
                ir_content_eq(*al, *bl, token)
                    && ar.len() == br.len()
                    && ar
                        .iter()
                        .zip(br.iter())
                        .all(|(a, b)| attr_content_eq(a, b, token))
            }
            (
                &Ir::BinOp {
                    lhs: al,
                    rhs: ar,
                    kind: ak,
                },
                &Ir::BinOp {
                    lhs: bl,
                    rhs: br,
                    kind: bk,
                },
            ) => ak == bk && ir_content_eq(al, bl, token) && ir_content_eq(ar, br, token),
            (&Ir::UnOp { rhs: ar, kind: ak }, &Ir::UnOp { rhs: br, kind: bk }) => {
                ak == bk && ir_content_eq(ar, br, token)
            }
            (
                Ir::Select {
                    expr: ae,
                    attrpath: aa,
                    default: ad,
                    ..
                },
                Ir::Select {
                    expr: be,
                    attrpath: ba,
                    default: bd,
                    ..
                },
            ) => {
                ir_content_eq(*ae, *be, token)
                    && aa.len() == ba.len()
                    && aa
                        .iter()
                        .zip(ba.iter())
                        .all(|(a, b)| attr_content_eq(a, b, token))
                    && match (ad, bd) {
                        (Some(a), Some(b)) => ir_content_eq(*a, *b, token),
                        (None, None) => true,
                        _ => false,
                    }
            }
            (
                &Ir::If {
                    cond: ac,
                    consq: acs,
                    alter: aa,
                },
                &Ir::If {
                    cond: bc,
                    consq: bcs,
                    alter: ba,
                },
            ) => {
                ir_content_eq(ac, bc, token)
                    && ir_content_eq(acs, bcs, token)
                    && ir_content_eq(aa, ba, token)
            }
            (
                &Ir::Call {
                    func: af, arg: aa, ..
                },
                &Ir::Call {
                    func: bf, arg: ba, ..
                },
            ) => ir_content_eq(af, bf, token) && ir_content_eq(aa, ba, token),
            (
                Ir::Assert {
                    assertion: aa,
                    expr: ae,
                    assertion_raw: ar,
                    ..
                },
                Ir::Assert {
                    assertion: ba,
                    expr: be,
                    assertion_raw: br,
                    ..
                },
            ) => ar == br && ir_content_eq(*aa, *ba, token) && ir_content_eq(*ae, *be, token),
            (
                Ir::ConcatStrings {
                    force_string: af,
                    parts: ap,
                },
                Ir::ConcatStrings {
                    force_string: bf,
                    parts: bp,
                },
            ) => {
                af == bf
                    && ap.len() == bp.len()
                    && ap
                        .iter()
                        .zip(bp.iter())
                        .all(|(&a, &b)| ir_content_eq(a, b, token))
            }
            (&Ir::Path(a), &Ir::Path(b)) => ir_content_eq(a, b, token),
            (
                Ir::Func {
                    body: ab,
                    arg: aa,
                    param: ap,
                    thunks: at,
                },
                Ir::Func {
                    body: bb,
                    arg: ba,
                    param: bp,
                    thunks: bt,
                },
            ) => {
                ir_content_eq(*ab, *bb, token)
                    && aa == ba
                    && match (ap, bp) {
                        (Some(a), Some(b)) => param_content_eq(a, b),
                        (None, None) => true,
                        _ => false,
                    }
                    && thunks_content_eq(at, bt, token)
            }
            (
                Ir::TopLevel {
                    body: ab,
                    thunks: at,
                },
                Ir::TopLevel {
                    body: bb,
                    thunks: bt,
                },
            ) => ir_content_eq(*ab, *bb, token) && thunks_content_eq(at, bt, token),
            // Leaf variants: compare the identifier/symbol payload directly.
            (Ir::Arg(a), Ir::Arg(b)) => a == b,
            (Ir::Thunk(a), Ir::Thunk(b)) => a == b,
            (Ir::Builtins, Ir::Builtins) => true,
            (Ir::Builtin(a), Ir::Builtin(b)) => a == b,
            (Ir::CurPos(a), Ir::CurPos(b)) => a == b,
            (Ir::ReplBinding(a), Ir::ReplBinding(b)) => a == b,
            (Ir::ScopedImportBinding(a), Ir::ScopedImportBinding(b)) => a == b,
            (
                Ir::With {
                    namespace: a_ns,
                    body: a_body,
                    thunks: a_thunks,
                },
                Ir::With {
                    namespace: b_ns,
                    body: b_body,
                    thunks: b_thunks,
                },
            ) => {
                ir_content_eq(*a_ns, *b_ns, token)
                    && ir_content_eq(*a_body, *b_body, token)
                    && thunks_content_eq(a_thunks, b_thunks, token)
            }
            // Different variants never compare equal.
            (Ir::WithLookup(a), Ir::WithLookup(b)) => a == b,
            _ => false,
        }
}

View File

@@ -5,7 +5,10 @@ pub mod error;
pub mod logging; pub mod logging;
pub mod value; pub mod value;
mod bytecode;
mod codegen; mod codegen;
mod derivation;
mod disassembler;
mod downgrade; mod downgrade;
mod fetcher; mod fetcher;
mod ir; mod ir;
@@ -13,6 +16,7 @@ mod nar;
mod nix_utils; mod nix_utils;
mod runtime; mod runtime;
mod store; mod store;
mod string_context;
#[global_allocator] #[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

View File

@@ -1,5 +1,6 @@
use std::env; use std::env;
use std::io::IsTerminal; use std::io::IsTerminal;
use tracing_subscriber::{EnvFilter, Layer, fmt, layer::SubscriberExt, util::SubscriberInitExt}; use tracing_subscriber::{EnvFilter, Layer, fmt, layer::SubscriberExt, util::SubscriberInitExt};
pub fn init_logging() { pub fn init_logging() {

187
nix-js/src/main.rs Normal file
View File

@@ -0,0 +1,187 @@
use std::path::PathBuf;
use std::process::exit;
use anyhow::Result;
use clap::{Args, Parser, Subcommand};
use hashbrown::HashSet;
use nix_js::context::Context;
use nix_js::error::Source;
use rustyline::DefaultEditor;
use rustyline::error::ReadlineError;
#[derive(Parser)]
#[command(name = "nix-js", about = "Nix expression evaluator")]
// Top-level CLI. NOTE: field comments deliberately use `//` instead of `///`
// so they are not picked up by clap as user-visible help text.
struct Cli {
    // V8 inspector listen address; a bare `--inspect` defaults to 127.0.0.1:9229.
    #[cfg(feature = "inspector")]
    #[arg(long, value_name = "HOST:PORT", num_args = 0..=1, default_missing_value = "127.0.0.1:9229")]
    inspect: Option<String>,
    // Like `--inspect`, but also requests a break on the first statement
    // (see `create_context`, which maps this to `wait = true`).
    #[cfg(feature = "inspector")]
    #[arg(long, value_name = "HOST:PORT", num_args = 0..=1, default_missing_value = "127.0.0.1:9229")]
    inspect_brk: Option<String>,
    #[command(subcommand)]
    command: Command,
}
#[derive(Subcommand)]
// Subcommands; comments use `//` so clap's generated help is unchanged.
enum Command {
    // Compile an expression/file to bytecode and print its disassembly.
    Compile {
        #[clap(flatten)]
        source: ExprSource,
        // Compile only: suppress the disassembly printout.
        #[arg(long)]
        silent: bool,
    },
    // Deep-evaluate an expression/file and print the resulting value.
    Eval {
        #[clap(flatten)]
        source: ExprSource,
    },
    // Interactive read-eval-print loop.
    Repl,
}
// Where the expression comes from. The clap group makes `--expr` and `--file`
// mutually exclusive, with exactly one required.
#[derive(Args)]
#[group(required = true, multiple = false)]
struct ExprSource {
    // Inline expression text (`-e` / `--expr`).
    #[clap(short, long)]
    expr: Option<String>,
    // Path to a file containing the expression (`-f` / `--file`).
    #[clap(short, long)]
    file: Option<PathBuf>,
}
// Builds the evaluation `Context`. With the "inspector" feature compiled in,
// optionally attaches a V8 inspector; the `cli` parameter only exists then
// (cfg on a function parameter).
fn create_context(#[cfg(feature = "inspector")] cli: &Cli) -> Result<Context> {
    #[cfg(feature = "inspector")]
    {
        // `--inspect-brk` wins over `--inspect` and additionally requests a
        // break before the first statement (wait = true).
        let (addr_str, wait) = if let Some(ref addr) = cli.inspect_brk {
            (Some(addr.as_str()), true)
        } else if let Some(ref addr) = cli.inspect {
            (Some(addr.as_str()), false)
        } else {
            (None, false)
        };
        if let Some(addr_str) = addr_str {
            let addr: std::net::SocketAddr = addr_str
                .parse()
                .map_err(|e| anyhow::anyhow!("invalid inspector address '{}': {}", addr_str, e))?;
            return Ok(Context::new_with_inspector(addr, wait)?);
        }
    }
    // No inspector requested (or feature disabled): plain context.
    Ok(Context::new()?)
}
/// Compiles the given expression or file to bytecode and, unless `silent`,
/// prints a colored disassembly. Exits the process on compile errors.
fn run_compile(context: &mut Context, src: ExprSource, silent: bool) -> Result<()> {
    // Exactly one of `expr`/`file` is set — enforced by the clap arg group.
    let source = match (src.expr, src.file) {
        (Some(expr), _) => Source::new_eval(expr)?,
        (_, Some(file)) => Source::new_file(file)?,
        _ => unreachable!(),
    };
    match context.compile_bytecode(source) {
        // Compile errors are reported as a miette diagnostic, then we bail.
        Err(err) => {
            eprintln!("{:?}", miette::Report::new(*err));
            exit(1);
        }
        Ok(compiled) if !silent => {
            println!("{}", context.disassemble_colored(&compiled));
        }
        Ok(_) => {}
    }
    #[cfg(feature = "inspector")]
    context.wait_for_inspector_disconnect();
    Ok(())
}
/// Deep-evaluates the given expression or file and prints the result in
/// Nix-compatible display form. Exits the process on evaluation errors.
fn run_eval(context: &mut Context, src: ExprSource) -> Result<()> {
    // Exactly one of `expr`/`file` is set — enforced by the clap arg group.
    let source = match (src.expr, src.file) {
        (Some(expr), _) => Source::new_eval(expr)?,
        (_, Some(file)) => Source::new_file(file)?,
        _ => unreachable!(),
    };
    match context.eval_deep(source) {
        Ok(value) => println!("{}", value.display_compat()),
        Err(err) => {
            eprintln!("{:?}", miette::Report::new(*err));
            exit(1);
        }
    }
    #[cfg(feature = "inspector")]
    context.wait_for_inspector_disconnect();
    Ok(())
}
// Interactive REPL. Lines of the form `ident = expr` add a binding to the
// session scope; anything else is evaluated and printed.
fn run_repl(context: &mut Context) -> Result<()> {
    let mut rl = DefaultEditor::new()?;
    // Names bound in this session; passed to scoped evaluation below.
    let mut scope = HashSet::new();
    // Capture 1: a leading identifier; capture 2: the remainder of the line.
    const RE: ere::Regex<3> = ere::compile_regex!("^[ \t]*([a-zA-Z_][a-zA-Z0-9_'-]*)[ \t]*(.*)$");
    loop {
        let readline = rl.readline("nix-js-repl> ");
        match readline {
            Ok(line) => {
                if line.trim().is_empty() {
                    continue;
                }
                let _ = rl.add_history_entry(line.as_str());
                if let Some([Some(_), Some(ident), Some(rest)]) = RE.exec(&line) {
                    if let Some(expr) = rest.strip_prefix('=') {
                        // `ident = expr`: define a new session binding.
                        let expr = expr.trim_start();
                        if expr.is_empty() {
                            eprintln!("Error: missing expression after '='");
                            continue;
                        }
                        match context.add_binding(ident, expr, &mut scope) {
                            Ok(value) => println!("{} = {}", ident, value),
                            Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                        }
                    } else {
                        // Starts with an identifier but is not an assignment:
                        // evaluate with the session bindings (`scope`) visible.
                        let src = Source::new_repl(line)?;
                        match context.eval_repl(src, &scope) {
                            Ok(value) => println!("{value}"),
                            Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                        }
                    }
                } else {
                    // Does not begin with an identifier: plain shallow eval.
                    let src = Source::new_repl(line)?;
                    match context.eval_shallow(src) {
                        Ok(value) => println!("{value}"),
                        Err(err) => eprintln!("{:?}", miette::Report::new(*err)),
                    }
                }
            }
            // Ctrl-C: discard the current line and re-prompt.
            Err(ReadlineError::Interrupted) => {
                println!();
            }
            // Ctrl-D: end the session.
            Err(ReadlineError::Eof) => {
                println!("CTRL-D");
                break;
            }
            Err(err) => {
                eprintln!("Error: {err:?}");
                break;
            }
        }
    }
    Ok(())
}
// Entry point: parse the CLI, build a context, dispatch to the subcommand.
fn main() -> Result<()> {
    nix_js::logging::init_logging();
    let cli = Cli::parse();
    // `create_context` only takes the CLI when the "inspector" feature is on;
    // the cfg on the argument mirrors the cfg on the parameter.
    let mut context = create_context(
        #[cfg(feature = "inspector")]
        &cli,
    )?;
    match cli.command {
        Command::Compile { source, silent } => run_compile(&mut context, source, silent),
        Command::Eval { source } => run_eval(&mut context, source),
        Command::Repl => run_repl(&mut context),
    }
}

View File

@@ -1,8 +1,9 @@
use nix_nar::Encoder;
use sha2::{Digest, Sha256};
use std::io::Read; use std::io::Read;
use std::path::Path; use std::path::Path;
use nix_nar::Encoder;
use sha2::{Digest, Sha256};
use crate::error::{Error, Result}; use crate::error::{Error, Result};
pub fn compute_nar_hash(path: &Path) -> Result<[u8; 32]> { pub fn compute_nar_hash(path: &Path) -> Result<[u8; 32]> {
@@ -27,11 +28,13 @@ pub fn pack_nar(path: &Path) -> Result<Vec<u8>> {
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)] #[allow(clippy::unwrap_used)]
mod tests { mod tests {
use super::*;
use std::fs; use std::fs;
use tempfile::TempDir; use tempfile::TempDir;
#[test] use super::*;
#[test_log::test]
fn test_simple_file() { fn test_simple_file() {
let temp = TempDir::new().unwrap(); let temp = TempDir::new().unwrap();
let file_path = temp.path().join("test.txt"); let file_path = temp.path().join("test.txt");
@@ -46,7 +49,7 @@ mod tests {
assert_eq!(hash.len(), 64); assert_eq!(hash.len(), 64);
} }
#[test] #[test_log::test]
fn test_directory() { fn test_directory() {
let temp = TempDir::new().unwrap(); let temp = TempDir::new().unwrap();
fs::write(temp.path().join("a.txt"), "aaa").unwrap(); fs::write(temp.path().join("a.txt"), "aaa").unwrap();

View File

@@ -2,14 +2,20 @@ use std::borrow::Cow;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::path::Path; use std::path::Path;
#[cfg(feature = "inspector")]
use deno_core::PollEventLoopOptions;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8}; use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
use crate::bytecode::{Bytecode, Constant};
use crate::error::{Error, Result, Source}; use crate::error::{Error, Result, Source};
use crate::store::DaemonStore; use crate::store::DaemonStore;
use crate::value::{AttrSet, List, Symbol, Value}; use crate::value::{AttrSet, List, Symbol, Value};
#[cfg(feature = "inspector")]
pub(crate) mod inspector;
mod ops; mod ops;
use ops::*; use ops::*;
mod value;
type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>; type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>;
type LocalValue<'a> = v8::Local<'a, v8::Value>; type LocalValue<'a> = v8::Local<'a, v8::Value>;
@@ -20,8 +26,12 @@ pub(crate) trait RuntimeContext: 'static {
fn add_source(&mut self, path: Source); fn add_source(&mut self, path: Source);
fn compile(&mut self, source: Source) -> Result<String>; fn compile(&mut self, source: Source) -> Result<String>;
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>; fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>;
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode>;
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode>;
fn get_source(&self, id: usize) -> Source; fn get_source(&self, id: usize) -> Source;
fn get_store(&self) -> &DaemonStore; fn get_store(&self) -> &DaemonStore;
fn get_span(&self, id: usize) -> (usize, rnix::TextRange);
fn get_unsynced(&mut self) -> (&[String], &[Constant], usize, usize);
} }
pub(crate) trait OpStateExt<Ctx: RuntimeContext> { pub(crate) trait OpStateExt<Ctx: RuntimeContext> {
@@ -46,31 +56,30 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
let mut ops = vec![ let mut ops = vec![
op_import::<Ctx>(), op_import::<Ctx>(),
op_scoped_import::<Ctx>(), op_scoped_import::<Ctx>(),
op_resolve_path(),
op_read_file(), op_read_file(),
op_read_file_type(), op_read_file_type(),
op_read_dir(), op_read_dir(),
op_path_exists(), op_path_exists(),
op_resolve_path(), op_walk_dir(),
op_sha256_hex(),
op_make_placeholder(), op_make_placeholder(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(),
op_parse_hash(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
op_store_path::<Ctx>(), op_store_path::<Ctx>(),
op_convert_hash(),
op_hash_string(),
op_hash_file(),
op_parse_hash(),
op_add_path::<Ctx>(),
op_add_filtered_path::<Ctx>(),
op_decode_span::<Ctx>(),
op_to_file::<Ctx>(), op_to_file::<Ctx>(),
op_write_derivation::<Ctx>(),
op_read_derivation_outputs(),
op_compute_fs_closure(),
op_copy_path_to_store::<Ctx>(), op_copy_path_to_store::<Ctx>(),
op_get_env(), op_get_env(),
op_walk_dir(),
op_add_filtered_path::<Ctx>(),
op_match(), op_match(),
op_split(), op_split(),
op_from_json(), op_from_json(),
op_from_toml(), op_from_toml(),
op_finalize_derivation::<Ctx>(),
op_to_xml(),
]; ];
ops.extend(crate::fetcher::register_ops::<Ctx>()); ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -86,9 +95,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
mod private { mod private {
use deno_error::js_error_wrapper; use deno_error::js_error_wrapper;
#[allow(dead_code)]
#[derive(Debug)] #[derive(Debug)]
pub struct SimpleErrorWrapper(pub(crate) String); pub struct SimpleErrorWrapper(String);
impl std::fmt::Display for SimpleErrorWrapper { impl std::fmt::Display for SimpleErrorWrapper {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.0, f) std::fmt::Display::fmt(&self.0, f)
@@ -113,24 +121,37 @@ pub(crate) use private::NixRuntimeError;
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime, js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>, #[cfg(feature = "inspector")]
primop_metadata_symbol: v8::Global<v8::Symbol>, rt: tokio::runtime::Runtime,
has_context_symbol: v8::Global<v8::Symbol>, #[cfg(feature = "inspector")]
is_path_symbol: v8::Global<v8::Symbol>, wait_for_inspector: bool,
is_cycle_symbol: v8::Global<v8::Symbol>, symbols: GlobalSymbols,
cached_fns: CachedFunctions,
_marker: PhantomData<Ctx>, _marker: PhantomData<Ctx>,
} }
#[cfg(feature = "inspector")]
#[derive(Debug, Clone, Copy, Default)]
pub(crate) struct InspectorOptions {
pub(crate) enable: bool,
pub(crate) wait: bool,
}
impl<Ctx: RuntimeContext> Runtime<Ctx> { impl<Ctx: RuntimeContext> Runtime<Ctx> {
pub(crate) fn new() -> Result<Self> { pub(crate) fn new(
#[cfg(feature = "inspector")] inspector_options: InspectorOptions,
) -> Result<Self> {
use std::sync::Once; use std::sync::Once;
// Initialize V8 once
static INIT: Once = Once::new(); static INIT: Once = Once::new();
INIT.call_once(|| { INIT.call_once(|| {
// First flag is always not recognized
assert_eq!( assert_eq!(
deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]), deno_core::v8_set_flags(vec![
"".into(),
format!("--stack-size={}", 8 * 1024),
#[cfg(feature = "prof")]
("--prof".into())
]),
[""] [""]
); );
JsRuntime::init_platform(Some(v8::new_default_platform(0, false).make_shared())); JsRuntime::init_platform(Some(v8::new_default_platform(0, false).make_shared()));
@@ -138,37 +159,64 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let mut js_runtime = JsRuntime::new(RuntimeOptions { let mut js_runtime = JsRuntime::new(RuntimeOptions {
extensions: vec![runtime_extension::<Ctx>()], extensions: vec![runtime_extension::<Ctx>()],
#[cfg(feature = "inspector")]
inspector: inspector_options.enable,
is_main: true,
..Default::default() ..Default::default()
}); });
js_runtime.op_state().borrow_mut().put(RegexCache::new()); js_runtime.op_state().borrow_mut().put(RegexCache::new());
js_runtime.op_state().borrow_mut().put(DrvHashCache::new());
let ( let (symbols, cached_fns) = {
is_thunk_symbol,
primop_metadata_symbol,
has_context_symbol,
is_path_symbol,
is_cycle_symbol,
) = {
deno_core::scope!(scope, &mut js_runtime); deno_core::scope!(scope, &mut js_runtime);
Self::get_symbols(scope)? let symbols = Self::get_symbols(scope)?;
let cached_fns = Self::get_cached_functions(scope)?;
(symbols, cached_fns)
}; };
Ok(Self { Ok(Self {
js_runtime, js_runtime,
is_thunk_symbol, #[cfg(feature = "inspector")]
primop_metadata_symbol, rt: tokio::runtime::Builder::new_current_thread()
has_context_symbol, .enable_all()
is_path_symbol, .build()
is_cycle_symbol, .expect("failed to build tokio runtime"),
#[cfg(feature = "inspector")]
wait_for_inspector: inspector_options.wait,
symbols,
cached_fns,
_marker: PhantomData, _marker: PhantomData,
}) })
} }
// Exposes the underlying deno_core inspector handle for this runtime.
#[cfg(feature = "inspector")]
pub(crate) fn inspector(&self) -> std::rc::Rc<deno_core::JsRuntimeInspector> {
    self.js_runtime.inspector()
}
// Pumps the JS event loop until the attached inspector session disconnects.
// Event-loop errors are deliberately ignored (`let _`): this runs after the
// main work has finished and only keeps the process alive for the debugger.
#[cfg(feature = "inspector")]
pub(crate) fn wait_for_inspector_disconnect(&mut self) {
    let _ = self
        .rt
        .block_on(self.js_runtime.run_event_loop(PollEventLoopOptions {
            wait_for_inspector: true,
            ..Default::default()
        }));
}
pub(crate) fn eval(&mut self, script: String, ctx: &mut Ctx) -> Result<Value> { pub(crate) fn eval(&mut self, script: String, ctx: &mut Ctx) -> Result<Value> {
let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) }; let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) };
self.js_runtime.op_state().borrow_mut().put(ctx); self.js_runtime.op_state().borrow_mut().put(ctx);
#[cfg(feature = "inspector")]
if self.wait_for_inspector {
self.js_runtime
.inspector()
.wait_for_session_and_break_on_next_statement();
} else {
self.js_runtime.inspector().wait_for_session();
}
let global_value = self let global_value = self
.js_runtime .js_runtime
.execute_script("<eval>", script) .execute_script("<eval>", script)
@@ -183,34 +231,93 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
// Retrieve scope from JsRuntime // Retrieve scope from JsRuntime
deno_core::scope!(scope, self.js_runtime); deno_core::scope!(scope, self.js_runtime);
let local_value = v8::Local::new(scope, &global_value); let local_value = v8::Local::new(scope, &global_value);
let is_thunk_symbol = v8::Local::new(scope, &self.is_thunk_symbol); let symbols = &self.symbols.local(scope);
let primop_metadata_symbol = v8::Local::new(scope, &self.primop_metadata_symbol);
let has_context_symbol = v8::Local::new(scope, &self.has_context_symbol);
let is_path_symbol = v8::Local::new(scope, &self.is_path_symbol);
let is_cycle_symbol = v8::Local::new(scope, &self.is_cycle_symbol);
Ok(to_value( Ok(to_value(local_value, scope, symbols))
local_value,
scope,
is_thunk_symbol,
primop_metadata_symbol,
has_context_symbol,
is_path_symbol,
is_cycle_symbol,
))
} }
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH, IS_CYCLE) pub(crate) fn eval_bytecode(
#[allow(clippy::type_complexity)] &mut self,
fn get_symbols( result: Bytecode,
scope: &ScopeRef, ctx: &mut Ctx,
) -> Result<( force_mode: ForceMode,
v8::Global<v8::Symbol>, ) -> Result<Value> {
v8::Global<v8::Symbol>, let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) };
v8::Global<v8::Symbol>, {
v8::Global<v8::Symbol>, deno_core::scope!(scope, self.js_runtime);
v8::Global<v8::Symbol>, sync_global_tables(scope, &self.cached_fns, ctx);
)> { }
let op_state = self.js_runtime.op_state();
op_state.borrow_mut().put(ctx);
#[cfg(feature = "inspector")]
if self.wait_for_inspector {
self.js_runtime
.inspector()
.wait_for_session_and_break_on_next_statement();
} else {
self.js_runtime.inspector().wait_for_session();
}
deno_core::scope!(scope, self.js_runtime);
let store = v8::ArrayBuffer::new_backing_store_from_boxed_slice(result.code);
let ab = v8::ArrayBuffer::with_backing_store(scope, &store.make_shared());
let u8a = v8::Uint8Array::new(scope, ab, 0, ab.byte_length())
.ok_or_else(|| Error::internal("failed to create Uint8Array".into()))?;
let dir = v8::String::new(scope, &result.current_dir)
.ok_or_else(|| Error::internal("failed to create dir string".into()))?;
let undef = v8::undefined(scope);
let tc = std::pin::pin!(v8::TryCatch::new(scope));
let scope = &mut tc.init();
let exec_bytecode = v8::Local::new(scope, &self.cached_fns.exec_bytecode);
let raw_result = exec_bytecode
.call(scope, undef.into(), &[u8a.into(), dir.into()])
.ok_or_else(|| {
scope
.exception()
.map(|e| {
let op_state_borrow = op_state.borrow();
let ctx: &Ctx = op_state_borrow.get_ctx();
Box::new(crate::error::parse_js_error(
deno_core::error::JsError::from_v8_exception(scope, e),
ctx,
))
})
.unwrap_or_else(|| Error::internal("bytecode execution failed".into()))
})?;
let force_fn = match force_mode {
ForceMode::Force => &self.cached_fns.force_fn,
ForceMode::ForceShallow => &self.cached_fns.force_shallow_fn,
ForceMode::ForceDeep => &self.cached_fns.force_deep_fn,
};
let force_fn = v8::Local::new(scope, force_fn);
let forced = force_fn
.call(scope, undef.into(), &[raw_result])
.ok_or_else(|| {
scope
.exception()
.map(|e| {
let op_state_borrow = op_state.borrow();
let ctx: &Ctx = op_state_borrow.get_ctx();
Box::new(crate::error::parse_js_error(
deno_core::error::JsError::from_v8_exception(scope, e),
ctx,
))
})
.unwrap_or_else(|| Error::internal("force failed".into()))
})?;
let symbols = &self.symbols.local(scope);
Ok(to_value(forced, scope, symbols))
}
fn get_symbols(scope: &ScopeRef) -> Result<GlobalSymbols> {
let global = scope.get_current_context().global(scope); let global = scope.get_current_context().global(scope);
let nix_key = v8::String::new(scope, "Nix") let nix_key = v8::String::new(scope, "Nix")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?; .ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
@@ -242,18 +349,148 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
let is_path = get_symbol("IS_PATH")?; let is_path = get_symbol("IS_PATH")?;
let is_cycle = get_symbol("IS_CYCLE")?; let is_cycle = get_symbol("IS_CYCLE")?;
Ok((is_thunk, primop_metadata, has_context, is_path, is_cycle)) Ok(GlobalSymbols {
is_thunk,
primop_metadata,
has_context,
is_path,
is_cycle,
})
}
fn get_cached_functions(scope: &ScopeRef) -> Result<CachedFunctions> {
let global = scope.get_current_context().global(scope);
let nix_key = v8::String::new(scope, "Nix")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let nix_obj = global
.get(scope, nix_key.into())
.ok_or_else(|| Error::internal("failed to get global Nix object".into()))?
.to_object(scope)
.ok_or_else(|| {
Error::internal("failed to convert global Nix Value to object".into())
})?;
let get_fn = |name: &str| -> Result<v8::Global<v8::Function>> {
let key = v8::String::new(scope, name)
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let val = nix_obj
.get(scope, key.into())
.ok_or_else(|| Error::internal(format!("failed to get Nix.{name}")))?;
let func = val
.try_cast::<v8::Function>()
.map_err(|err| Error::internal(format!("Nix.{name} is not a function ({err})")))?;
Ok(v8::Global::new(scope, func))
};
let exec_bytecode = get_fn("execBytecode")?;
let force_fn = get_fn("force")?;
let force_shallow_fn = get_fn("forceShallow")?;
let force_deep_fn = get_fn("forceDeep")?;
let strings_key = v8::String::new(scope, "strings")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let strings_array = nix_obj
.get(scope, strings_key.into())
.ok_or_else(|| Error::internal("failed to get Nix.strings".into()))?
.try_cast::<v8::Array>()
.map_err(|err| Error::internal(format!("Nix.strings is not an array ({err})")))?;
let constants_key = v8::String::new(scope, "constants")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let constants_array = nix_obj
.get(scope, constants_key.into())
.ok_or_else(|| Error::internal("failed to get Nix.constants".into()))?
.try_cast::<v8::Array>()
.map_err(|err| Error::internal(format!("Nix.constants is not an array ({err})")))?;
Ok(CachedFunctions {
exec_bytecode,
force_fn,
force_shallow_fn,
force_deep_fn,
strings_array: v8::Global::new(scope, strings_array),
constants_array: v8::Global::new(scope, constants_array),
})
}
}
/// V8 symbols looked up once at startup and pinned as global handles so they
/// survive across handle scopes.
struct GlobalSymbols {
    // Marks an object as an unevaluated thunk (see `to_value`).
    is_thunk: v8::Global<v8::Symbol>,
    // Carries primop metadata on builtin function objects (see `to_primop`).
    primop_metadata: v8::Global<v8::Symbol>,
    // Marks a string value that carries Nix string context.
    has_context: v8::Global<v8::Symbol>,
    // Marks an object wrapping a Nix path value.
    is_path: v8::Global<v8::Symbol>,
    // Marks a node already visited during conversion (cycle detection).
    is_cycle: v8::Global<v8::Symbol>,
}
impl GlobalSymbols {
    /// Re-enter every cached symbol as a local handle for the given scope.
    fn local<'a>(&self, scope: &ScopeRef<'a, '_>) -> LocalSymbols<'a> {
        let Self {
            is_thunk,
            primop_metadata,
            has_context,
            is_path,
            is_cycle,
        } = self;
        LocalSymbols {
            is_thunk: v8::Local::new(scope, is_thunk),
            primop_metadata: v8::Local::new(scope, primop_metadata),
            has_context: v8::Local::new(scope, has_context),
            is_path: v8::Local::new(scope, is_path),
            is_cycle: v8::Local::new(scope, is_cycle),
        }
    }
}
/// Handle-scope-local views of the cached global symbols, valid for `'a`.
struct LocalSymbols<'a> {
    is_thunk: v8::Local<'a, v8::Symbol>,
    primop_metadata: v8::Local<'a, v8::Symbol>,
    has_context: v8::Local<'a, v8::Symbol>,
    is_path: v8::Local<'a, v8::Symbol>,
    is_cycle: v8::Local<'a, v8::Symbol>,
}
/// JS-side entry points and tables fetched once from the global `Nix` object
/// and cached as global handles.
struct CachedFunctions {
    // Nix.execBytecode — entry point for running compiled bytecode.
    exec_bytecode: v8::Global<v8::Function>,
    // Nix.force / Nix.forceShallow / Nix.forceDeep — thunk-forcing variants.
    force_fn: v8::Global<v8::Function>,
    force_shallow_fn: v8::Global<v8::Function>,
    force_deep_fn: v8::Global<v8::Function>,
    // Nix.strings / Nix.constants — tables kept in sync from the Rust side
    // by `sync_global_tables`.
    strings_array: v8::Global<v8::Array>,
    constants_array: v8::Global<v8::Array>,
}
/// Selects which JS-side forcing function is applied to a value
/// (presumably `Nix.force`, `Nix.forceShallow`, `Nix.forceDeep`
/// respectively — names match `CachedFunctions`).
pub(crate) enum ForceMode {
    Force,
    ForceShallow,
    ForceDeep,
}
fn sync_global_tables<Ctx: RuntimeContext>(
    scope: &ScopeRef,
    cached: &CachedFunctions,
    ctx: &mut Ctx,
) {
    // The context reports entries added since the last sync together with the
    // index each table had at that point.
    let (new_strings, new_constants, strings_base, constants_base) = ctx.get_unsynced();
    // Mirror newly interned strings into `Nix.strings` at their assigned slots.
    if !new_strings.is_empty() {
        let strings = v8::Local::new(scope, &cached.strings_array);
        for (offset, text) in new_strings.iter().enumerate() {
            #[allow(clippy::unwrap_used)]
            let js_str = v8::String::new(scope, text).unwrap();
            strings.set_index(scope, (strings_base + offset) as u32, js_str.into());
        }
    }
    // Mirror new numeric constants into `Nix.constants`.
    if !new_constants.is_empty() {
        let constants = v8::Local::new(scope, &cached.constants_array);
        for (offset, constant) in new_constants.iter().enumerate() {
            let js_val: v8::Local<v8::Value> = match constant {
                Constant::Int(n) => v8::BigInt::new_from_i64(scope, *n).into(),
                Constant::Float(bits) => v8::Number::new(scope, f64::from_bits(*bits)).into(),
            };
            constants.set_index(scope, (constants_base + offset) as u32, js_val);
        }
    }
}
} }
fn to_value<'a>( fn to_value<'a>(
val: LocalValue<'a>, val: LocalValue<'a>,
scope: &ScopeRef<'a, '_>, scope: &ScopeRef<'a, '_>,
is_thunk_symbol: LocalSymbol<'a>, symbols: &LocalSymbols<'a>,
primop_metadata_symbol: LocalSymbol<'a>,
has_context_symbol: LocalSymbol<'a>,
is_path_symbol: LocalSymbol<'a>,
is_cycle_symbol: LocalSymbol<'a>,
) -> Value { ) -> Value {
match () { match () {
_ if val.is_big_int() => { _ if val.is_big_int() => {
@@ -283,40 +520,51 @@ fn to_value<'a>(
let list = (0..len) let list = (0..len)
.map(|i| { .map(|i| {
let val = val.get_index(scope, i).expect("infallible index operation"); let val = val.get_index(scope, i).expect("infallible index operation");
to_value( to_value(val, scope, symbols)
val,
scope,
is_thunk_symbol,
primop_metadata_symbol,
has_context_symbol,
is_path_symbol,
is_cycle_symbol,
)
}) })
.collect(); .collect();
Value::List(List::new(list)) Value::List(List::new(list))
} }
_ if val.is_function() => { _ if val.is_function() => {
if let Some(primop) = to_primop(val, scope, primop_metadata_symbol) { if let Some(primop) = to_primop(val, scope, symbols.primop_metadata) {
primop primop
} else { } else {
Value::Func Value::Func
} }
} }
_ if val.is_map() => {
let val = val.try_cast::<v8::Map>().expect("infallible conversion");
let size = val.size() as u32;
let array = val.as_array(scope);
let attrs = (0..size)
.map(|i| {
let key = array
.get_index(scope, i * 2)
.expect("infallible index operation");
let key = key.to_rust_string_lossy(scope);
let val = array
.get_index(scope, i * 2 + 1)
.expect("infallible index operation");
let val = to_value(val, scope, symbols);
(Symbol::new(Cow::Owned(key)), val)
})
.collect();
Value::AttrSet(AttrSet::new(attrs))
}
_ if val.is_object() => { _ if val.is_object() => {
if is_thunk(val, scope, is_thunk_symbol) { if is_thunk(val, scope, symbols.is_thunk) {
return Value::Thunk; return Value::Thunk;
} }
if is_cycle(val, scope, is_cycle_symbol) { if is_cycle(val, scope, symbols.is_cycle) {
return Value::Repeated; return Value::Repeated;
} }
if let Some(path_val) = extract_path(val, scope, is_path_symbol) { if let Some(path_val) = extract_path(val, scope, symbols.is_path) {
return Value::Path(path_val); return Value::Path(path_val);
} }
if let Some(string_val) = extract_string_with_context(val, scope, has_context_symbol) { if let Some(string_val) = extract_string_with_context(val, scope, symbols.has_context) {
return Value::String(string_val); return Value::String(string_val);
} }
@@ -332,18 +580,7 @@ fn to_value<'a>(
.expect("infallible index operation"); .expect("infallible index operation");
let val = val.get(scope, key).expect("infallible operation"); let val = val.get(scope, key).expect("infallible operation");
let key = key.to_rust_string_lossy(scope); let key = key.to_rust_string_lossy(scope);
( (Symbol::from(key), to_value(val, scope, symbols))
Symbol::from(key),
to_value(
val,
scope,
is_thunk_symbol,
primop_metadata_symbol,
has_context_symbol,
is_path_symbol,
is_cycle_symbol,
),
)
}) })
.collect(); .collect();
Value::AttrSet(AttrSet::new(attrs)) Value::AttrSet(AttrSet::new(attrs))

View File

@@ -0,0 +1,493 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Alias for the future `!` type.
use core::convert::Infallible as Never;
use std::cell::RefCell;
use std::net::SocketAddr;
use std::pin::pin;
use std::process;
use std::rc::Rc;
use std::task::Poll;
use std::thread;
use deno_core::InspectorMsg;
use deno_core::InspectorSessionChannels;
use deno_core::InspectorSessionKind;
use deno_core::InspectorSessionProxy;
use deno_core::JsRuntimeInspector;
use deno_core::anyhow::Context;
use deno_core::futures::channel::mpsc;
use deno_core::futures::channel::mpsc::UnboundedReceiver;
use deno_core::futures::channel::mpsc::UnboundedSender;
use deno_core::futures::channel::oneshot;
use deno_core::futures::prelude::*;
use deno_core::futures::stream::StreamExt;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use fastwebsockets::Frame;
use fastwebsockets::OpCode;
use fastwebsockets::WebSocket;
use hashbrown::HashMap;
use hyper::body::Bytes;
use hyper_util::rt::TokioIo;
use tokio::net::TcpListener;
use tokio::sync::broadcast;
use uuid::Uuid;
/// Websocket server that is used to proxy connections from
/// devtools to the inspector.
pub struct InspectorServer {
    /// Address the server is listening on.
    pub host: SocketAddr,
    // Hands newly created inspectors to the server thread for registration.
    register_inspector_tx: UnboundedSender<InspectorInfo>,
    // Taken in `Drop` to signal the server loop to shut down.
    shutdown_server_tx: Option<broadcast::Sender<()>>,
    // Joined in `Drop` so the server thread fully terminates.
    thread_handle: Option<thread::JoinHandle<()>>,
}
impl InspectorServer {
    /// Bind a TCP listener on `host` and spawn a dedicated thread running a
    /// current-thread tokio runtime that serves the inspector protocol.
    ///
    /// `name` is reported as the `Browser` value in `/json/version` responses.
    pub fn new(host: SocketAddr, name: &'static str) -> Result<Self, anyhow::Error> {
        let (register_inspector_tx, register_inspector_rx) = mpsc::unbounded::<InspectorInfo>();
        let (shutdown_server_tx, shutdown_server_rx) = broadcast::channel(1);
        // Bind synchronously so startup errors surface to the caller; the
        // listener is converted to a tokio listener on the server thread.
        let tcp_listener = std::net::TcpListener::bind(host)
            .with_context(|| format!("Failed to bind inspector server socket at {}", host))?;
        tcp_listener.set_nonblocking(true)?;
        let thread_handle = thread::spawn(move || {
            let rt = tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
                .expect("failed to build tokio runtime");
            // LocalSet: the server futures hold !Send state (Rc/RefCell map).
            let local = tokio::task::LocalSet::new();
            local.block_on(
                &rt,
                server(
                    tcp_listener,
                    register_inspector_rx,
                    shutdown_server_rx,
                    name,
                ),
            )
        });
        Ok(Self {
            host,
            register_inspector_tx,
            shutdown_server_tx: Some(shutdown_server_tx),
            thread_handle: Some(thread_handle),
        })
    }
    /// Register a JS runtime's inspector with the server so devtools can
    /// connect to it over a websocket at `/ws/<uuid>`.
    pub fn register_inspector(
        &self,
        module_url: String,
        inspector: Rc<JsRuntimeInspector>,
        wait_for_session: bool,
    ) {
        let session_sender = inspector.get_session_sender();
        let deregister_rx = inspector.add_deregister_handler();
        let info = InspectorInfo::new(
            self.host,
            session_sender,
            deregister_rx,
            module_url,
            wait_for_session,
        );
        self.register_inspector_tx
            .unbounded_send(info)
            .expect("unreachable");
    }
}
impl Drop for InspectorServer {
    /// Signal the server loop to stop, then wait for its thread to exit.
    fn drop(&mut self) {
        if let Some(tx) = self.shutdown_server_tx.take() {
            tx.send(()).expect("unable to send shutdown signal");
        }
        if let Some(handle) = self.thread_handle.take() {
            handle.join().expect("unable to join thread");
        }
    }
}
/// Handle `GET /ws/<uuid>`: validate the target UUID, perform the websocket
/// upgrade, and spawn a task that pumps messages between the socket and the
/// inspector session.
fn handle_ws_request(
    req: http::Request<hyper::body::Incoming>,
    inspector_map_rc: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
    // Detach the body so the request line can be inspected without consuming it.
    let (parts, body) = req.into_parts();
    let req = http::Request::from_parts(parts, ());
    let maybe_uuid = req
        .uri()
        .path()
        .strip_prefix("/ws/")
        .and_then(|s| Uuid::parse_str(s).ok());
    let Some(uuid) = maybe_uuid else {
        return http::Response::builder()
            .status(http::StatusCode::BAD_REQUEST)
            .body(Box::new(Bytes::from("Malformed inspector UUID").into()));
    };
    // run in a block to not hold borrow to `inspector_map` for too long
    let new_session_tx = {
        let inspector_map = inspector_map_rc.borrow();
        let maybe_inspector_info = inspector_map.get(&uuid);
        let Some(info) = maybe_inspector_info else {
            return http::Response::builder()
                .status(http::StatusCode::NOT_FOUND)
                .body(Box::new(Bytes::from("Invalid inspector UUID").into()));
        };
        info.new_session_tx.clone()
    };
    // Reattach the body: the websocket upgrade consumes the full request.
    let (parts, _) = req.into_parts();
    let mut req = http::Request::from_parts(parts, body);
    let Ok((resp, upgrade_fut)) = fastwebsockets::upgrade::upgrade(&mut req) else {
        return http::Response::builder()
            .status(http::StatusCode::BAD_REQUEST)
            .body(Box::new(
                Bytes::from("Not a valid Websocket Request").into(),
            ));
    };
    // spawn a task that will wait for websocket connection and then pump messages between
    // the socket and inspector proxy
    spawn(async move {
        let websocket = match upgrade_fut.await {
            Ok(w) => w,
            Err(err) => {
                eprintln!(
                    "Inspector server failed to upgrade to WS connection: {:?}",
                    err
                );
                return;
            }
        };
        // The 'outbound' channel carries messages sent to the websocket.
        let (outbound_tx, outbound_rx) = mpsc::unbounded();
        // The 'inbound' channel carries messages received from the websocket.
        let (inbound_tx, inbound_rx) = mpsc::unbounded();
        let inspector_session_proxy = InspectorSessionProxy {
            channels: InspectorSessionChannels::Regular {
                tx: outbound_tx,
                rx: inbound_rx,
            },
            kind: InspectorSessionKind::NonBlocking {
                wait_for_disconnect: true,
            },
        };
        eprintln!("Debugger session started.");
        let _ = new_session_tx.unbounded_send(inspector_session_proxy);
        pump_websocket_messages(websocket, inbound_tx, outbound_rx).await;
    });
    // Replace the upgrade response body with the boxed Full<Bytes> type the
    // rest of the service returns.
    let (parts, _body) = resp.into_parts();
    let resp = http::Response::from_parts(parts, Box::new(http_body_util::Full::new(Bytes::new())));
    Ok(resp)
}
fn handle_json_request(
inspector_map: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
host: Option<String>,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
let data = inspector_map
.borrow()
.values()
.map(move |info| info.get_json_metadata(&host))
.collect::<Vec<_>>();
let body: http_body_util::Full<Bytes> =
Bytes::from(serde_json::to_string(&data).expect("unreachable")).into();
http::Response::builder()
.status(http::StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.body(Box::new(body))
}
/// Serve `/json/version`: a static JSON document built once in `server`.
fn handle_json_version_request(
    version_response: Value,
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
    let payload = serde_json::to_string(&version_response).expect("unreachable");
    http::Response::builder()
        .status(http::StatusCode::OK)
        .header(http::header::CONTENT_TYPE, "application/json")
        .body(Box::new(http_body_util::Full::from(payload)))
}
/// Main server loop: accepts TCP connections and serves the DevTools
/// discovery endpoints (`/json`, `/json/list`, `/json/version`) plus the
/// per-inspector websocket endpoint (`/ws/<uuid>`).
async fn server(
    listener: std::net::TcpListener,
    register_inspector_rx: UnboundedReceiver<InspectorInfo>,
    shutdown_server_rx: broadcast::Receiver<()>,
    name: &str,
) {
    let inspector_map_ = Rc::new(RefCell::new(HashMap::<Uuid, InspectorInfo>::new()));
    let inspector_map = Rc::clone(&inspector_map_);
    // Task that inserts newly registered inspectors into the shared map.
    let register_inspector_handler =
        listen_for_new_inspectors(register_inspector_rx, inspector_map.clone()).boxed_local();
    let inspector_map = Rc::clone(&inspector_map_);
    // Task that drops entries whose deregister channel has resolved.
    let deregister_inspector_handler = future::poll_fn(|cx| {
        inspector_map
            .borrow_mut()
            .retain(|_, info| info.deregister_rx.poll_unpin(cx) == Poll::Pending);
        Poll::<Never>::Pending
    })
    .boxed_local();
    let json_version_response = json!({
        "Browser": name,
        "Protocol-Version": "1.3",
        "V8-Version": deno_core::v8::VERSION_STRING,
    });
    // Create the server manually so it can use the Local Executor
    let listener = match TcpListener::from_std(listener) {
        Ok(l) => l,
        Err(err) => {
            eprintln!("Cannot create async listener from std listener: {:?}", err);
            return;
        }
    };
    let server_handler = async move {
        loop {
            let mut rx = shutdown_server_rx.resubscribe();
            let mut shutdown_rx = pin!(rx.recv());
            let mut accept = pin!(listener.accept());
            // Accept the next connection, or exit the loop on shutdown.
            let stream = tokio::select! {
                accept_result =
                &mut accept => {
                    match accept_result {
                        Ok((s, _)) => s,
                        Err(err) => {
                            eprintln!("Failed to accept inspector connection: {:?}", err);
                            continue;
                        }
                    }
                },
                _ = &mut shutdown_rx => {
                    break;
                }
            };
            let io = TokioIo::new(stream);
            let inspector_map = Rc::clone(&inspector_map_);
            let json_version_response = json_version_response.clone();
            let mut shutdown_server_rx = shutdown_server_rx.resubscribe();
            let service =
                hyper::service::service_fn(move |req: http::Request<hyper::body::Incoming>| {
                    future::ready({
                        // If the host header can make a valid URL, use it
                        let host = req
                            .headers()
                            .get("host")
                            .and_then(|host| host.to_str().ok())
                            .and_then(|host| Url::parse(&format!("http://{host}")).ok())
                            .and_then(|url| match (url.host(), url.port()) {
                                (Some(host), Some(port)) => Some(format!("{host}:{port}")),
                                (Some(host), None) => Some(format!("{host}")),
                                _ => None,
                            });
                        match (req.method(), req.uri().path()) {
                            (&http::Method::GET, path) if path.starts_with("/ws/") => {
                                handle_ws_request(req, Rc::clone(&inspector_map))
                            }
                            (&http::Method::GET, "/json/version") => {
                                handle_json_version_request(json_version_response.clone())
                            }
                            (&http::Method::GET, "/json") => {
                                handle_json_request(Rc::clone(&inspector_map), host)
                            }
                            (&http::Method::GET, "/json/list") => {
                                handle_json_request(Rc::clone(&inspector_map), host)
                            }
                            _ => http::Response::builder()
                                .status(http::StatusCode::NOT_FOUND)
                                .body(Box::new(http_body_util::Full::new(Bytes::from(
                                    "Not Found",
                                )))),
                        }
                    })
                });
            // Serve each connection on its own local task so accepting continues.
            deno_core::unsync::spawn(async move {
                let server = hyper::server::conn::http1::Builder::new();
                let mut conn = pin!(server.serve_connection(io, service).with_upgrades());
                let mut shutdown_rx = pin!(shutdown_server_rx.recv());
                tokio::select! {
                    result = conn.as_mut() => {
                        if let Err(err) = result {
                            eprintln!("Failed to serve connection: {:?}", err);
                        }
                    },
                    _ = &mut shutdown_rx => {
                        // On shutdown, finish in-flight work before closing.
                        conn.as_mut().graceful_shutdown();
                        let _ = conn.await;
                    }
                }
            });
        }
    }
    .boxed_local();
    // Drive all three tasks; deregistration never completes on its own.
    tokio::select! {
        _ = register_inspector_handler => {},
        _ = deregister_inspector_handler => unreachable!(),
        _ = server_handler => {},
    }
}
/// Receive inspector registrations, announce them on stderr, and insert them
/// into the shared map keyed by UUID.
async fn listen_for_new_inspectors(
    mut register_inspector_rx: UnboundedReceiver<InspectorInfo>,
    inspector_map: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
) {
    while let Some(info) = register_inspector_rx.next().await {
        let ws_url = info.get_websocket_debugger_url(&info.host.to_string());
        eprintln!("Debugger listening on {}", ws_url);
        eprintln!("Visit chrome://inspect to connect to the debugger.");
        if info.wait_for_session {
            eprintln!("nix-js is waiting for debugger to connect.");
        }
        let uuid = info.uuid;
        let previous = inspector_map.borrow_mut().insert(uuid, info);
        if previous.is_some() {
            panic!("Inspector UUID already in map");
        }
    }
}
/// The pump future takes care of forwarding messages between the websocket
/// and channels. It resolves when either side disconnects, ignoring any
/// errors.
///
/// The future proxies messages sent and received on a WebSocket
/// to a UnboundedSender/UnboundedReceiver pair. We need these "unbounded" channel ends to sidestep
/// Tokio's task budget, which causes issues when JsRuntimeInspector::poll_sessions()
/// needs to block the thread because JavaScript execution is paused.
///
/// This works because UnboundedSender/UnboundedReceiver are implemented in the
/// 'futures' crate, therefore they can't participate in Tokio's cooperative
/// task yielding.
async fn pump_websocket_messages(
    mut websocket: WebSocket<TokioIo<hyper::upgrade::Upgraded>>,
    inbound_tx: UnboundedSender<String>,
    mut outbound_rx: UnboundedReceiver<InspectorMsg>,
) {
    'pump: loop {
        tokio::select! {
            // Inspector → websocket: forward outbound messages as text frames.
            Some(msg) = outbound_rx.next() => {
                let msg = Frame::text(msg.content.into_bytes().into());
                let _ = websocket.write_frame(msg).await;
            }
            // Websocket → inspector: forward valid UTF-8 text frames inbound.
            Ok(msg) = websocket.read_frame() => {
                match msg.opcode {
                    OpCode::Text => {
                        if let Ok(s) = String::from_utf8(msg.payload.to_vec()) {
                            let _ = inbound_tx.unbounded_send(s);
                        }
                    }
                    OpCode::Close => {
                        // Users don't care if there was an error coming from debugger,
                        // just about the fact that debugger did disconnect.
                        eprintln!("Debugger session ended");
                        break 'pump;
                    }
                    _ => {
                        // Ignore other messages.
                    }
                }
            }
            // Either side closed its channel/socket — stop pumping.
            else => {
                break 'pump;
            }
        }
    }
}
/// Inspector information that is sent from the isolate thread to the server
/// thread when a new inspector is created.
pub struct InspectorInfo {
    /// Address of the inspector server this target is reachable through.
    pub host: SocketAddr,
    /// Unique id used in the `/ws/<uuid>` endpoint path.
    pub uuid: Uuid,
    /// Name of the isolate thread, if any (included in the target title).
    pub thread_name: Option<String>,
    /// Hands new devtools sessions over to the isolate's inspector.
    pub new_session_tx: UnboundedSender<InspectorSessionProxy>,
    /// Resolves when the inspector deregisters; the server then drops this entry.
    pub deregister_rx: oneshot::Receiver<()>,
    /// Module URL reported as the target's `url` in `/json` metadata.
    pub url: String,
    /// When set, "waiting for debugger" is announced at registration
    /// (presumably the runtime blocks until a session attaches — see caller).
    pub wait_for_session: bool,
}
impl InspectorInfo {
    /// Build the registration record for a new inspector, assigning a fresh
    /// UUID and capturing the current thread's name.
    pub fn new(
        host: SocketAddr,
        new_session_tx: mpsc::UnboundedSender<InspectorSessionProxy>,
        deregister_rx: oneshot::Receiver<()>,
        url: String,
        wait_for_session: bool,
    ) -> Self {
        Self {
            host,
            uuid: Uuid::new_v4(),
            thread_name: thread::current().name().map(|n| n.to_owned()),
            new_session_tx,
            deregister_rx,
            url,
            wait_for_session,
        }
    }
    /// JSON metadata served by `/json` and `/json/list`, in the shape the
    /// Chrome DevTools discovery protocol expects.
    fn get_json_metadata(&self, host: &Option<String>) -> Value {
        // Prefer the caller-supplied host (from the Host header); otherwise
        // fall back to the address we are listening on.
        let host_listen = format!("{}", self.host);
        let host = host.as_ref().unwrap_or(&host_listen);
        json!({
            "description": "nix-js",
            "devtoolsFrontendUrl": self.get_frontend_url(host),
            "faviconUrl": "https://deno.land/favicon.ico",
            "id": self.uuid.to_string(),
            "title": self.get_title(),
            "type": "node",
            "url": self.url.to_string(),
            "webSocketDebuggerUrl": self.get_websocket_debugger_url(host),
        })
    }
    /// Websocket URL devtools should connect to for this target.
    pub fn get_websocket_debugger_url(&self, host: &str) -> String {
        format!("ws://{}/ws/{}", host, &self.uuid)
    }
    /// `devtools://` deep link that opens the frontend pointed at this target.
    fn get_frontend_url(&self, host: &str) -> String {
        format!(
            "devtools://devtools/bundled/js_app.html?ws={}/ws/{}&experiments=true&v8only=true",
            host, &self.uuid
        )
    }
    /// Human-readable title: thread name (if any) plus the process id.
    fn get_title(&self) -> String {
        format!(
            "nix-js{} [pid: {}]",
            self.thread_name
                .as_ref()
                .map(|n| format!(" - {n}"))
                .unwrap_or_default(),
            process::id(),
        )
    }
}

File diff suppressed because it is too large Load Diff

500
nix-js/src/runtime/value.rs Normal file
View File

@@ -0,0 +1,500 @@
use std::fmt;
use std::marker::PhantomData;
use boxing::nan::raw::{RawBox, RawStore, RawTag, Value as RawValue};
use gc_arena::{Collect, Gc};
use hashbrown::HashTable;
// ---------------------------------------------------------------------------
// Tag layout
// ---------------------------------------------------------------------------
//
// Positive tags (sign=false) — inline data in 6 bytes:
// 1: SmallInt — i32
// 2: Bool — u8 (0 or 1)
// 3: Null — no payload
// 4: SmallString — SmallStringId (u32)
//
// Negative tags (sign=true) — GC heap pointer (48-bit address):
// 1: BigInt → Gc<'gc, i64>
// 2: String → Gc<'gc, NixString>
// 3: SmallAttrSet → Gc<'gc, SmallAttrSet<'gc>>
// 4: AttrSet → Gc<'gc, AttrSet<'gc>>
// 5: List → Gc<'gc, Box<[Value<'gc>]>>
//
// Floats are stored directly as f64 (no tag).
// Inline (positive, sign=false) tags — payload stored directly in the box.
const TAG_SMI: (bool, u8) = (false, 1);
const TAG_BOOL: (bool, u8) = (false, 2);
const TAG_NULL: (bool, u8) = (false, 3);
const TAG_SMALL_STRING: (bool, u8) = (false, 4);
// Heap-pointer (negative, sign=true) tags — payload is a GC pointer.
const TAG_BIG_INT: (bool, u8) = (true, 1);
const TAG_STRING: (bool, u8) = (true, 2);
const TAG_SMALL_ATTRS: (bool, u8) = (true, 3);
const TAG_ATTRS: (bool, u8) = (true, 4);
const TAG_LIST: (bool, u8) = (true, 5);
/// # Nix runtime value representation
///
/// NaN-boxed value fitting in 8 bytes. Morally equivalent to:
/// ```ignore
/// enum NixValue<'gc> {
/// Float(SingleNaNF64),
/// SmallInt(i32),
/// BigInt(Gc<'gc, i64>),
/// Bool(bool),
/// Null,
/// SmallString(SmallStringId),
/// String(Gc<'gc, NixString>),
/// SmallAttrSet(Gc<'gc, SmallAttrSet<'gc>>),
/// AttrSet(Gc<'gc, AttrSet<'gc>>),
/// List(Gc<'gc, Box<[Value<'gc>]>>),
/// }
/// ```
#[repr(transparent)]
pub(crate) struct Value<'gc> {
    // The NaN-boxed payload: either a plain f64 or a tagged inline/pointer value.
    raw: RawBox,
    // Ties the value to the GC arena lifetime without occupying space.
    _marker: PhantomData<Gc<'gc, ()>>,
}
impl<'gc> Clone for Value<'gc> {
    /// Copies the raw box bits; heap values share the same GC pointee.
    #[inline]
    fn clone(&self) -> Self {
        let raw = self.raw.clone();
        Self {
            raw,
            _marker: PhantomData,
        }
    }
}
// SAFETY: `trace` visits every GC pointer a value can hold — inline tags hold
// none, and each negative tag value maps to exactly one pointee type (the
// same mapping used by the constructors below).
unsafe impl<'gc> Collect for Value<'gc> {
    fn trace(&self, cc: &gc_arena::Collection) {
        // No tag means the value is a plain f64 — nothing to trace.
        let Some(tag) = self.raw.tag() else { return };
        let (neg, val) = tag.neg_val();
        if !neg {
            return; // inline values hold no GC pointers
        }
        // Negative tags are heap pointers — reconstruct the Gc and trace it.
        unsafe {
            match val {
                1 => self.load_gc::<i64>().trace(cc),
                2 => self.load_gc::<NixString>().trace(cc),
                3 => self.load_gc::<SmallAttrSet<'gc>>().trace(cc),
                4 => self.load_gc::<AttrSet<'gc>>().trace(cc),
                5 => self.load_gc::<Box<[Value<'gc>]>>().trace(cc),
                _ => debug_assert!(false, "invalid negative tag value: {val}"),
            }
        }
    }
    // Values may contain GC pointers, so they must always be traced.
    fn needs_trace() -> bool
    where
        Self: Sized,
    {
        true
    }
}
// ---------------------------------------------------------------------------
// Private helpers
// ---------------------------------------------------------------------------
impl<'gc> Value<'gc> {
    /// Build a raw tag from a sign bit and a 3-bit tag value.
    #[inline(always)]
    fn mk_tag(neg: bool, val: u8) -> RawTag {
        debug_assert!((1..=7).contains(&val));
        // Safety: val is asserted to be in 1..=7.
        unsafe { RawTag::new_unchecked(neg, val) }
    }
    /// Wrap an already-tagged raw value without further checks.
    #[inline(always)]
    fn from_raw_value(rv: RawValue) -> Self {
        Self {
            raw: RawBox::from_value(rv),
            _marker: PhantomData,
        }
    }
    /// Store a GC pointer with the given (negative) tag value.
    #[inline(always)]
    fn store_gc<T>(tag_val: u8, gc: Gc<'gc, T>) -> Self {
        let ptr = Gc::as_ptr(gc);
        Self::from_raw_value(RawValue::store(Self::mk_tag(true, tag_val), ptr))
    }
    /// Load a GC pointer from a value with a negative tag.
    ///
    /// # Safety
    ///
    /// The value must actually store a `Gc<'gc, T>` with the matching type.
    #[inline(always)]
    unsafe fn load_gc<T>(&self) -> Gc<'gc, T> {
        unsafe {
            let rv = self.raw.value().unwrap_unchecked();
            let ptr: *const T = <*const T as RawStore>::from_val(rv);
            Gc::from_ptr(ptr)
        }
    }
    /// Returns the `(negative, val)` tag, or `None` for a float.
    #[inline(always)]
    fn tag_nv(&self) -> Option<(bool, u8)> {
        self.raw.tag().map(|t| t.neg_val())
    }
}
// ---------------------------------------------------------------------------
// Constructors
// ---------------------------------------------------------------------------
impl<'gc> Value<'gc> {
    /// A float value, stored directly as the f64 payload (no tag).
    #[inline]
    pub(crate) fn new_float(val: f64) -> Self {
        Self {
            raw: RawBox::from_float(val),
            _marker: PhantomData,
        }
    }
    /// A small integer stored inline as an i32.
    #[inline]
    pub(crate) fn new_smi(val: i32) -> Self {
        Self::from_raw_value(RawValue::store(Self::mk_tag(TAG_SMI.0, TAG_SMI.1), val))
    }
    /// A heap-allocated 64-bit integer.
    #[inline]
    pub(crate) fn new_int(gc: Gc<'gc, i64>) -> Self {
        Self::store_gc(TAG_BIG_INT.1, gc)
    }
    /// A boolean stored inline.
    #[inline]
    pub(crate) fn new_bool(val: bool) -> Self {
        Self::from_raw_value(RawValue::store(
            Self::mk_tag(TAG_BOOL.0, TAG_BOOL.1),
            val,
        ))
    }
    /// The null value (tag only, no payload).
    #[inline]
    pub(crate) fn new_null() -> Self {
        Self::from_raw_value(RawValue::empty(Self::mk_tag(TAG_NULL.0, TAG_NULL.1)))
    }
    /// An interned small string, referenced by id.
    #[inline]
    pub(crate) fn new_small_string(id: SmallStringId) -> Self {
        Self::from_raw_value(RawValue::store(
            Self::mk_tag(TAG_SMALL_STRING.0, TAG_SMALL_STRING.1),
            id.0,
        ))
    }
    /// A heap-allocated string.
    #[inline]
    pub(crate) fn new_string(gc: Gc<'gc, NixString>) -> Self {
        Self::store_gc(TAG_STRING.1, gc)
    }
    /// A fixed-size attribute set on the GC heap.
    #[inline]
    pub(crate) fn new_small_attrs(gc: Gc<'gc, SmallAttrSet<'gc>>) -> Self {
        Self::store_gc(TAG_SMALL_ATTRS.1, gc)
    }
    /// A hash-table-backed attribute set on the GC heap.
    #[inline]
    pub(crate) fn new_attrs(gc: Gc<'gc, AttrSet<'gc>>) -> Self {
        Self::store_gc(TAG_ATTRS.1, gc)
    }
    /// A list on the GC heap.
    #[inline]
    pub(crate) fn new_list(gc: Gc<'gc, Box<[Value<'gc>]>>) -> Self {
        Self::store_gc(TAG_LIST.1, gc)
    }
}
// ---------------------------------------------------------------------------
// Type checks
// ---------------------------------------------------------------------------
impl<'gc> Value<'gc> {
    /// Untagged values are plain f64 floats.
    #[inline]
    pub(crate) fn is_float(&self) -> bool {
        self.raw.is_float()
    }
    #[inline]
    pub(crate) fn is_smi(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_SMI))
    }
    #[inline]
    pub(crate) fn is_big_int(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_BIG_INT))
    }
    /// True for float, small int, or big int.
    #[inline]
    pub(crate) fn is_number(&self) -> bool {
        matches!(self.tag_nv(), None | Some(TAG_SMI | TAG_BIG_INT))
    }
    #[inline]
    pub(crate) fn is_bool(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_BOOL))
    }
    #[inline]
    pub(crate) fn is_null(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_NULL))
    }
    #[inline]
    pub(crate) fn is_small_string(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_SMALL_STRING))
    }
    #[inline]
    pub(crate) fn is_heap_string(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_STRING))
    }
    /// True for small string or heap string.
    #[inline]
    pub(crate) fn is_string(&self) -> bool {
        self.is_small_string() || self.is_heap_string()
    }
    #[inline]
    pub(crate) fn is_small_attrs(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_SMALL_ATTRS))
    }
    #[inline]
    pub(crate) fn is_heap_attrs(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_ATTRS))
    }
    /// True for small or heap attr set.
    #[inline]
    pub(crate) fn is_attrs(&self) -> bool {
        self.is_small_attrs() || self.is_heap_attrs()
    }
    #[inline]
    pub(crate) fn is_list(&self) -> bool {
        matches!(self.tag_nv(), Some(TAG_LIST))
    }
}
// ---------------------------------------------------------------------------
// Accessors
// ---------------------------------------------------------------------------
impl<'gc> Value<'gc> {
    #[inline]
    pub(crate) fn as_float(&self) -> Option<f64> {
        self.raw.float().map(|f| *f)
    }
    #[inline]
    pub(crate) fn as_smi(&self) -> Option<i32> {
        // The type check guards the unchecked payload load below.
        self.is_smi().then(|| unsafe {
            let rv = self.raw.value().unwrap_unchecked();
            <i32 as RawStore>::from_val(rv)
        })
    }
    #[inline]
    pub(crate) fn as_big_int(&self) -> Option<Gc<'gc, i64>> {
        self.is_big_int().then(|| unsafe { self.load_gc() })
    }
    /// Read the integer value as `i64` regardless of smi/big-int representation.
    #[inline]
    pub(crate) fn as_i64(&self) -> Option<i64> {
        match self.tag_nv() {
            Some(TAG_SMI) => Some(unsafe {
                let rv = self.raw.value().unwrap_unchecked();
                <i32 as RawStore>::from_val(rv) as i64
            }),
            Some(TAG_BIG_INT) => Some(unsafe { *self.load_gc::<i64>() }),
            _ => None,
        }
    }
    #[inline]
    pub(crate) fn as_bool(&self) -> Option<bool> {
        self.is_bool().then(|| unsafe {
            let rv = self.raw.value().unwrap_unchecked();
            <bool as RawStore>::from_val(rv)
        })
    }
    #[inline]
    pub(crate) fn as_small_string(&self) -> Option<SmallStringId> {
        self.is_small_string().then(|| {
            SmallStringId(unsafe {
                let rv = self.raw.value().unwrap_unchecked();
                <u32 as RawStore>::from_val(rv)
            })
        })
    }
    #[inline]
    pub(crate) fn as_heap_string(&self) -> Option<Gc<'gc, NixString>> {
        self.is_heap_string().then(|| unsafe { self.load_gc() })
    }
    #[inline]
    pub(crate) fn as_small_attr_set(&self) -> Option<Gc<'gc, SmallAttrSet<'gc>>> {
        self.is_small_attrs().then(|| unsafe { self.load_gc() })
    }
    #[inline]
    pub(crate) fn as_heap_attr_set(&self) -> Option<Gc<'gc, AttrSet<'gc>>> {
        self.is_heap_attrs().then(|| unsafe { self.load_gc() })
    }
    #[inline]
    pub(crate) fn as_list(&self) -> Option<Gc<'gc, Box<[Value<'gc>]>>> {
        self.is_list().then(|| unsafe { self.load_gc() })
    }
}
// ---------------------------------------------------------------------------
// Debug
// ---------------------------------------------------------------------------
impl fmt::Debug for Value<'_> {
    /// Prints the variant name; heap payloads are shown as `Gc<..>` since the
    /// pointee cannot be formatted without knowing its type's Debug impl here.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.tag_nv() {
            None => {
                let v = self.raw.float().unwrap();
                write!(f, "Float({v:?})")
            }
            Some(TAG_SMI) => write!(f, "SmallInt({:?})", self.as_smi().unwrap()),
            Some(TAG_BOOL) => write!(f, "Bool({:?})", self.as_bool().unwrap()),
            Some(TAG_NULL) => write!(f, "Null"),
            Some(TAG_SMALL_STRING) => {
                write!(f, "SmallString({:?})", self.as_small_string().unwrap())
            }
            Some(TAG_BIG_INT) => write!(f, "BigInt(Gc<..>)"),
            Some(TAG_STRING) => write!(f, "String(Gc<..>)"),
            Some(TAG_SMALL_ATTRS) => write!(f, "SmallAttrSet(Gc<..>)"),
            Some(TAG_ATTRS) => write!(f, "AttrSet(Gc<..>)"),
            Some(TAG_LIST) => write!(f, "List(Gc<..>)"),
            // Defensive catch-all for tag values outside the defined set.
            Some((neg, val)) => write!(f, "Unknown(neg={neg}, val={val})"),
        }
    }
}
// ===========================================================================
// Supporting types
// ===========================================================================
// TODO: size?
/// Index into the interned small-string table, stored inline in a `Value`.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Collect)]
#[collect(require_static)]
pub(crate) struct SmallStringId(u32);
/// Heap-allocated Nix string.
///
/// Stored on the GC heap via `Gc<'gc, NixString>`. The string data itself
/// lives in a standard `Box<str>` owned by this struct; the GC only manages
/// the outer allocation.
#[derive(Collect)]
#[collect(require_static)]
pub(crate) struct NixString {
    // Owned UTF-8 payload; dropped normally when the GC frees the wrapper.
    data: Box<str>,
    // TODO: string context for derivation dependency tracking
}
impl NixString {
    /// Wrap string data for placement on the GC heap.
    pub(crate) fn new(s: impl Into<Box<str>>) -> Self {
        let data = s.into();
        Self { data }
    }
    /// Borrow the underlying UTF-8 data.
    pub(crate) fn as_str(&self) -> &str {
        &*self.data
    }
}
impl fmt::Debug for NixString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the inner string's Debug so formatter flags are honored.
        fmt::Debug::fmt(&self.data, f)
    }
}
/// Fixed-size attribute set (up to 8 entries).
#[derive(Collect)]
#[collect(no_drop)]
pub(crate) struct SmallAttrSet<'gc> {
    // TODO: proper key storage, length tracking, and lookup
    inner: [Value<'gc>; 8],
}
/// Hash-table-backed attribute set.
// Collect is implemented manually below because HashTable's entries need
// per-field tracing.
pub(crate) struct AttrSet<'gc> {
    inner: HashTable<AttrSetEntry<'gc>>,
}
unsafe impl<'gc> Collect for AttrSet<'gc> {
fn trace(&self, cc: &gc_arena::Collection) {
for entry in self.inner.iter() {
Collect::trace(&entry.key, cc);
Collect::trace(&entry.value, cc);
}
}
fn needs_trace() -> bool
where
Self: Sized,
{
true
}
}
/// One key/value pair stored in an `AttrSet`'s hash table.
#[derive(Collect)]
#[collect(no_drop)]
struct AttrSetEntry<'gc> {
    key: AttrKey<'gc>,
    value: Value<'gc>,
}
/// Attribute name: either an interned small-string id or a GC-heap string.
#[derive(Collect)]
#[collect(no_drop)]
pub(crate) enum AttrKey<'gc> {
    Small(SmallStringId),
    Large(Gc<'gc, str>),
}

View File

@@ -1,5 +1,3 @@
#![allow(dead_code)]
use crate::error::Result; use crate::error::Result;
mod config; mod config;
@@ -39,12 +37,4 @@ pub trait Store: Send + Sync {
content: &str, content: &str,
references: Vec<String>, references: Vec<String>,
) -> Result<String>; ) -> Result<String>;
fn make_fixed_output_path(
&self,
hash_algo: &str,
hash: &str,
hash_mode: &str,
name: &str,
) -> Result<String>;
} }

View File

@@ -87,11 +87,12 @@ impl Store for DaemonStore {
recursive: bool, recursive: bool,
references: Vec<String>, references: Vec<String>,
) -> Result<String> { ) -> Result<String> {
use std::fs;
use nix_compat::nix_daemon::types::AddToStoreNarRequest; use nix_compat::nix_daemon::types::AddToStoreNarRequest;
use nix_compat::nixhash::{CAHash, NixHash}; use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::{StorePath, build_ca_path}; use nix_compat::store_path::{StorePath, build_ca_path};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::fs;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
let temp_file = NamedTempFile::new() let temp_file = NamedTempFile::new()
@@ -237,11 +238,12 @@ impl Store for DaemonStore {
content: &str, content: &str,
references: Vec<String>, references: Vec<String>,
) -> Result<String> { ) -> Result<String> {
use std::fs;
use nix_compat::nix_daemon::types::AddToStoreNarRequest; use nix_compat::nix_daemon::types::AddToStoreNarRequest;
use nix_compat::nixhash::CAHash; use nix_compat::nixhash::CAHash;
use nix_compat::store_path::{StorePath, build_text_path}; use nix_compat::store_path::{StorePath, build_text_path};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::fs;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
let temp_file = NamedTempFile::new() let temp_file = NamedTempFile::new()
@@ -304,51 +306,6 @@ impl Store for DaemonStore {
Ok(store_path_str) Ok(store_path_str)
} }
/// Computes the store path of a fixed-output derivation output.
///
/// `hash_algo` must currently be `"sha256"`; `hash` is the hex digest and must
/// decode to exactly 32 bytes. `hash_mode == "r"` selects recursive (NAR)
/// hashing via `CAHash::Nar`; any other value selects flat-file hashing via
/// `CAHash::Flat`. Returns the absolute store path for `name`, built with no
/// references.
fn make_fixed_output_path(
&self,
hash_algo: &str,
hash: &str,
hash_mode: &str,
name: &str,
) -> Result<String> {
use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::build_ca_path;
// Only sha256 is supported; decode the hex digest and length-check it.
let nix_hash = match hash_algo {
"sha256" => {
let hash_bytes = hex::decode(hash)
.map_err(|e| Error::internal(format!("Invalid hash hex: {}", e)))?;
if hash_bytes.len() != 32 {
return Err(Error::internal(format!(
"Invalid sha256 hash length: expected 32, got {}",
hash_bytes.len()
)));
}
let mut arr = [0u8; 32];
arr.copy_from_slice(&hash_bytes);
NixHash::Sha256(arr)
}
_ => {
return Err(Error::internal(format!(
"Unsupported hash algorithm: {}",
hash_algo
)));
}
};
// "r" = recursive/NAR hashing; anything else is treated as flat.
let ca_hash = if hash_mode == "r" {
CAHash::Nar(nix_hash)
} else {
CAHash::Flat(nix_hash)
};
// No references are passed; NOTE(review): confirm the trailing `false`
// flag's meaning against build_ca_path's signature (likely "self-reference").
let store_path: nix_compat::store_path::StorePath<String> =
build_ca_path(name, &ca_hash, Vec::<String>::new(), false)
.map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
Ok(store_path.to_absolute_path())
}
} }
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37); const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37);
@@ -585,6 +542,7 @@ impl NixDaemonClient {
} }
/// Query information about a store path /// Query information about a store path
#[allow(dead_code)]
pub async fn query_path_info(&mut self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> { pub async fn query_path_info(&mut self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes()) let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?; .map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
@@ -658,6 +616,7 @@ impl NixDaemonClient {
} }
/// Query which paths are valid /// Query which paths are valid
#[allow(dead_code)]
pub async fn query_valid_paths(&mut self, paths: Vec<String>) -> IoResult<Vec<String>> { pub async fn query_valid_paths(&mut self, paths: Vec<String>) -> IoResult<Vec<String>> {
let store_paths: IoResult<Vec<StorePath<String>>> = paths let store_paths: IoResult<Vec<StorePath<String>>> = paths
.iter() .iter()
@@ -762,6 +721,7 @@ impl NixDaemonConnection {
} }
/// Query information about a store path /// Query information about a store path
#[allow(dead_code)]
pub async fn query_path_info(&self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> { pub async fn query_path_info(&self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> {
let mut client = self.client.lock().await; let mut client = self.client.lock().await;
client.query_path_info(path).await client.query_path_info(path).await
@@ -774,6 +734,7 @@ impl NixDaemonConnection {
} }
/// Query which paths are valid /// Query which paths are valid
#[allow(dead_code)]
pub async fn query_valid_paths(&self, paths: Vec<String>) -> IoResult<Vec<String>> { pub async fn query_valid_paths(&self, paths: Vec<String>) -> IoResult<Vec<String>> {
let mut client = self.client.lock().await; let mut client = self.client.lock().await;
client.query_valid_paths(paths).await client.query_valid_paths(paths).await

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use std::fmt; use std::fmt;
#[derive(Debug)] #[derive(Debug)]

View File

@@ -81,7 +81,7 @@ pub fn validate_store_path(store_dir: &str, path: &str) -> Result<()> {
mod tests { mod tests {
use super::*; use super::*;
#[test] #[test_log::test]
fn test_valid_store_paths() { fn test_valid_store_paths() {
let store_dir = "/nix/store"; let store_dir = "/nix/store";
let valid_paths = vec![ let valid_paths = vec![
@@ -100,7 +100,7 @@ mod tests {
} }
} }
#[test] #[test_log::test]
fn test_invalid_store_paths() { fn test_invalid_store_paths() {
let store_dir = "/nix/store"; let store_dir = "/nix/store";
let invalid_paths = vec![ let invalid_paths = vec![

View File

@@ -0,0 +1,209 @@
use std::collections::{BTreeMap, BTreeSet, VecDeque};
/// One element of a Nix string context, decoded from its serialized form.
pub enum StringContextElem {
    /// A plain store path referenced verbatim.
    Opaque { path: String },
    /// `=<drv>`: the full closure of a derivation is referenced.
    DrvDeep { drv_path: String },
    /// `!<output>!<drv>`: a specific output of a derivation is referenced.
    Built { drv_path: String, output: String },
}
impl StringContextElem {
    /// Decodes a serialized context element:
    /// * `=<drv path>`     → `DrvDeep`
    /// * `!<out>!<drv>`    → `Built`
    /// * anything else     → `Opaque`, keeping the raw string verbatim
    ///   (including a malformed `!...` with no second `!`).
    pub fn decode(encoded: &str) -> Self {
        if let Some(drv) = encoded.strip_prefix('=') {
            return StringContextElem::DrvDeep {
                drv_path: drv.to_owned(),
            };
        }
        if let Some(tail) = encoded.strip_prefix('!') {
            // A well-formed built element has a second '!' separating the
            // output name from the derivation path.
            if let Some((output, drv)) = tail.split_once('!') {
                return StringContextElem::Built {
                    drv_path: drv.to_owned(),
                    output: output.to_owned(),
                };
            }
        }
        StringContextElem::Opaque {
            path: encoded.to_owned(),
        }
    }
}
/// Map from derivation store path to the set of its outputs that are referenced.
pub type InputDrvs = BTreeMap<String, BTreeSet<String>>;
/// Set of store paths referenced directly as sources.
pub type Srcs = BTreeSet<String>;
/// Splits an encoded string-context list into derivation inputs and sources.
///
/// Each element is decoded with `StringContextElem::decode`:
/// * `Opaque` paths land in the source set,
/// * `Built` elements record the referenced output under its drv path,
/// * `DrvDeep` elements pull in the whole closure via `compute_fs_closure`
///   (which may fail with an error string, propagated here).
pub fn extract_input_drvs_and_srcs(context: &[String]) -> Result<(InputDrvs, Srcs), String> {
    let mut drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    let mut srcs: BTreeSet<String> = BTreeSet::new();
    for elem in context.iter().map(|e| StringContextElem::decode(e)) {
        match elem {
            StringContextElem::Built { drv_path, output } => {
                drvs.entry(drv_path).or_default().insert(output);
            }
            StringContextElem::DrvDeep { drv_path } => {
                compute_fs_closure(&drv_path, &mut drvs, &mut srcs)?;
            }
            StringContextElem::Opaque { path } => {
                srcs.insert(path);
            }
        }
    }
    Ok((drvs, srcs))
}
// BFS over the on-disk derivation graph rooted at `drv_path`, accumulating
// into `input_drvs` (drv path -> referenced outputs) and `input_srcs` (every
// path reached). Paths not ending in ".drv" are recorded but not expanded.
// Errors are stringly-typed: unreadable or unparsable .drv files abort the walk.
fn compute_fs_closure(
drv_path: &str,
input_drvs: &mut BTreeMap<String, BTreeSet<String>>,
input_srcs: &mut BTreeSet<String>,
) -> Result<(), String> {
let mut queue: VecDeque<String> = VecDeque::new();
let mut visited: BTreeSet<String> = BTreeSet::new();
queue.push_back(drv_path.to_string());
while let Some(current_path) = queue.pop_front() {
if visited.contains(&current_path) {
continue;
}
visited.insert(current_path.clone());
// Every visited path — including .drv files themselves — counts as a source.
input_srcs.insert(current_path.clone());
if !current_path.ends_with(".drv") {
continue;
}
// Derivations are read from disk and parsed to discover their own inputs,
// which are then enqueued so the whole transitive closure is walked.
let content = std::fs::read_to_string(&current_path)
.map_err(|e| format!("failed to read derivation {}: {}", current_path, e))?;
let inputs = parse_derivation_inputs(&content)
.ok_or_else(|| format!("failed to parse derivation {}", current_path))?;
for src in inputs.input_srcs {
input_srcs.insert(src.clone());
if !visited.contains(&src) {
queue.push_back(src);
}
}
for (dep_drv, outputs) in inputs.input_drvs {
// A dependency drv is recorded both as a source path and with the union
// of all output names referenced by any parent.
input_srcs.insert(dep_drv.clone());
let entry = input_drvs.entry(dep_drv.clone()).or_default();
for output in outputs {
entry.insert(output);
}
if !visited.contains(&dep_drv) {
queue.push_back(dep_drv);
}
}
}
Ok(())
}
// Result of scanning a derivation's ATerm: (drv path, output names) pairs from
// the inputDrvs section and plain paths from the inputSrcs section.
struct DerivationInputs {
input_drvs: Vec<(String, Vec<String>)>,
input_srcs: Vec<String>,
}
/// Extracts the `inputDrvs` and `inputSrcs` sections from a derivation's ATerm
/// text (`Derive([outputs],[inputDrvs],[inputSrcs],...)`) without a full ATerm
/// parser. Returns `None` when the text does not have the expected shape.
///
/// NOTE(review): the bracket counting ignores quoting, so a literal `[`/`]` or
/// `(`/`)` inside a quoted string would confuse the scan. Store paths cannot
/// normally contain those characters, but confirm for all fields scanned here.
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
// Skip the leading outputs list: advance past its matching `]`.
let aterm = aterm.strip_prefix("Derive([")?;
let mut bracket_count: i32 = 1;
let mut pos = 0;
let bytes = aterm.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
_ => {}
}
pos += 1;
}
if bracket_count != 0 {
return None;
}
// Second section: inputDrvs, a list of `("<drv>",["out",...])` tuples.
let rest = &aterm[pos..];
let rest = rest.strip_prefix(",[")?;
let mut input_drvs = Vec::new();
let mut bracket_count: i32 = 1;
let mut start = 0;
pos = 0;
let bytes = rest.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
// Tuples only appear at nesting depth 1 of this section.
b'(' if bracket_count == 1 => {
start = pos;
}
b')' if bracket_count == 1 => {
let entry = &rest[start + 1..pos];
// Malformed entries are skipped silently rather than failing the parse.
if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
input_drvs.push((drv_path, outputs));
}
}
_ => {}
}
pos += 1;
}
// Third section: inputSrcs, a list of quoted paths.
let rest = &rest[pos..];
let rest = rest.strip_prefix(",[")?;
let mut input_srcs = Vec::new();
bracket_count = 1;
pos = 0;
let bytes = rest.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
b'"' if bracket_count == 1 => {
pos += 1;
let src_start = pos;
// Scan to the closing quote, stepping over backslash escapes.
// Escapes are NOT unescaped in the captured string.
while pos < bytes.len() && bytes[pos] != b'"' {
if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
pos += 2;
} else {
pos += 1;
}
}
let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
input_srcs.push(src.to_string());
}
_ => {}
}
pos += 1;
}
Some(DerivationInputs {
input_drvs,
input_srcs,
})
}
/// Parses one `inputDrvs` tuple body of the form `"<drv path>",["out1","out2"]`.
///
/// Returns the derivation path and its referenced output names, or `None`
/// when the entry does not match this shape. List items that are not
/// double-quoted are skipped rather than treated as errors.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    // Leading quoted derivation path.
    let after_quote = entry.strip_prefix('"')?;
    let closing = after_quote.find('"')?;
    let drv_path = after_quote[..closing].to_owned();
    // The remainder must be `,[ ... ]` holding the output-name list.
    let list = after_quote[closing + 1..]
        .strip_prefix(",[")?
        .strip_suffix(']')?;
    let outputs = list
        .split(',')
        .filter_map(|item| {
            let item = item.trim();
            item.strip_prefix('"')?.strip_suffix('"').map(str::to_owned)
        })
        .collect();
    Some((drv_path, outputs))
}

View File

@@ -1,7 +1,6 @@
use core::fmt::{Debug, Display, Formatter, Result as FmtResult}; use core::fmt::{Debug, Display, Formatter, Result as FmtResult};
use core::hash::Hash; use core::hash::Hash;
use core::ops::Deref; use core::ops::Deref;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::ops::DerefMut; use std::ops::DerefMut;

View File

@@ -1 +0,0 @@
[ { } { a = 1; } { a = 1; } { a = "a"; } { m = 1; } { m = "m"; } { n = 1; } { n = "n"; } { n = 1; p = 2; } { n = "n"; p = "p"; } { n = 1; p = 2; } { n = "n"; p = "p"; } { a = "a"; b = "b"; c = "c"; d = "d"; e = "e"; f = "f"; g = "g"; h = "h"; i = "i"; j = "j"; k = "k"; l = "l"; m = "m"; n = "n"; o = "o"; p = "p"; q = "q"; r = "r"; s = "s"; t = "t"; u = "u"; v = "v"; w = "w"; x = "x"; y = "y"; z = "z"; } true ]

View File

@@ -1 +0,0 @@
[ null <PRIMOP> <PRIMOP-APP> <LAMBDA> [ [ «repeated» ] ] ]

View File

@@ -1,36 +1,33 @@
mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::eval;
use crate::utils::eval_result; use crate::utils::{eval, eval_result};
#[test] #[test_log::test]
fn arithmetic() { fn arithmetic() {
assert_eq!(eval("1 + 1"), Value::Int(2)); assert_eq!(eval("1 + 1"), Value::Int(2));
} }
#[test] #[test_log::test]
fn simple_function_application() { fn simple_function_application() {
assert_eq!(eval("(x: x) 1"), Value::Int(1)); assert_eq!(eval("(x: x) 1"), Value::Int(1));
} }
#[test] #[test_log::test]
fn curried_function() { fn curried_function() {
assert_eq!(eval("(x: y: x - y) 2 1"), Value::Int(1)); assert_eq!(eval("(x: y: x - y) 2 1"), Value::Int(1));
} }
#[test] #[test_log::test]
fn rec_attrset() { fn rec_attrset() {
assert_eq!(eval("rec { b = a; a = 1; }.b"), Value::Int(1)); assert_eq!(eval("rec { b = a; a = 1; }.b"), Value::Int(1));
} }
#[test] #[test_log::test]
fn let_binding() { fn let_binding() {
assert_eq!(eval("let b = a; a = 1; in b"), Value::Int(1)); assert_eq!(eval("let b = a; a = 1; in b"), Value::Int(1));
} }
#[test] #[test_log::test]
fn fibonacci() { fn fibonacci() {
assert_eq!( assert_eq!(
eval( eval(
@@ -40,7 +37,7 @@ fn fibonacci() {
); );
} }
#[test] #[test_log::test]
fn fixed_point_combinator() { fn fixed_point_combinator() {
assert_eq!( assert_eq!(
eval("((f: let x = f x; in x)(self: { x = 1; y = self.x + 1; })).y"), eval("((f: let x = f x; in x)(self: { x = 1; y = self.x + 1; })).y"),
@@ -48,17 +45,17 @@ fn fixed_point_combinator() {
); );
} }
#[test] #[test_log::test]
fn conditional_true() { fn conditional_true() {
assert_eq!(eval("if true then 1 else 0"), Value::Int(1)); assert_eq!(eval("if true then 1 else 0"), Value::Int(1));
} }
#[test] #[test_log::test]
fn conditional_false() { fn conditional_false() {
assert_eq!(eval("if false then 1 else 0"), Value::Int(0)); assert_eq!(eval("if false then 1 else 0"), Value::Int(0));
} }
#[test] #[test_log::test]
fn nested_let() { fn nested_let() {
assert_eq!( assert_eq!(
eval("let x = 1; in let y = x + 1; z = y + 1; in z"), eval("let x = 1; in let y = x + 1; z = y + 1; in z"),
@@ -66,7 +63,7 @@ fn nested_let() {
); );
} }
#[test] #[test_log::test]
fn rec_inherit_fails() { fn rec_inherit_fails() {
assert!(eval_result("{ inherit x; }").is_err()); assert!(eval_result("{ inherit x; }").is_err());
} }

View File

@@ -1,33 +1,32 @@
mod utils;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use nix_js::value::{AttrSet, List, Value}; use nix_js::value::{AttrSet, List, Value};
use utils::eval;
#[test] use crate::utils::eval;
#[test_log::test]
fn builtins_accessible() { fn builtins_accessible() {
let result = eval("builtins"); let result = eval("builtins");
assert!(matches!(result, Value::AttrSet(_))); assert!(matches!(result, Value::AttrSet(_)));
} }
#[test] #[test_log::test]
fn builtins_self_reference() { fn builtins_self_reference() {
let result = eval("builtins.builtins"); let result = eval("builtins.builtins");
assert!(matches!(result, Value::AttrSet(_))); assert!(matches!(result, Value::AttrSet(_)));
} }
#[test] #[test_log::test]
fn builtins_add() { fn builtins_add() {
assert_eq!(eval("builtins.add 1 2"), Value::Int(3)); assert_eq!(eval("builtins.add 1 2"), Value::Int(3));
} }
#[test] #[test_log::test]
fn builtins_length() { fn builtins_length() {
assert_eq!(eval("builtins.length [1 2 3]"), Value::Int(3)); assert_eq!(eval("builtins.length [1 2 3]"), Value::Int(3));
} }
#[test] #[test_log::test]
fn builtins_map() { fn builtins_map() {
assert_eq!( assert_eq!(
eval("builtins.map (x: x * 2) [1 2 3]"), eval("builtins.map (x: x * 2) [1 2 3]"),
@@ -35,7 +34,7 @@ fn builtins_map() {
); );
} }
#[test] #[test_log::test]
fn builtins_filter() { fn builtins_filter() {
assert_eq!( assert_eq!(
eval("builtins.filter (x: x > 1) [1 2 3]"), eval("builtins.filter (x: x > 1) [1 2 3]"),
@@ -43,7 +42,7 @@ fn builtins_filter() {
); );
} }
#[test] #[test_log::test]
fn builtins_attrnames() { fn builtins_attrnames() {
let result = eval("builtins.attrNames { a = 1; b = 2; }"); let result = eval("builtins.attrNames { a = 1; b = 2; }");
assert!(matches!(result, Value::List(_))); assert!(matches!(result, Value::List(_)));
@@ -52,12 +51,12 @@ fn builtins_attrnames() {
} }
} }
#[test] #[test_log::test]
fn builtins_head() { fn builtins_head() {
assert_eq!(eval("builtins.head [1 2 3]"), Value::Int(1)); assert_eq!(eval("builtins.head [1 2 3]"), Value::Int(1));
} }
#[test] #[test_log::test]
fn builtins_tail() { fn builtins_tail() {
assert_eq!( assert_eq!(
eval("builtins.tail [1 2 3]"), eval("builtins.tail [1 2 3]"),
@@ -65,17 +64,17 @@ fn builtins_tail() {
); );
} }
#[test] #[test_log::test]
fn builtins_in_let() { fn builtins_in_let() {
assert_eq!(eval("let b = builtins; in b.add 5 3"), Value::Int(8)); assert_eq!(eval("let b = builtins; in b.add 5 3"), Value::Int(8));
} }
#[test] #[test_log::test]
fn builtins_in_with() { fn builtins_in_with() {
assert_eq!(eval("with builtins; add 10 20"), Value::Int(30)); assert_eq!(eval("with builtins; add 10 20"), Value::Int(30));
} }
#[test] #[test_log::test]
fn builtins_nested_calls() { fn builtins_nested_calls() {
assert_eq!( assert_eq!(
eval("builtins.add (builtins.mul 2 3) (builtins.sub 10 5)"), eval("builtins.add (builtins.mul 2 3) (builtins.sub 10 5)"),
@@ -83,32 +82,32 @@ fn builtins_nested_calls() {
); );
} }
#[test] #[test_log::test]
fn builtins_is_list() { fn builtins_is_list() {
assert_eq!(eval("builtins.isList [1 2 3]"), Value::Bool(true)); assert_eq!(eval("builtins.isList [1 2 3]"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn builtins_is_attrs() { fn builtins_is_attrs() {
assert_eq!(eval("builtins.isAttrs { a = 1; }"), Value::Bool(true)); assert_eq!(eval("builtins.isAttrs { a = 1; }"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn builtins_is_function() { fn builtins_is_function() {
assert_eq!(eval("builtins.isFunction (x: x)"), Value::Bool(true)); assert_eq!(eval("builtins.isFunction (x: x)"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn builtins_is_null() { fn builtins_is_null() {
assert_eq!(eval("builtins.isNull null"), Value::Bool(true)); assert_eq!(eval("builtins.isNull null"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn builtins_is_bool() { fn builtins_is_bool() {
assert_eq!(eval("builtins.isBool true"), Value::Bool(true)); assert_eq!(eval("builtins.isBool true"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn builtins_shadowing() { fn builtins_shadowing() {
assert_eq!( assert_eq!(
eval("let builtins = { add = x: y: x - y; }; in builtins.add 5 3"), eval("let builtins = { add = x: y: x - y; }; in builtins.add 5 3"),
@@ -116,13 +115,13 @@ fn builtins_shadowing() {
); );
} }
#[test] #[test_log::test]
fn builtins_lazy_evaluation() { fn builtins_lazy_evaluation() {
let result = eval("builtins.builtins.builtins.add 1 1"); let result = eval("builtins.builtins.builtins.add 1 1");
assert_eq!(result, Value::Int(2)); assert_eq!(result, Value::Int(2));
} }
#[test] #[test_log::test]
fn builtins_foldl() { fn builtins_foldl() {
assert_eq!( assert_eq!(
eval("builtins.foldl' (acc: x: acc + x) 0 [1 2 3 4 5]"), eval("builtins.foldl' (acc: x: acc + x) 0 [1 2 3 4 5]"),
@@ -130,13 +129,13 @@ fn builtins_foldl() {
); );
} }
#[test] #[test_log::test]
fn builtins_elem() { fn builtins_elem() {
assert_eq!(eval("builtins.elem 2 [1 2 3]"), Value::Bool(true)); assert_eq!(eval("builtins.elem 2 [1 2 3]"), Value::Bool(true));
assert_eq!(eval("builtins.elem 5 [1 2 3]"), Value::Bool(false)); assert_eq!(eval("builtins.elem 5 [1 2 3]"), Value::Bool(false));
} }
#[test] #[test_log::test]
fn builtins_concat_lists() { fn builtins_concat_lists() {
assert_eq!( assert_eq!(
eval("builtins.concatLists [[1 2] [3 4] [5]]"), eval("builtins.concatLists [[1 2] [3 4] [5]]"),
@@ -150,7 +149,7 @@ fn builtins_concat_lists() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_basic() { fn builtins_compare_versions_basic() {
assert_eq!( assert_eq!(
eval("builtins.compareVersions \"1.0\" \"2.3\""), eval("builtins.compareVersions \"1.0\" \"2.3\""),
@@ -174,7 +173,7 @@ fn builtins_compare_versions_basic() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_components() { fn builtins_compare_versions_components() {
assert_eq!( assert_eq!(
eval("builtins.compareVersions \"2.3.1\" \"2.3\""), eval("builtins.compareVersions \"2.3.1\" \"2.3\""),
@@ -186,7 +185,7 @@ fn builtins_compare_versions_components() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_numeric_vs_alpha() { fn builtins_compare_versions_numeric_vs_alpha() {
// Numeric component comes before alpha component // Numeric component comes before alpha component
assert_eq!( assert_eq!(
@@ -199,7 +198,7 @@ fn builtins_compare_versions_numeric_vs_alpha() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_pre() { fn builtins_compare_versions_pre() {
// "pre" is special: comes before everything except another "pre" // "pre" is special: comes before everything except another "pre"
assert_eq!( assert_eq!(
@@ -220,7 +219,7 @@ fn builtins_compare_versions_pre() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_alpha() { fn builtins_compare_versions_alpha() {
// Alphabetic comparison // Alphabetic comparison
assert_eq!( assert_eq!(
@@ -233,7 +232,7 @@ fn builtins_compare_versions_alpha() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_symmetry() { fn builtins_compare_versions_symmetry() {
// Test symmetry: compareVersions(a, b) == -compareVersions(b, a) // Test symmetry: compareVersions(a, b) == -compareVersions(b, a)
assert_eq!( assert_eq!(
@@ -246,7 +245,7 @@ fn builtins_compare_versions_symmetry() {
); );
} }
#[test] #[test_log::test]
fn builtins_compare_versions_complex() { fn builtins_compare_versions_complex() {
// Complex version strings with multiple components // Complex version strings with multiple components
assert_eq!( assert_eq!(
@@ -263,7 +262,7 @@ fn builtins_compare_versions_complex() {
); );
} }
#[test] #[test_log::test]
fn builtins_generic_closure() { fn builtins_generic_closure() {
assert_eq!( assert_eq!(
eval( eval(
@@ -279,7 +278,7 @@ fn builtins_generic_closure() {
); );
} }
#[test] #[test_log::test]
fn builtins_function_args() { fn builtins_function_args() {
assert_eq!( assert_eq!(
eval("builtins.functionArgs (x: 1)"), eval("builtins.functionArgs (x: 1)"),
@@ -316,7 +315,7 @@ fn builtins_function_args() {
); );
} }
#[test] #[test_log::test]
fn builtins_parse_drv_name() { fn builtins_parse_drv_name() {
let result = eval(r#"builtins.parseDrvName "nix-js-0.1.0pre""#).unwrap_attr_set(); let result = eval(r#"builtins.parseDrvName "nix-js-0.1.0pre""#).unwrap_attr_set();
assert_eq!(result.get("name"), Some(&Value::String("nix-js".into()))); assert_eq!(result.get("name"), Some(&Value::String("nix-js".into())));

View File

@@ -1,9 +1,8 @@
mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::eval_result;
#[test] use crate::utils::eval_result;
#[test_log::test]
fn to_file_simple() { fn to_file_simple() {
let result = let result =
eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate"); eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");
@@ -20,7 +19,7 @@ fn to_file_simple() {
} }
} }
#[test] #[test_log::test]
fn to_file_with_references() { fn to_file_with_references() {
let result = eval_result( let result = eval_result(
r#" r#"
@@ -43,7 +42,7 @@ fn to_file_with_references() {
} }
} }
#[test] #[test_log::test]
fn to_file_invalid_name_with_slash() { fn to_file_invalid_name_with_slash() {
let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#); let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);
@@ -56,7 +55,7 @@ fn to_file_invalid_name_with_slash() {
); );
} }
#[test] #[test_log::test]
fn to_file_invalid_name_dot() { fn to_file_invalid_name_dot() {
let result = eval_result(r#"builtins.toFile "." "content""#); let result = eval_result(r#"builtins.toFile "." "content""#);
@@ -64,7 +63,7 @@ fn to_file_invalid_name_dot() {
assert!(result.unwrap_err().to_string().contains("invalid name")); assert!(result.unwrap_err().to_string().contains("invalid name"));
} }
#[test] #[test_log::test]
fn to_file_invalid_name_dotdot() { fn to_file_invalid_name_dotdot() {
let result = eval_result(r#"builtins.toFile ".." "content""#); let result = eval_result(r#"builtins.toFile ".." "content""#);
@@ -72,7 +71,7 @@ fn to_file_invalid_name_dotdot() {
assert!(result.unwrap_err().to_string().contains("invalid name")); assert!(result.unwrap_err().to_string().contains("invalid name"));
} }
#[test] #[test_log::test]
fn store_path_validation_not_in_store() { fn store_path_validation_not_in_store() {
let result = eval_result(r#"builtins.storePath "/tmp/foo""#); let result = eval_result(r#"builtins.storePath "/tmp/foo""#);
@@ -85,7 +84,7 @@ fn store_path_validation_not_in_store() {
); );
} }
#[test] #[test_log::test]
fn store_path_validation_malformed_hash() { fn store_path_validation_malformed_hash() {
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#) let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
.expect("Failed to create dummy file"); .expect("Failed to create dummy file");
@@ -113,7 +112,7 @@ fn store_path_validation_malformed_hash() {
); );
} }
#[test] #[test_log::test]
fn store_path_validation_missing_name() { fn store_path_validation_missing_name() {
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#) let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
.expect("Failed to create dummy file"); .expect("Failed to create dummy file");
@@ -141,7 +140,7 @@ fn store_path_validation_missing_name() {
); );
} }
#[test] #[test_log::test]
fn to_file_curried_application() { fn to_file_curried_application() {
let result = eval_result( let result = eval_result(
r#" r#"
@@ -163,7 +162,7 @@ fn to_file_curried_application() {
} }
} }
#[test] #[test_log::test]
fn to_file_number_conversion() { fn to_file_number_conversion() {
let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#) let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
.expect("Failed to evaluate"); .expect("Failed to evaluate");
@@ -177,7 +176,7 @@ fn to_file_number_conversion() {
} }
} }
#[test] #[test_log::test]
fn to_file_list_conversion() { fn to_file_list_conversion() {
let result = eval_result( let result = eval_result(
r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#, r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,

View File

@@ -1,9 +1,8 @@
mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::{eval_deep, eval_deep_result};
#[test] use crate::utils::{eval_deep, eval_deep_result};
#[test_log::test]
fn add_operator_preserves_derivation_context() { fn add_operator_preserves_derivation_context() {
let result = eval_deep( let result = eval_deep(
r#" r#"
@@ -39,7 +38,7 @@ fn add_operator_preserves_derivation_context() {
assert_eq!(result, nix_result); assert_eq!(result, nix_result);
} }
#[test] #[test_log::test]
fn derivation_minimal() { fn derivation_minimal() {
let result = eval_deep( let result = eval_deep(
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#, r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
@@ -77,7 +76,7 @@ fn derivation_minimal() {
} }
} }
#[test] #[test_log::test]
fn derivation_with_args() { fn derivation_with_args() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -99,7 +98,7 @@ fn derivation_with_args() {
} }
} }
#[test] #[test_log::test]
fn derivation_to_string() { fn derivation_to_string() {
let result = eval_deep( let result = eval_deep(
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#, r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
@@ -111,7 +110,7 @@ fn derivation_to_string() {
} }
} }
#[test] #[test_log::test]
fn derivation_missing_name() { fn derivation_missing_name() {
let result = let result =
eval_deep_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#); eval_deep_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
@@ -121,7 +120,7 @@ fn derivation_missing_name() {
assert!(err_msg.contains("missing required attribute 'name'")); assert!(err_msg.contains("missing required attribute 'name'"));
} }
#[test] #[test_log::test]
fn derivation_invalid_name_with_drv_suffix() { fn derivation_invalid_name_with_drv_suffix() {
let result = eval_deep_result( let result = eval_deep_result(
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#, r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
@@ -132,7 +131,7 @@ fn derivation_invalid_name_with_drv_suffix() {
assert!(err_msg.contains("cannot end with .drv")); assert!(err_msg.contains("cannot end with .drv"));
} }
#[test] #[test_log::test]
fn derivation_missing_builder() { fn derivation_missing_builder() {
let result = eval_deep_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#); let result = eval_deep_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
@@ -141,7 +140,7 @@ fn derivation_missing_builder() {
assert!(err_msg.contains("missing required attribute 'builder'")); assert!(err_msg.contains("missing required attribute 'builder'"));
} }
#[test] #[test_log::test]
fn derivation_missing_system() { fn derivation_missing_system() {
let result = eval_deep_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#); let result = eval_deep_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
@@ -150,7 +149,7 @@ fn derivation_missing_system() {
assert!(err_msg.contains("missing required attribute 'system'")); assert!(err_msg.contains("missing required attribute 'system'"));
} }
#[test] #[test_log::test]
fn derivation_with_env_vars() { fn derivation_with_env_vars() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -171,7 +170,7 @@ fn derivation_with_env_vars() {
} }
} }
#[test] #[test_log::test]
fn derivation_strict() { fn derivation_strict() {
let result = eval_deep( let result = eval_deep(
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#, r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
@@ -188,7 +187,7 @@ fn derivation_strict() {
} }
} }
#[test] #[test_log::test]
fn derivation_deterministic_paths() { fn derivation_deterministic_paths() {
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#; let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
@@ -204,7 +203,7 @@ fn derivation_deterministic_paths() {
} }
} }
#[test] #[test_log::test]
fn derivation_escaping_in_aterm() { fn derivation_escaping_in_aterm() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -224,7 +223,7 @@ fn derivation_escaping_in_aterm() {
} }
} }
#[test] #[test_log::test]
fn multi_output_two_outputs() { fn multi_output_two_outputs() {
let drv = eval_deep( let drv = eval_deep(
r#"derivation { r#"derivation {
@@ -267,7 +266,7 @@ fn multi_output_two_outputs() {
} }
} }
#[test] #[test_log::test]
fn multi_output_three_outputs() { fn multi_output_three_outputs() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -315,7 +314,7 @@ fn multi_output_three_outputs() {
} }
} }
#[test] #[test_log::test]
fn multi_output_backward_compat() { fn multi_output_backward_compat() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -341,7 +340,7 @@ fn multi_output_backward_compat() {
} }
} }
#[test] #[test_log::test]
fn multi_output_deterministic() { fn multi_output_deterministic() {
let result1 = eval_deep( let result1 = eval_deep(
r#"derivation { r#"derivation {
@@ -364,7 +363,7 @@ fn multi_output_deterministic() {
assert_eq!(result1, result2); assert_eq!(result1, result2);
} }
#[test] #[test_log::test]
fn fixed_output_sha256_flat() { fn fixed_output_sha256_flat() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -401,7 +400,7 @@ fn fixed_output_sha256_flat() {
} }
} }
#[test] #[test_log::test]
fn fixed_output_missing_hashalgo() { fn fixed_output_missing_hashalgo() {
assert!( assert!(
eval_deep_result( eval_deep_result(
@@ -416,7 +415,7 @@ fn fixed_output_missing_hashalgo() {
); );
} }
#[test] #[test_log::test]
fn fixed_output_recursive_mode() { fn fixed_output_recursive_mode() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -446,7 +445,7 @@ fn fixed_output_recursive_mode() {
} }
} }
#[test] #[test_log::test]
fn fixed_output_rejects_multi_output() { fn fixed_output_rejects_multi_output() {
let result = eval_deep_result( let result = eval_deep_result(
r#"derivation { r#"derivation {
@@ -464,7 +463,7 @@ fn fixed_output_rejects_multi_output() {
assert!(err_msg.contains("fixed-output") && err_msg.contains("one")); assert!(err_msg.contains("fixed-output") && err_msg.contains("one"));
} }
#[test] #[test_log::test]
fn fixed_output_invalid_hash_mode() { fn fixed_output_invalid_hash_mode() {
let result = eval_deep_result( let result = eval_deep_result(
r#"derivation { r#"derivation {
@@ -481,7 +480,7 @@ fn fixed_output_invalid_hash_mode() {
assert!(err_msg.contains("outputHashMode") && err_msg.contains("invalid")); assert!(err_msg.contains("outputHashMode") && err_msg.contains("invalid"));
} }
#[test] #[test_log::test]
fn structured_attrs_basic() { fn structured_attrs_basic() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -506,7 +505,7 @@ fn structured_attrs_basic() {
} }
} }
#[test] #[test_log::test]
fn structured_attrs_nested() { fn structured_attrs_nested() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -527,7 +526,7 @@ fn structured_attrs_nested() {
} }
} }
#[test] #[test_log::test]
fn structured_attrs_rejects_functions() { fn structured_attrs_rejects_functions() {
let result = eval_deep_result( let result = eval_deep_result(
r#"derivation { r#"derivation {
@@ -544,7 +543,7 @@ fn structured_attrs_rejects_functions() {
assert!(err_msg.contains("cannot convert lambda to JSON")); assert!(err_msg.contains("cannot convert lambda to JSON"));
} }
#[test] #[test_log::test]
fn structured_attrs_false() { fn structured_attrs_false() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -567,7 +566,7 @@ fn structured_attrs_false() {
} }
} }
#[test] #[test_log::test]
fn ignore_nulls_true() { fn ignore_nulls_true() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -589,7 +588,7 @@ fn ignore_nulls_true() {
} }
} }
#[test] #[test_log::test]
fn ignore_nulls_false() { fn ignore_nulls_false() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -612,7 +611,7 @@ fn ignore_nulls_false() {
} }
} }
#[test] #[test_log::test]
fn ignore_nulls_with_structured_attrs() { fn ignore_nulls_with_structured_attrs() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -636,7 +635,7 @@ fn ignore_nulls_with_structured_attrs() {
} }
} }
#[test] #[test_log::test]
fn all_features_combined() { fn all_features_combined() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {
@@ -663,7 +662,7 @@ fn all_features_combined() {
} }
} }
#[test] #[test_log::test]
fn fixed_output_with_structured_attrs() { fn fixed_output_with_structured_attrs() {
let result = eval_deep( let result = eval_deep(
r#"derivation { r#"derivation {

View File

@@ -1,8 +1,6 @@
mod utils; use crate::utils::eval;
use utils::eval; #[test_log::test]
#[test]
fn test_find_file_corepkg_fetchurl() { fn test_find_file_corepkg_fetchurl() {
let result = eval( let result = eval(
r#" r#"
@@ -17,13 +15,13 @@ fn test_find_file_corepkg_fetchurl() {
assert!(result.to_string().contains("fetchurl.nix")); assert!(result.to_string().contains("fetchurl.nix"));
} }
#[test] #[test_log::test]
fn test_lookup_path_syntax() { fn test_lookup_path_syntax() {
let result = eval(r#"<nix/fetchurl.nix>"#); let result = eval(r#"<nix/fetchurl.nix>"#);
assert!(result.to_string().contains("fetchurl.nix")); assert!(result.to_string().contains("fetchurl.nix"));
} }
#[test] #[test_log::test]
fn test_import_corepkg() { fn test_import_corepkg() {
let result = eval( let result = eval(
r#" r#"

View File

@@ -1,24 +1,23 @@
mod utils;
use nix_js::value::{List, Value}; use nix_js::value::{List, Value};
use utils::eval;
#[test] use crate::utils::{eval, eval_result};
#[test_log::test]
fn true_literal() { fn true_literal() {
assert_eq!(eval("true"), Value::Bool(true)); assert_eq!(eval("true"), Value::Bool(true));
} }
#[test] #[test_log::test]
fn false_literal() { fn false_literal() {
assert_eq!(eval("false"), Value::Bool(false)); assert_eq!(eval("false"), Value::Bool(false));
} }
#[test] #[test_log::test]
fn null_literal() { fn null_literal() {
assert_eq!(eval("null"), Value::Null); assert_eq!(eval("null"), Value::Null);
} }
#[test] #[test_log::test]
fn map_function() { fn map_function() {
assert_eq!( assert_eq!(
eval("map (x: x * 2) [1 2 3]"), eval("map (x: x * 2) [1 2 3]"),
@@ -26,23 +25,23 @@ fn map_function() {
); );
} }
#[test] #[test_log::test]
fn is_null_function() { fn is_null_function() {
assert_eq!(eval("isNull null"), Value::Bool(true)); assert_eq!(eval("isNull null"), Value::Bool(true));
assert_eq!(eval("isNull 5"), Value::Bool(false)); assert_eq!(eval("isNull 5"), Value::Bool(false));
} }
#[test] #[test_log::test]
fn shadow_true() { fn shadow_true() {
assert_eq!(eval("let true = false; in true"), Value::Bool(false)); assert_eq!(eval("let true = false; in true"), Value::Bool(false));
} }
#[test] #[test_log::test]
fn shadow_map() { fn shadow_map() {
assert_eq!(eval("let map = x: y: x; in map 1 2"), Value::Int(1)); assert_eq!(eval("let map = x: y: x; in map 1 2"), Value::Int(1));
} }
#[test] #[test_log::test]
fn mixed_usage() { fn mixed_usage() {
assert_eq!( assert_eq!(
eval("if true then map (x: x + 1) [1 2] else []"), eval("if true then map (x: x + 1) [1 2] else []"),
@@ -50,7 +49,7 @@ fn mixed_usage() {
); );
} }
#[test] #[test_log::test]
fn in_let_bindings() { fn in_let_bindings() {
assert_eq!( assert_eq!(
eval("let x = true; y = false; in x && y"), eval("let x = true; y = false; in x && y"),
@@ -58,18 +57,18 @@ fn in_let_bindings() {
); );
} }
#[test] #[test_log::test]
fn shadow_in_function() { fn shadow_in_function() {
assert_eq!(eval("(true: true) false"), Value::Bool(false)); assert_eq!(eval("(true: true) false"), Value::Bool(false));
} }
#[test] #[test_log::test]
fn throw_function() { fn throw_function() {
let result = utils::eval_result("throw \"error message\""); let result = eval_result("throw \"error message\"");
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test_log::test]
fn to_string_function() { fn to_string_function() {
assert_eq!(eval("toString 42"), Value::String("42".to_string())); assert_eq!(eval("toString 42"), Value::String("42".to_string()));
} }

View File

@@ -1,20 +1,19 @@
mod utils;
use nix_js::value::Value; use nix_js::value::Value;
use utils::{eval, eval_result};
#[test] use crate::utils::{eval, eval_result};
#[test_log::test]
fn required_parameters() { fn required_parameters() {
assert_eq!(eval("({ a, b }: a + b) { a = 1; b = 2; }"), Value::Int(3)); assert_eq!(eval("({ a, b }: a + b) { a = 1; b = 2; }"), Value::Int(3));
} }
#[test] #[test_log::test]
fn missing_required_parameter() { fn missing_required_parameter() {
let result = eval_result("({ a, b }: a + b) { a = 1; }"); let result = eval_result("({ a, b }: a + b) { a = 1; }");
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test_log::test]
fn all_required_parameters_present() { fn all_required_parameters_present() {
assert_eq!( assert_eq!(
eval("({ x, y, z }: x + y + z) { x = 1; y = 2; z = 3; }"), eval("({ x, y, z }: x + y + z) { x = 1; y = 2; z = 3; }"),
@@ -22,13 +21,13 @@ fn all_required_parameters_present() {
); );
} }
#[test] #[test_log::test]
fn reject_unexpected_arguments() { fn reject_unexpected_arguments() {
let result = eval_result("({ a, b }: a + b) { a = 1; b = 2; c = 3; }"); let result = eval_result("({ a, b }: a + b) { a = 1; b = 2; c = 3; }");
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test_log::test]
fn ellipsis_accepts_extra_arguments() { fn ellipsis_accepts_extra_arguments() {
assert_eq!( assert_eq!(
eval("({ a, b, ... }: a + b) { a = 1; b = 2; c = 3; }"), eval("({ a, b, ... }: a + b) { a = 1; b = 2; c = 3; }"),
@@ -36,12 +35,12 @@ fn ellipsis_accepts_extra_arguments() {
); );
} }
#[test] #[test_log::test]
fn default_parameters() { fn default_parameters() {
assert_eq!(eval("({ a, b ? 5 }: a + b) { a = 1; }"), Value::Int(6)); assert_eq!(eval("({ a, b ? 5 }: a + b) { a = 1; }"), Value::Int(6));
} }
#[test] #[test_log::test]
fn override_default_parameter() { fn override_default_parameter() {
assert_eq!( assert_eq!(
eval("({ a, b ? 5 }: a + b) { a = 1; b = 10; }"), eval("({ a, b ? 5 }: a + b) { a = 1; b = 10; }"),
@@ -49,7 +48,7 @@ fn override_default_parameter() {
); );
} }
#[test] #[test_log::test]
fn at_pattern_alias() { fn at_pattern_alias() {
assert_eq!( assert_eq!(
eval("(args@{ a, b }: args.a + args.b) { a = 1; b = 2; }"), eval("(args@{ a, b }: args.a + args.b) { a = 1; b = 2; }"),
@@ -57,17 +56,17 @@ fn at_pattern_alias() {
); );
} }
#[test] #[test_log::test]
fn simple_parameter_no_validation() { fn simple_parameter_no_validation() {
assert_eq!(eval("(x: x.a + x.b) { a = 1; b = 2; }"), Value::Int(3)); assert_eq!(eval("(x: x.a + x.b) { a = 1; b = 2; }"), Value::Int(3));
} }
#[test] #[test_log::test]
fn simple_parameter_accepts_any_argument() { fn simple_parameter_accepts_any_argument() {
assert_eq!(eval("(x: x) 42"), Value::Int(42)); assert_eq!(eval("(x: x) 42"), Value::Int(42));
} }
#[test] #[test_log::test]
fn nested_function_parameters() { fn nested_function_parameters() {
assert_eq!( assert_eq!(
eval("({ a }: { b }: a + b) { a = 5; } { b = 3; }"), eval("({ a }: { b }: a + b) { a = 5; } { b = 3; }"),
@@ -75,12 +74,12 @@ fn nested_function_parameters() {
); );
} }
#[test] #[test_log::test]
fn pattern_param_simple_reference_in_default() { fn pattern_param_simple_reference_in_default() {
assert_eq!(eval("({ a, b ? a }: b) { a = 10; }"), Value::Int(10)); assert_eq!(eval("({ a, b ? a }: b) { a = 10; }"), Value::Int(10));
} }
#[test] #[test_log::test]
fn pattern_param_multiple_references_in_default() { fn pattern_param_multiple_references_in_default() {
assert_eq!( assert_eq!(
eval("({ a, b ? a + 5, c ? 1 }: b + c) { a = 10; }"), eval("({ a, b ? a + 5, c ? 1 }: b + c) { a = 10; }"),
@@ -88,7 +87,7 @@ fn pattern_param_multiple_references_in_default() {
); );
} }
#[test] #[test_log::test]
fn pattern_param_mutual_reference() { fn pattern_param_mutual_reference() {
assert_eq!( assert_eq!(
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; }"), eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; }"),
@@ -96,7 +95,7 @@ fn pattern_param_mutual_reference() {
); );
} }
#[test] #[test_log::test]
fn pattern_param_override_mutual_reference() { fn pattern_param_override_mutual_reference() {
assert_eq!( assert_eq!(
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; c = 10; }"), eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; c = 10; }"),
@@ -104,7 +103,7 @@ fn pattern_param_override_mutual_reference() {
); );
} }
#[test] #[test_log::test]
fn pattern_param_reference_list() { fn pattern_param_reference_list() {
assert_eq!( assert_eq!(
eval("({ a, b ? [ a 2 ] }: builtins.elemAt b 0) { a = 42; }"), eval("({ a, b ? [ a 2 ] }: builtins.elemAt b 0) { a = 42; }"),
@@ -112,7 +111,7 @@ fn pattern_param_reference_list() {
); );
} }
#[test] #[test_log::test]
fn pattern_param_alias_in_default() { fn pattern_param_alias_in_default() {
assert_eq!( assert_eq!(
eval("(args@{ a, b ? args.a + 10 }: b) { a = 5; }"), eval("(args@{ a, b ? args.a + 10 }: b) { a = 5; }"),

View File

@@ -1,12 +1,10 @@
mod utils;
use nix_js::context::Context; use nix_js::context::Context;
use nix_js::error::Source; use nix_js::error::Source;
use nix_js::value::Value; use nix_js::value::Value;
use crate::utils::{eval, eval_result}; use crate::utils::{eval, eval_result};
#[test] #[test_log::test]
fn import_absolute_path() { fn import_absolute_path() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let lib_path = temp_dir.path().join("nix_test_lib.nix"); let lib_path = temp_dir.path().join("nix_test_lib.nix");
@@ -17,7 +15,7 @@ fn import_absolute_path() {
assert_eq!(eval(&expr), Value::Int(8)); assert_eq!(eval(&expr), Value::Int(8));
} }
#[test] #[test_log::test]
fn import_nested() { fn import_nested() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
@@ -35,7 +33,7 @@ fn import_nested() {
assert_eq!(eval(&expr), Value::Int(30)); assert_eq!(eval(&expr), Value::Int(30));
} }
#[test] #[test_log::test]
fn import_relative_path() { fn import_relative_path() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let subdir = temp_dir.path().join("subdir"); let subdir = temp_dir.path().join("subdir");
@@ -66,7 +64,7 @@ fn import_relative_path() {
assert_eq!(eval(&expr), Value::Int(7)); assert_eq!(eval(&expr), Value::Int(7));
} }
#[test] #[test_log::test]
fn import_returns_function() { fn import_returns_function() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let func_path = temp_dir.path().join("nix_test_func.nix"); let func_path = temp_dir.path().join("nix_test_func.nix");
@@ -76,7 +74,7 @@ fn import_returns_function() {
assert_eq!(eval(&expr), Value::Int(10)); assert_eq!(eval(&expr), Value::Int(10));
} }
#[test] #[test_log::test]
fn import_with_complex_dependency_graph() { fn import_with_complex_dependency_graph() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
@@ -97,7 +95,7 @@ fn import_with_complex_dependency_graph() {
// Tests for builtins.path // Tests for builtins.path
#[test] #[test_log::test]
fn path_with_file() { fn path_with_file() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
@@ -116,7 +114,7 @@ fn path_with_file() {
} }
} }
#[test] #[test_log::test]
fn path_with_custom_name() { fn path_with_custom_name() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("original.txt"); let test_file = temp_dir.path().join("original.txt");
@@ -136,7 +134,7 @@ fn path_with_custom_name() {
} }
} }
#[test] #[test_log::test]
fn path_with_directory_recursive() { fn path_with_directory_recursive() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
@@ -159,7 +157,7 @@ fn path_with_directory_recursive() {
} }
} }
#[test] #[test_log::test]
fn path_flat_with_file() { fn path_flat_with_file() {
let mut ctx = Context::new().unwrap(); let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
@@ -179,7 +177,7 @@ fn path_flat_with_file() {
} }
} }
#[test] #[test_log::test]
fn path_flat_with_directory_fails() { fn path_flat_with_directory_fails() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("mydir"); let test_dir = temp_dir.path().join("mydir");
@@ -196,7 +194,7 @@ fn path_flat_with_directory_fails() {
assert!(err_msg.contains("recursive") || err_msg.contains("regular file")); assert!(err_msg.contains("recursive") || err_msg.contains("regular file"));
} }
#[test] #[test_log::test]
fn path_nonexistent_fails() { fn path_nonexistent_fails() {
let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#; let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#;
let result = eval_result(expr); let result = eval_result(expr);
@@ -206,7 +204,7 @@ fn path_nonexistent_fails() {
assert!(err_msg.contains("does not exist")); assert!(err_msg.contains("does not exist"));
} }
#[test] #[test_log::test]
fn path_missing_path_param() { fn path_missing_path_param() {
let expr = r#"builtins.path { name = "test"; }"#; let expr = r#"builtins.path { name = "test"; }"#;
let result = eval_result(expr); let result = eval_result(expr);
@@ -216,7 +214,7 @@ fn path_missing_path_param() {
assert!(err_msg.contains("path") && err_msg.contains("required")); assert!(err_msg.contains("path") && err_msg.contains("required"));
} }
#[test] #[test_log::test]
fn path_with_sha256() { fn path_with_sha256() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("hash_test.txt"); let test_file = temp_dir.path().join("hash_test.txt");
@@ -243,7 +241,7 @@ fn path_with_sha256() {
assert_eq!(store_path1, store_path2); assert_eq!(store_path1, store_path2);
} }
#[test] #[test_log::test]
fn path_deterministic() { fn path_deterministic() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("deterministic.txt"); let test_file = temp_dir.path().join("deterministic.txt");
@@ -261,7 +259,7 @@ fn path_deterministic() {
assert_eq!(result1, result2); assert_eq!(result1, result2);
} }
#[test] #[test_log::test]
fn read_file_type_regular_file() { fn read_file_type_regular_file() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("test.txt"); let test_file = temp_dir.path().join("test.txt");
@@ -271,7 +269,7 @@ fn read_file_type_regular_file() {
assert_eq!(eval(&expr), Value::String("regular".to_string())); assert_eq!(eval(&expr), Value::String("regular".to_string()));
} }
#[test] #[test_log::test]
fn read_file_type_directory() { fn read_file_type_directory() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("testdir"); let test_dir = temp_dir.path().join("testdir");
@@ -281,7 +279,7 @@ fn read_file_type_directory() {
assert_eq!(eval(&expr), Value::String("directory".to_string())); assert_eq!(eval(&expr), Value::String("directory".to_string()));
} }
#[test] #[test_log::test]
fn read_file_type_symlink() { fn read_file_type_symlink() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let target = temp_dir.path().join("target.txt"); let target = temp_dir.path().join("target.txt");
@@ -299,7 +297,7 @@ fn read_file_type_symlink() {
} }
} }
#[test] #[test_log::test]
fn read_dir_basic() { fn read_dir_basic() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("readdir_test"); let test_dir = temp_dir.path().join("readdir_test");
@@ -331,7 +329,7 @@ fn read_dir_basic() {
} }
} }
#[test] #[test_log::test]
fn read_dir_empty() { fn read_dir_empty() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("empty_dir"); let test_dir = temp_dir.path().join("empty_dir");
@@ -347,7 +345,7 @@ fn read_dir_empty() {
} }
} }
#[test] #[test_log::test]
fn read_dir_nonexistent_fails() { fn read_dir_nonexistent_fails() {
let expr = r#"builtins.readDir "/nonexistent/directory""#; let expr = r#"builtins.readDir "/nonexistent/directory""#;
let result = eval_result(expr); let result = eval_result(expr);
@@ -355,7 +353,7 @@ fn read_dir_nonexistent_fails() {
assert!(result.is_err()); assert!(result.is_err());
} }
#[test] #[test_log::test]
fn read_dir_on_file_fails() { fn read_dir_on_file_fails() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("test.txt"); let test_file = temp_dir.path().join("test.txt");

View File

@@ -1,7 +1,5 @@
#![allow(non_snake_case)] #![allow(non_snake_case)]
mod utils;
use std::path::PathBuf; use std::path::PathBuf;
use nix_js::context::Context; use nix_js::context::Context;
@@ -9,7 +7,7 @@ use nix_js::error::Source;
use nix_js::value::Value; use nix_js::value::Value;
fn get_lang_dir() -> PathBuf { fn get_lang_dir() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/lang") PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/tests/lang")
} }
fn eval_file(name: &str) -> Result<(Value, Source), String> { fn eval_file(name: &str) -> Result<(Value, Source), String> {
@@ -44,7 +42,7 @@ fn format_value(value: &Value) -> String {
macro_rules! eval_okay_test { macro_rules! eval_okay_test {
($(#[$attr:meta])* $name:ident$(, $pre:expr)?) => { ($(#[$attr:meta])* $name:ident$(, $pre:expr)?) => {
$(#[$attr])* $(#[$attr])*
#[test] #[test_log::test]
fn $name() { fn $name() {
$(($pre)();)? $(($pre)();)?
let test_name = concat!("eval-okay-", stringify!($name)) let test_name = concat!("eval-okay-", stringify!($name))
@@ -76,7 +74,7 @@ macro_rules! eval_okay_test {
macro_rules! eval_fail_test { macro_rules! eval_fail_test {
($name:ident) => { ($name:ident) => {
#[test] #[test_log::test]
fn $name() { fn $name() {
let test_name = concat!("eval-fail-", stringify!($name)) let test_name = concat!("eval-fail-", stringify!($name))
.replace("_", "-") .replace("_", "-")
@@ -122,10 +120,7 @@ eval_okay_test!(concatmap);
eval_okay_test!(concatstringssep); eval_okay_test!(concatstringssep);
eval_okay_test!(context); eval_okay_test!(context);
eval_okay_test!(context_introspection); eval_okay_test!(context_introspection);
eval_okay_test!( eval_okay_test!(convertHash);
#[ignore = "not implemented: convertHash"]
convertHash
);
eval_okay_test!(curpos); eval_okay_test!(curpos);
eval_okay_test!(deepseq); eval_okay_test!(deepseq);
eval_okay_test!(delayed_with); eval_okay_test!(delayed_with);
@@ -158,24 +153,15 @@ eval_okay_test!(
fromTOML_timestamps fromTOML_timestamps
); );
eval_okay_test!(functionargs); eval_okay_test!(functionargs);
eval_okay_test!( eval_okay_test!(hashfile);
#[ignore = "not implemented: hashFile"] eval_okay_test!(hashstring);
hashfile
);
eval_okay_test!(
#[ignore = "not implemented: hashString"]
hashstring
);
eval_okay_test!(getattrpos); eval_okay_test!(getattrpos);
eval_okay_test!(getattrpos_functionargs); eval_okay_test!(getattrpos_functionargs);
eval_okay_test!(getattrpos_undefined); eval_okay_test!(getattrpos_undefined);
eval_okay_test!(getenv, || { eval_okay_test!(getenv, || {
unsafe { std::env::set_var("TEST_VAR", "foo") }; unsafe { std::env::set_var("TEST_VAR", "foo") };
}); });
eval_okay_test!( eval_okay_test!(groupBy);
#[ignore = "not implemented: hashString"]
groupBy
);
eval_okay_test!(r#if); eval_okay_test!(r#if);
eval_okay_test!(ind_string); eval_okay_test!(ind_string);
eval_okay_test!(import); eval_okay_test!(import);
@@ -206,15 +192,11 @@ eval_okay_test!(
eval_okay_test!(partition); eval_okay_test!(partition);
eval_okay_test!(path); eval_okay_test!(path);
eval_okay_test!(pathexists); eval_okay_test!(pathexists);
eval_okay_test!( eval_okay_test!(path_string_interpolation, || {
#[ignore = "rnix 0.13 regression: /${foo}-/*...*/ fails to parse"] unsafe {
path_string_interpolation, std::env::set_var("HOME", "/fake-home");
|| {
unsafe {
std::env::set_var("HOME", "/fake-home");
}
} }
); });
eval_okay_test!(patterns); eval_okay_test!(patterns);
eval_okay_test!(print); eval_okay_test!(print);
eval_okay_test!(readDir); eval_okay_test!(readDir);
@@ -253,22 +235,13 @@ eval_okay_test!(
tail_call_1 tail_call_1
); );
eval_okay_test!(tojson); eval_okay_test!(tojson);
eval_okay_test!( eval_okay_test!(toxml);
#[ignore = "not implemented: toXML"] eval_okay_test!(toxml2);
toxml
);
eval_okay_test!(
#[ignore = "not implemented: toXML"]
toxml2
);
eval_okay_test!(tryeval); eval_okay_test!(tryeval);
eval_okay_test!(types); eval_okay_test!(types);
eval_okay_test!(versions); eval_okay_test!(versions);
eval_okay_test!(with); eval_okay_test!(with);
eval_okay_test!( eval_okay_test!(zipAttrsWith);
#[ignore = "not implemented: hashString"]
zipAttrsWith
);
eval_fail_test!(fail_abort); eval_fail_test!(fail_abort);
eval_fail_test!(fail_addDrvOutputDependencies_empty_context); eval_fail_test!(fail_addDrvOutputDependencies_empty_context);

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More