Compare commits

..

5 Commits

46 changed files with 2747 additions and 1181 deletions

View File

@@ -1,7 +1,18 @@
vim.lsp.config("biome", {
root_dir = function (bufnr, on_dir)
root_dir = function (_bufnr, on_dir)
on_dir(vim.fn.getcwd())
end
})
vim.lsp.config("rust_analyzer", {
settings = {
["rust-analyzer"] = {
cargo = {
features = {
"daemon"
}
}
}
}
})
return {}

532
Cargo.lock generated
View File

@@ -34,6 +34,15 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
"libc",
]
[[package]]
name = "anes"
version = "0.1.6"
@@ -61,6 +70,28 @@ dependencies = [
"derive_arbitrary",
]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
@@ -95,6 +126,12 @@ dependencies = [
"vsimd",
]
[[package]]
name = "base64ct"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
[[package]]
name = "bincode"
version = "1.3.3"
@@ -176,6 +213,17 @@ dependencies = [
"syn",
]
[[package]]
name = "bstr"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
dependencies = [
"memchr",
"regex-automata",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@@ -223,6 +271,12 @@ dependencies = [
"displaydoc",
]
[[package]]
name = "camino"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
[[package]]
name = "capacity_builder"
version = "0.5.0"
@@ -266,7 +320,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
"nom 7.1.3",
]
[[package]]
@@ -287,6 +341,19 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-link",
]
[[package]]
name = "ciborium"
version = "0.2.2"
@@ -369,6 +436,12 @@ dependencies = [
"error-code",
]
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "constant_time_eq"
version = "0.3.1"
@@ -399,6 +472,12 @@ version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f"
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_maths"
version = "0.1.1"
@@ -524,6 +603,33 @@ dependencies = [
"typenum",
]
[[package]]
name = "curve25519-dalek"
version = "4.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
dependencies = [
"cfg-if",
"cpufeatures",
"curve25519-dalek-derive",
"digest",
"fiat-crypto",
"rustc_version",
"subtle",
"zeroize",
]
[[package]]
name = "curve25519-dalek-derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "data-encoding"
version = "2.9.0"
@@ -657,6 +763,16 @@ dependencies = [
"tokio",
]
[[package]]
name = "der"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"zeroize",
]
[[package]]
name = "deranged"
version = "0.5.5"
@@ -775,6 +891,30 @@ dependencies = [
"syn",
]
[[package]]
name = "ed25519"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
dependencies = [
"pkcs8",
"signature",
]
[[package]]
name = "ed25519-dalek"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9"
dependencies = [
"curve25519-dalek",
"ed25519",
"serde",
"sha2",
"subtle",
"zeroize",
]
[[package]]
name = "either"
version = "1.15.0"
@@ -826,6 +966,12 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "fiat-crypto"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.26"
@@ -1206,6 +1352,30 @@ dependencies = [
"tracing",
]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"wasm-bindgen",
"windows-core",
]
[[package]]
name = "iana-time-zone-haiku"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
"cc",
]
[[package]]
name = "icu_calendar"
version = "2.1.1"
@@ -1406,6 +1576,15 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "is_executable"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baabb8b4867b26294d818bf3f651a454b6901431711abb96e296245888d6e8c4"
dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "itertools"
version = "0.10.5"
@@ -1465,6 +1644,12 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.179"
@@ -1568,6 +1753,15 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "matchers"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
"regex-automata",
]
[[package]]
name = "memchr"
version = "2.7.5"
@@ -1640,6 +1834,57 @@ dependencies = [
"libc",
]
[[package]]
name = "nix-compat"
version = "0.1.0"
source = "git+https://git.snix.dev/snix/snix.git#5b3716a16b64771013c1c6aefaf3c9fd218e4db8"
dependencies = [
"bitflags",
"bstr",
"bytes",
"data-encoding",
"ed25519",
"ed25519-dalek",
"futures",
"mimalloc",
"nix-compat-derive",
"nom 8.0.0",
"num_enum",
"pin-project-lite",
"sha2",
"thiserror 2.0.17",
"tokio",
"tracing",
]
[[package]]
name = "nix-compat-derive"
version = "0.1.0"
source = "git+https://git.snix.dev/snix/snix.git#5b3716a16b64771013c1c6aefaf3c9fd218e4db8"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "nix-daemon"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb28bc02b8ea18d59e15fc8e86ae35850326dc5e4e2dcf17bc659f2fd79f1a08"
dependencies = [
"async-stream",
"chrono",
"futures",
"num_enum",
"tap",
"thiserror 1.0.69",
"tokio",
"tokio-stream",
"tokio-test",
"tracing",
]
[[package]]
name = "nix-js"
version = "0.1.0"
@@ -1656,7 +1901,10 @@ dependencies = [
"hex",
"itertools 0.14.0",
"mimalloc",
"nix-compat",
"nix-daemon",
"nix-js-macros",
"nix-nar",
"petgraph",
"regex",
"reqwest",
@@ -1669,6 +1917,9 @@ dependencies = [
"tar",
"tempfile",
"thiserror 2.0.17",
"tokio",
"tracing",
"tracing-subscriber",
"xz2",
"zip",
]
@@ -1683,6 +1934,18 @@ dependencies = [
"syn",
]
[[package]]
name = "nix-nar"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15dbfa157df89f4283825ff1c21d53344cfe0d222ea8fde0f9514206dc62d9e0"
dependencies = [
"camino",
"is_executable",
"symlink",
"thiserror 1.0.69",
]
[[package]]
name = "nom"
version = "7.1.3"
@@ -1693,6 +1956,24 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "nom"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
dependencies = [
"memchr",
]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@@ -1728,6 +2009,28 @@ dependencies = [
"autocfg",
]
[[package]]
name = "num_enum"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c"
dependencies = [
"num_enum_derive",
"rustversion",
]
[[package]]
name = "num_enum_derive"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "once_cell"
version = "1.21.3"
@@ -1841,6 +2144,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.32"
@@ -1911,6 +2224,15 @@ dependencies = [
"syn",
]
[[package]]
name = "proc-macro-crate"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
dependencies = [
"toml_edit",
]
[[package]]
name = "proc-macro2"
version = "1.0.95"
@@ -2040,6 +2362,9 @@ name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.16",
]
[[package]]
name = "rand_core"
@@ -2443,6 +2768,15 @@ dependencies = [
"digest",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]]
name = "shlex"
version = "1.3.0"
@@ -2459,6 +2793,15 @@ dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "simd-adler32"
version = "0.3.8"
@@ -2505,6 +2848,16 @@ dependencies = [
"url",
]
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.1"
@@ -2569,6 +2922,12 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "symlink"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a"
[[package]]
name = "syn"
version = "2.0.104"
@@ -2740,6 +3099,15 @@ dependencies = [
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
dependencies = [
"cfg-if",
]
[[package]]
name = "time"
version = "0.3.44"
@@ -2845,6 +3213,58 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-stream"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f6d24790a10a7af737693a3e8f1d03faef7e6ca0cc99aae5066f533766de545"
dependencies = [
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "toml_datetime"
version = "0.7.5+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347"
dependencies = [
"serde_core",
]
[[package]]
name = "toml_edit"
version = "0.23.10+spec-1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269"
dependencies = [
"indexmap",
"toml_datetime",
"toml_parser",
"winnow",
]
[[package]]
name = "toml_parser"
version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
dependencies = [
"winnow",
]
[[package]]
name = "tower"
version = "0.5.2"
@@ -2897,9 +3317,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
dependencies = [
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.36"
@@ -2907,6 +3339,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex-automata",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
@@ -3008,6 +3470,12 @@ dependencies = [
"which",
]
[[package]]
name = "valuable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
name = "version_check"
version = "0.9.5"
@@ -3194,12 +3662,65 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
dependencies = [
"windows-implement",
"windows-interface",
"windows-link",
"windows-result",
"windows-strings",
]
[[package]]
name = "windows-implement"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.59.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-result"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
@@ -3431,6 +3952,15 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
name = "winnow"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
[[package]]
name = "winsafe"
version = "0.0.19"

15
Justfile Normal file
View File

@@ -0,0 +1,15 @@
[no-exit-message]
@repl:
RUST_LOG=none,nix_js=debug cargo run --bin repl
[no-exit-message]
@eval expr:
RUST_LOG=none,nix_js=debug cargo run --bin eval -- '{{expr}}'
[no-exit-message]
@replr:
cargo run --bin repl --release
[no-exit-message]
@evalr expr:
cargo run --bin eval --release -- '{{expr}}'

View File

@@ -18,7 +18,7 @@
{
default = pkgs.mkShell {
packages = with pkgs; [
(fenix.packages.${system}.stable.withComponents [
(fenix.packages.${system}.latest.withComponents [
"cargo"
"clippy"
"rust-src"
@@ -30,6 +30,7 @@
lldb
valgrind
hyperfine
just
nodejs
nodePackages.npm

View File

@@ -4,13 +4,24 @@ version = "0.1.0"
edition = "2024"
build = "build.rs"
[features]
default = ["daemon"]
daemon = ["dep:tokio", "dep:nix-daemon"]
[dependencies]
mimalloc = "0.1"
tokio = { version = "1.41", features = ["rt-multi-thread", "sync"], optional = true }
nix-daemon = { version = "0.1", optional = true }
# REPL
anyhow = "1.0"
rustyline = "14.0"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
derive_more = { version = "2", features = ["full"] }
thiserror = "2"
@@ -25,6 +36,7 @@ regex = "1.11"
deno_core = "0.376"
deno_error = "0.7"
nix-nar = "0.3"
sha2 = "0.10"
hex = "0.4"

View File

@@ -1,4 +1,3 @@
use std::env;
use std::path::Path;
use std::process::Command;
@@ -68,9 +67,4 @@ fn main() {
} else {
panic!("dist/runtime.js not found after build");
}
// Print build info
if env::var("CARGO_CFG_DEBUG_ASSERTIONS").is_ok() {
println!("Built runtime.js in DEBUG mode");
}
}

View File

@@ -4,6 +4,7 @@
import type { NixValue, NixAttrs, NixList } from "../types";
import { forceAttrs, forceString, forceFunction, forceList } from "../type-assert";
import { createThunk } from "../thunk";
export const attrNames = (set: NixValue): string[] => Object.keys(forceAttrs(set)).sort();
@@ -22,17 +23,13 @@ export const hasAttr =
export const mapAttrs =
(f: NixValue) =>
(attrs: NixValue): NixAttrs => {
const forced_attrs = forceAttrs(attrs);
const forced_f = forceFunction(f);
const new_attrs: NixAttrs = {};
for (const key in forced_attrs) {
Object.defineProperty(new_attrs, key, {
get: () => forceFunction(forced_f(key))(forced_attrs[key]),
enumerable: true,
configurable: true,
});
const forcedAttrs = forceAttrs(attrs);
const forcedF = forceFunction(f);
const newAttrs: NixAttrs = {};
for (const key in forcedAttrs) {
newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]));
}
return new_attrs;
return newAttrs;
};
export const removeAttrs =

View File

@@ -11,16 +11,33 @@ import {
parseContextToInfoMap,
} from "../string-context";
/**
* builtins.hasContext - Check if string has context
*
* Returns true if the string has any store path references.
*/
export const hasContext = (value: NixValue): boolean => {
const s = forceNixString(value);
return isStringWithContext(s) && s.context.size > 0;
};
/**
* builtins.unsafeDiscardStringContext - Remove all context from string
*
* IMPORTANT: This discards string context, returning only the string value.
* Use with caution as it removes derivation dependencies.
*/
export const unsafeDiscardStringContext = (value: NixValue): string => {
const s = forceNixString(value);
return getStringValue(s);
};
/**
* builtins.unsafeDiscardOutputDependency - Convert DrvDeep to Opaque context
*
* IMPORTANT: Transforms "all outputs" references (=) to plain path references.
* Preserves other context types unchanged.
*/
export const unsafeDiscardOutputDependency = (value: NixValue): NixString => {
const s = forceNixString(value);
const strValue = getStringValue(s);
@@ -47,6 +64,12 @@ export const unsafeDiscardOutputDependency = (value: NixValue): NixString => {
return mkStringWithContext(strValue, newContext);
};
/**
* builtins.addDrvOutputDependencies - Convert Opaque to DrvDeep context
*
* IMPORTANT: Transforms plain derivation path references to "all outputs" references (=).
* The string must have exactly one context element which must be a .drv path.
*/
export const addDrvOutputDependencies = (value: NixValue): NixString => {
const s = forceNixString(value);
const strValue = getStringValue(s);
@@ -77,6 +100,14 @@ export const addDrvOutputDependencies = (value: NixValue): NixString => {
return mkStringWithContext(strValue, newContext);
};
/**
* builtins.getContext - Extract context as structured attribute set
*
* Returns an attribute set mapping store paths to their context info:
* - path: true if it's a plain store path reference (opaque)
* - allOutputs: true if it references all derivation outputs (drvDeep, encoded as =path)
* - outputs: list of specific output names (built, encoded as !output!path)
*/
export const getContext = (value: NixValue): NixAttrs => {
const s = forceNixString(value);
const context = getStringContext(s);
@@ -101,6 +132,18 @@ export const getContext = (value: NixValue): NixAttrs => {
return result;
};
/**
* builtins.appendContext - Add context to a string
*
* IMPORTANT: Merges the provided context attribute set with any existing context
* from the input string. Used to manually construct strings with specific
* derivation dependencies.
*
* Context format matches getContext output:
* - path: boolean - add as opaque reference
* - allOutputs: boolean - add as drvDeep reference (=)
* - outputs: [string] - add as built references (!output!)
*/
export const appendContext =
(strValue: NixValue) =>
(ctxValue: NixValue): NixString => {

View File

@@ -110,6 +110,12 @@ export interface CoerceResult {
* Coerce a Nix value to a string according to the specified mode.
* This implements the same behavior as Lix's EvalState::coerceToString.
*
* IMPORTANT: String context preservation rules:
* - StringWithContext: Context is collected in outContext parameter
* - Derivations (with outPath): Built context is added for the drvPath/outputName
* - Lists (ToString mode): Context from all elements is merged
* - All other coercions: No context added
*
* @param value - The value to coerce
* @param mode - The coercion mode (controls which types are allowed)
* @param copyToStore - If true, paths should be copied to the Nix store (not implemented yet)

View File

@@ -3,22 +3,33 @@
* Implemented via Rust ops exposed through deno_core
*/
import { forceAttrs, forceBool, forceString, forceNixPath } from "../type-assert";
import { forceAttrs, forceBool, forceString } from "../type-assert";
import type { NixValue, NixAttrs } from "../types";
import { isNixPath } from "../types";
import { force } from "../thunk";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { getPathValue } from "../path";
// Declare Deno.core.ops global (provided by deno_core runtime)
import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext } from "../string-context";
export const importFunc = (path: NixValue): NixValue => {
// TODO: context?
const pathStr = coerceToString(path, StringCoercionMode.Base);
const context: NixStringContext = new Set();
const pathStr = coerceToPath(path, context);
// FIXME: Context collected but not yet propagated to build system
// This means derivation dependencies from imported paths are not
// currently tracked. This will cause issues when:
// 1. Importing from derivation outputs: import "${drv}/file.nix"
// 2. Building packages that depend on imported configurations
if (context.size > 0) {
console.warn(
`[WARN] import: Path has string context which is not yet fully tracked.
Dependency tracking for imported derivations may be incomplete.`,
);
}
// Call Rust op - returns JS code string
const code = Deno.core.ops.op_import(pathStr);
return Function(`return (${code})`)();
};
@@ -28,8 +39,14 @@ export const scopedImport =
throw new Error("Not implemented: scopedImport");
};
export const storePath = (args: NixValue): never => {
throw new Error("Not implemented: storePath");
export const storePath = (pathArg: NixValue): StringWithContext => {
const context: NixStringContext = new Set();
const pathStr = coerceToPath(pathArg, context);
const validatedPath: string = Deno.core.ops.op_store_path(pathStr);
context.add(validatedPath);
return mkStringWithContext(validatedPath, context);
};
export const fetchClosure = (args: NixValue): never => {
@@ -336,9 +353,32 @@ export const path = (args: NixValue): string => {
return storePath;
};
export const toFile = (name: NixValue, s: NixValue): never => {
throw new Error("Not implemented: toFile");
};
export const toFile =
(nameArg: NixValue) =>
(contentsArg: NixValue): StringWithContext => {
const name = forceString(nameArg);
if (name.includes('/')) {
throw new Error("builtins.toFile: name cannot contain '/'");
}
if (name === '.' || name === '..') {
throw new Error("builtins.toFile: invalid name");
}
const context: NixStringContext = new Set();
const contents = coerceToString(
contentsArg,
StringCoercionMode.ToString,
false,
context
);
const references: string[] = Array.from(context);
const storePath: string = Deno.core.ops.op_to_file(name, contents, references);
return mkStringWithContext(storePath, new Set([storePath]));
};
export const toPath = (name: NixValue, s: NixValue): never => {
throw new Error("Not implemented: toPath");

View File

@@ -2,29 +2,83 @@
* String operation builtin functions
*/
import type { NixInt, NixValue } from "../types";
import { forceString, forceList, forceInt } from "../type-assert";
import type { NixInt, NixValue, NixString } from "../types";
import { forceString, forceList, forceInt, forceNixString } from "../type-assert";
import { coerceToString, StringCoercionMode } from "./conversion";
import {
type NixStringContext,
getStringValue,
getStringContext,
mkStringWithContext,
} from "../string-context";
export const stringLength = (e: NixValue): NixInt => BigInt(forceString(e).length);
/**
* builtins.substring - Extract substring while preserving string context
*
* IMPORTANT: String context must be preserved from the source string.
* This matches Lix behavior where substring operations maintain references
* to store paths and derivations.
*
* Special case: substring 0 0 str can be used idiomatically to capture
* string context efficiently without copying the string value.
*/
export const substring =
(start: NixValue) =>
(len: NixValue) =>
(s: NixValue): string => {
const str = forceString(s);
(s: NixValue): NixString => {
const startPos = Number(forceInt(start));
const length = Number(forceInt(len));
return str.substring(startPos, startPos + length);
if (startPos < 0) {
throw new Error("negative start position in 'substring'");
}
const str = forceNixString(s);
const strValue = getStringValue(str);
const context = getStringContext(str);
if (length === 0) {
if (context.size === 0) {
return "";
}
return mkStringWithContext("", context);
}
const actualLength = length < 0 ? Number.MAX_SAFE_INTEGER : length;
const result = startPos >= strValue.length ? "" : strValue.substring(startPos, startPos + actualLength);
if (context.size === 0) {
return result;
}
return mkStringWithContext(result, context);
};
/**
* builtins.concatStringsSep - Concatenate strings with separator, merging contexts
*
* IMPORTANT: String context must be collected from both the separator and all
* list elements, then merged into the result. This ensures that store path
* references are preserved when building paths like "/nix/store/xxx/bin:/nix/store/yyy/bin".
*/
export const concatStringsSep =
(sep: NixValue) =>
(list: NixValue): string =>
// FIXME: context?
forceList(list)
.map((elem) => coerceToString(elem, StringCoercionMode.Interpolation))
.join(forceString(sep));
(list: NixValue): NixString => {
const context: NixStringContext = new Set();
const separator = coerceToString(sep, StringCoercionMode.Interpolation, false, context);
const parts = forceList(list).map((elem) =>
coerceToString(elem, StringCoercionMode.Interpolation, false, context),
);
const result = parts.join(separator);
if (context.size === 0) {
return result;
}
return mkStringWithContext(result, context);
};
export const baseNameOf = (x: NixValue): string => {
const str = forceString(x);

View File

@@ -3,22 +3,23 @@
*/
import type { NixValue, NixAttrs, NixBool, NixString, NixPath } from "./types";
import { forceAttrs, forceFunction, forceString, typeName } from "./type-assert";
import { forceAttrs, forceBool, forceFunction, forceString, typeName } from "./type-assert";
import { isAttrs } from "./builtins/type-check";
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import {
type NixStringContext,
mkStringWithContext,
isStringWithContext,
getStringContext,
} from "./string-context";
import { type NixStringContext, mkStringWithContext, isStringWithContext } from "./string-context";
import { force } from "./thunk";
import { mkPath } from "./path";
import { isNixPath } from "./types";
import { CatchableError, isNixPath } from "./types";
/**
* Concatenate multiple values into a string or path with context
* This is used for string interpolation like "hello ${world}"
*
* IMPORTANT: String context handling:
* - All contexts from interpolated values are merged into the result
* - Path mode: Store contexts are forbidden (will throw error)
* - String mode: All contexts are preserved and merged
*
* If first element is a path, result is a path (with constraint: no store context allowed)
*
* @param parts - Array of values to concatenate
@@ -98,9 +99,9 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
* @param path - Path string (may be relative or absolute)
* @returns NixPath object with absolute path
*/
export const resolvePath = (path: NixValue): NixPath => {
const path_str = forceString(path);
const resolved = Deno.core.ops.op_resolve_path(path_str);
// Resolve a (possibly relative) Nix path literal to an absolute NixPath.
//
// @param currentDir - Absolute directory of the file being evaluated; base for relative paths.
// @param path - Nix value holding the path text; forced to a string first.
// @returns NixPath wrapping the absolute path computed by the native op.
export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
const pathStr = forceString(path);
// Resolution is delegated to the Rust side so both halves agree on the rules.
const resolved = Deno.core.ops.op_resolve_path(currentDir, pathStr);
return mkPath(resolved);
};
@@ -226,3 +227,10 @@ export const call = (func: NixValue, arg: NixValue): NixValue => {
}
throw new Error(`attempt to call something which is not a function but ${typeName(forcedFunc)}`);
};
/**
 * Implements Nix `assert cond; body`: yields `expr` when the forced
 * assertion holds, otherwise raises a catchable evaluation error that
 * quotes the original assertion source text.
 */
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string): NixValue => {
  if (!forceBool(assertion)) {
    throw new CatchableError(`assertion '${assertionRaw}' failed`);
  }
  return expr;
};

View File

@@ -13,6 +13,7 @@ import {
hasAttr,
concatStringsWithContext,
call,
assert,
} from "./helpers";
import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
@@ -34,6 +35,7 @@ export const Nix = {
IS_PATH,
DEBUG_THUNKS,
assert,
call,
hasAttr,
select,

View File

@@ -250,27 +250,5 @@ export const op = {
return Array.prototype.concat.call(forceList(a), forceList(b));
},
update: (a: NixValue, b: NixValue): NixAttrs => {
const forcedA = forceAttrs(a);
const forcedB = forceAttrs(b);
const newAttrs: NixAttrs = {};
for (const key in forcedA) {
Object.defineProperty(newAttrs, key, {
get: () => force(forcedA[key]),
enumerable: true,
configurable: true,
});
}
for (const key in forcedB) {
Object.defineProperty(newAttrs, key, {
get: () => force(forcedB[key]),
enumerable: true,
configurable: true,
});
}
return newAttrs;
},
// Nix `//` operator: right-biased shallow merge of two attribute sets.
// NOTE(review): spreading evaluates each property, so getter-backed lazy
// attrs are forced eagerly here — confirm that losing per-attribute laziness
// (vs. the previous defineProperty/getter version) is intended.
update: (a: NixValue, b: NixValue): NixAttrs => ({ ...forceAttrs(a), ...forceAttrs(b) }),
};

View File

@@ -1,3 +1,28 @@
/**
* String Context System for Nix
*
* String context tracks references to store paths and derivations within strings.
* This is critical for Nix's dependency tracking - when a string containing a
* store path is used in a derivation, that store path becomes a build dependency.
*
* Context Elements (encoded as strings):
* - Opaque: Plain store path reference
* Format: "/nix/store/..."
* Example: "/nix/store/abc123-hello"
*
* - DrvDeep: Derivation with all outputs
* Format: "=/nix/store/...drv"
* Example: "=/nix/store/xyz789-hello.drv"
* Meaning: All outputs of this derivation and its closure
*
* - Built: Specific derivation output
* Format: "!<output>!/nix/store/...drv"
* Example: "!out!/nix/store/xyz789-hello.drv"
* Meaning: Specific output (e.g., "out", "dev", "lib") of this derivation
*
* This implementation matches Lix's NixStringContext system.
*/
export const HAS_CONTEXT = Symbol("HAS_CONTEXT");
export interface StringContextOpaque {
@@ -143,6 +168,20 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
return result;
};
/**
* Extract input derivations and source paths from context
*
* IMPORTANT: Used by derivation builder to determine build dependencies.
*
* Returns:
* - inputDrvs: Map of derivation paths to their required output names
* - inputSrcs: Set of plain store paths (opaque) and drvDeep references
*
* Context type handling:
* - Opaque: Added to inputSrcs
* - DrvDeep: Added to inputSrcs (entire derivation + all outputs)
* - Built: Added to inputDrvs with specific output name
*/
export const extractInputDrvsAndSrcs = (
context: NixStringContext,
): { inputDrvs: Map<string, Set<string>>; inputSrcs: Set<string> } => {

View File

@@ -35,7 +35,7 @@ declare global {
namespace Deno {
namespace core {
namespace ops {
function op_resolve_path(path: string): string;
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_read_file(path: string): string;
function op_path_exists(path: string): boolean;
@@ -76,6 +76,8 @@ declare global {
recursive: boolean,
sha256: string | null,
): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
}
}
}

View File

@@ -1,8 +1,15 @@
use anyhow::Result;
use nix_js::context::Context;
use std::process::exit;
use tracing_subscriber::EnvFilter;
fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut args = std::env::args();
if args.len() != 2 {
eprintln!("Usage: {} expr", args.next().unwrap());

View File

@@ -1,11 +1,17 @@
use anyhow::Result;
use nix_js::context::Context;
use regex::Regex;
use rustyline::DefaultEditor;
use rustyline::error::ReadlineError;
use nix_js::context::Context;
use tracing_subscriber::EnvFilter;
fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut rl = DefaultEditor::new()?;
let mut context = Context::new()?;
let re = Regex::new(r"^\s*([a-zA-Z_][a-zA-Z0-9_'-]*)\s*=(.*)$").unwrap();

View File

@@ -1,14 +1,28 @@
use std::path::Path;
use itertools::Itertools as _;
use crate::ir::*;
pub(crate) trait Compile<Ctx: CodegenContext> {
/// Compile an IR tree to one self-contained JavaScript expression, prefixed
/// with the evaluation preamble: an optional thunk-debugging switch and the
/// `currentDir` binding that `Nix.resolvePath` uses for relative paths.
pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
    let body = expr.compile(ctx);
    // Thunk debugging is opted into via the environment, not a compile flag.
    let prefix = match std::env::var("NIX_JS_DEBUG_THUNKS") {
        Ok(_) => "Nix.DEBUG_THUNKS.enabled=true,",
        Err(_) => "",
    };
    let dir = ctx.get_current_dir().display().to_string().escape_quote();
    format!("({prefix}currentDir={dir},{body})")
}
/// Internal trait implemented by every IR node kind: emit the JavaScript
/// source text for `self`, using `ctx` to resolve child expressions/symbols.
trait Compile<Ctx: CodegenContext> {
fn compile(&self, ctx: &Ctx) -> String;
}
/// Lookup services the code generator requires from the compilation context.
pub(crate) trait CodegenContext {
/// Resolve an expression id to its IR node.
fn get_ir(&self, id: ExprId) -> &Ir;
/// Resolve an interned symbol id to its string form.
fn get_sym(&self, id: SymId) -> &str;
/// Directory of the file being compiled; emitted as `currentDir` in the preamble.
fn get_current_dir(&self) -> &Path;
}
trait EscapeQuote {
@@ -45,7 +59,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
Ir::Path(p) => {
// Path needs runtime resolution
let path_expr = ctx.get_ir(p.expr).compile(ctx);
format!("Nix.resolvePath({})", path_expr)
format!("Nix.resolvePath(currentDir,{})", path_expr)
}
&Ir::If(If { cond, consq, alter }) => {
let cond = ctx.get_ir(cond).compile(ctx);
@@ -75,11 +89,17 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
}
Ir::ConcatStrings(x) => x.compile(ctx),
Ir::HasAttr(x) => x.compile(ctx),
&Ir::Assert(Assert { assertion, expr }) => {
&Ir::Assert(Assert {
assertion,
expr,
ref assertion_raw,
}) => {
let assertion = ctx.get_ir(assertion).compile(ctx);
let expr_dbg = ctx.get_ir(expr);
let expr = ctx.get_ir(expr).compile(ctx);
format!("({assertion})?({expr}):(()=>{{throw new Error(`assertion failed ({expr_dbg:#?})`)}})()")
format!(
"Nix.assert({assertion},{expr},{})",
assertion_raw.escape_quote()
)
}
}
}
@@ -102,13 +122,13 @@ impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
Leq => format!("Nix.op.lte({},{})", lhs, rhs),
Geq => format!("Nix.op.gte({},{})", lhs, rhs),
// Short-circuit operators: use JavaScript native && and ||
And => format!("Nix.force({}) && Nix.force({})", lhs, rhs),
Or => format!("Nix.force({}) || Nix.force({})", lhs, rhs),
Impl => format!("(!Nix.force({}) || Nix.force({}))", lhs, rhs),
And => format!("Nix.force({})&&Nix.force({})", lhs, rhs),
Or => format!("Nix.force({})||Nix.force({})", lhs, rhs),
Impl => format!("(!Nix.force({})||Nix.force({}))", lhs, rhs),
Con => format!("Nix.op.concat({},{})", lhs, rhs),
Upd => format!("Nix.op.update({},{})", lhs, rhs),
PipeL => format!("Nix.call({}, {})", rhs, lhs),
PipeR => format!("Nix.call({}, {})", lhs, rhs),
PipeL => format!("Nix.call({},{})", rhs, lhs),
PipeR => format!("Nix.call({},{})", lhs, rhs),
}
}
}
@@ -183,7 +203,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Call {
fn compile(&self, ctx: &Ctx) -> String {
let func = ctx.get_ir(self.func).compile(ctx);
let arg = ctx.get_ir(self.arg).compile(ctx);
format!("Nix.call({func}, {arg})")
format!("Nix.call({func},{arg})")
}
}
@@ -231,7 +251,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Let {
}
let body = ctx.get_ir(self.body).compile(ctx);
format!("(()=>{{{}; return {}}})()", js_statements.join(";"), body)
format!("(()=>{{{};return {}}})()", js_statements.join(";"), body)
}
}
@@ -247,9 +267,12 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
})
.join(",");
if let Some(default) = self.default {
format!("Nix.selectWithDefault({lhs}, [{attrpath}], {})", ctx.get_ir(default).compile(ctx))
format!(
"Nix.selectWithDefault({lhs},[{attrpath}],{})",
ctx.get_ir(default).compile(ctx)
)
} else {
format!("Nix.select({lhs}, [{attrpath}])")
format!("Nix.select({lhs},[{attrpath}])")
}
}
}
@@ -261,16 +284,16 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
for (&sym, &expr) in &self.stcs {
let key = ctx.get_sym(sym);
let value = ctx.get_ir(expr).compile(ctx);
attrs.push(format!("{}: {}", key.escape_quote(), value));
attrs.push(format!("{}:{}", key.escape_quote(), value));
}
for (key_expr, value_expr) in &self.dyns {
let key = ctx.get_ir(*key_expr).compile(ctx);
let value = ctx.get_ir(*value_expr).compile(ctx);
attrs.push(format!("[{}]: {}", key, value));
attrs.push(format!("[{}]:{}", key, value));
}
format!("{{{}}}", attrs.join(", "))
format!("{{{}}}", attrs.join(","))
}
}
@@ -308,6 +331,6 @@ impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr {
Attr::Dynamic(expr_id) => ctx.get_ir(*expr_id).compile(ctx),
})
.join(",");
format!("Nix.hasAttr({lhs}, [{attrpath}])")
format!("Nix.hasAttr({lhs},[{attrpath}])")
}
}

View File

@@ -1,25 +1,21 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::ptr::NonNull;
use hashbrown::HashMap;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use petgraph::graphmap::DiGraphMap;
use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, Compile};
use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result};
use crate::ir::{Builtin, DowngradeContext, ExprId, Ir, SymId};
use crate::runtime::{Runtime, RuntimeCtx};
use crate::ir::{ArgId, Builtin, Downgrade as _, DowngradeContext, ExprId, Ir, SymId, ToIr as _};
use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{StoreBackend, StoreConfig};
use crate::value::Value;
use downgrade::DowngradeCtx;
use drop_guard::{PathDropGuard, PathStackProvider};
mod downgrade;
mod drop_guard;
use std::sync::Arc;
mod private {
use super::*;
use std::ops::DerefMut;
use std::ptr::NonNull;
pub struct CtxPtr(NonNull<Ctx>);
@@ -36,18 +32,13 @@ mod private {
unsafe { self.0.as_mut() }
}
}
impl PathStackProvider for CtxPtr {
fn path_stack(&mut self) -> &mut Vec<PathBuf> {
&mut self.as_mut().path_stack
}
}
impl RuntimeCtx for CtxPtr {
fn get_current_dir(&self) -> PathBuf {
impl RuntimeContext for CtxPtr {
fn get_current_dir(&self) -> &Path {
self.as_ref().get_current_dir()
}
fn push_path_stack(&mut self, path: PathBuf) -> impl DerefMut<Target = Self> {
PathDropGuard::new(path, self)
fn set_current_file(&mut self, path: PathBuf) {
self.as_mut().current_file = Some(path);
}
fn compile_code(&mut self, expr: &str) -> Result<String> {
self.as_mut().compile_code(expr)
@@ -65,6 +56,7 @@ pub(crate) struct SccInfo {
pub struct Context {
ctx: Ctx,
runtime: Runtime<CtxPtr>,
store: Arc<StoreBackend>,
}
impl Context {
@@ -72,16 +64,29 @@ impl Context {
let ctx = Ctx::new();
let runtime = Runtime::new()?;
Ok(Self { ctx, runtime })
let config = StoreConfig::from_env();
let store = Arc::new(StoreBackend::new(config)?);
Ok(Self { ctx, runtime, store })
}
pub fn eval_code(&mut self, expr: &str) -> Result<Value> {
// Initialize `path_stack` with current directory for relative path resolution
let mut guard = PathDropGuard::new_cwd(&mut self.ctx)?;
let ctx = guard.as_ctx();
self.ctx.current_file = Some(
std::env::current_dir()
.map_err(|err| {
Error::internal(format!("Failed to get current working dir: {err}"))
})?
.join("__eval__.nix"),
);
let code = self.compile_code(expr)?;
let code = ctx.compile_code(expr)?;
self.runtime.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
self.runtime
.op_state()
.borrow_mut()
.put(self.store.clone());
self.runtime
.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
}
pub fn compile_code(&mut self, expr: &str) -> Result<String> {
@@ -92,13 +97,17 @@ impl Context {
pub(crate) fn eval_js(&mut self, code: String) -> Result<Value> {
self.runtime.eval(code, CtxPtr::new(&mut self.ctx))
}
pub fn get_store_dir(&self) -> &str {
self.store.as_store().get_store_dir()
}
}
pub(crate) struct Ctx {
irs: Vec<Ir>,
symbols: DefaultStringInterner,
global: NonNull<HashMap<SymId, ExprId>>,
path_stack: Vec<PathBuf>,
current_file: Option<PathBuf>,
}
impl Default for Ctx {
@@ -159,7 +168,7 @@ impl Default for Ctx {
symbols,
irs,
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(global))) },
path_stack: Vec::new(),
current_file: None,
}
}
}
@@ -174,15 +183,12 @@ impl Ctx {
DowngradeCtx::new(self, global_ref)
}
pub(crate) fn get_current_dir(&self) -> PathBuf {
self.path_stack
.last()
.expect(
"path_stack should never be empty when get_current_dir is called. this is a bug",
)
pub(crate) fn get_current_dir(&self) -> &Path {
self.current_file
.as_ref()
.expect("current_file is not set")
.parent()
.expect("path in path_stack should always have a parent dir. this is a bug")
.to_path_buf()
.expect("current_file doesn't have a parent dir")
}
fn compile_code(&mut self, expr: &str) -> Result<String> {
@@ -196,16 +202,8 @@ impl Ctx {
let root = self
.downgrade_ctx()
.downgrade(root.tree().expr().unwrap())?;
let code = self.get_ir(root).compile(self);
let debug_prefix = if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
"Nix.DEBUG_THUNKS.enabled=true,"
} else {
""
};
let code = format!("({}{})", debug_prefix, code);
#[cfg(debug_assertions)]
eprintln!("[DEBUG] generated code: {}", &code);
let code = compile(self.get_ir(root), self);
tracing::debug!("generated code: {}", &code);
Ok(code)
}
}
@@ -214,14 +212,301 @@ impl CodegenContext for Ctx {
fn get_ir(&self, id: ExprId) -> &Ir {
self.irs.get(id.0).expect("ExprId out of bounds")
}
fn get_sym(&self, id: SymId) -> &str {
self.symbols.resolve(id).expect("SymId out of bounds")
}
}
impl PathStackProvider for Ctx {
fn path_stack(&mut self) -> &mut Vec<PathBuf> {
&mut self.path_stack
fn get_current_dir(&self) -> &std::path::Path {
self.get_current_dir()
}
}
/// Tracks which let-bindings reference which others while a let scope is
/// being downgraded, so bindings can later be grouped into SCCs.
struct DependencyTracker {
/// Directed reference graph between binding expressions of this scope.
graph: DiGraphMap<ExprId, ()>,
/// The binding whose right-hand side is currently being downgraded, if any.
current_binding: Option<ExprId>,
/// All binding expressions that belong to this let scope.
let_scope_exprs: HashSet<ExprId>,
// The outer binding that owns this tracker (for nested let scopes in function params)
owner_binding: Option<ExprId>,
}
/// One lexical scope layer consulted during symbol lookup (searched innermost-first).
enum Scope<'ctx> {
/// Top-level globals/builtins shared across the whole downgrade.
Global(&'ctx HashMap<SymId, ExprId>),
/// Bindings introduced by a `let ... in` (or recursive attribute set).
Let(HashMap<SymId, ExprId>),
/// A single function parameter bound to its argument expression.
Param(SymId, ExprId),
/// A `with <namespace>;` scope; unresolved names fall back to selection on it.
With(ExprId),
}
/// RAII guard that pops the innermost scope from the context on drop,
/// keeping the scope stack balanced even across early returns.
struct ScopeGuard<'a, 'ctx> {
ctx: &'a mut DowngradeCtx<'ctx>,
}
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
fn drop(&mut self) {
// Remove the scope pushed immediately before this guard was created.
self.ctx.scopes.pop();
}
}
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
/// Borrow the wrapped context while the guard keeps the scope alive.
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
self.ctx
}
}
/// Transient state for downgrading one rnix AST into the shared `Ctx` IR arena.
pub struct DowngradeCtx<'ctx> {
ctx: &'ctx mut Ctx,
/// Locally allocated IR nodes; `None` marks a reserved or extracted slot.
irs: Vec<Option<Ir>>,
/// Lexical scope stack, innermost last.
scopes: Vec<Scope<'ctx>>,
/// Monotonic counter used to mint fresh function-argument ids.
arg_id: usize,
/// One tracker per let scope currently being downgraded, innermost last.
dep_tracker_stack: Vec<DependencyTracker>,
}
impl<'ctx> DowngradeCtx<'ctx> {
/// Create a downgrade context rooted at the given global scope.
fn new(ctx: &'ctx mut Ctx, global: &'ctx HashMap<SymId, ExprId>) -> Self {
Self {
scopes: vec![Scope::Global(global)],
irs: vec![],
arg_id: 0,
dep_tracker_stack: Vec::new(),
ctx,
}
}
}
impl DowngradeContext for DowngradeCtx<'_> {
/// Append an IR node to the local arena; ids continue after the shared arena.
fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(Some(expr));
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
/// Allocate an `Ir::Arg` node with a fresh, unique argument id.
fn new_arg(&mut self) -> ExprId {
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
/// Intern a symbol name, returning its stable id (existing id if already interned).
fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym)
}
/// Resolve an interned symbol id back to its string form.
fn get_sym(&self, id: SymId) -> &str {
self.ctx.get_sym(id)
}
/// Resolve `sym` against the scope stack, innermost scope first.
///
/// Returns the `ExprId` the symbol is bound to, wrapping `Let` bindings in
/// an `Ir::ExprRef`. While resolving, reference edges are recorded in the
/// active dependency trackers so let-bindings can later be grouped into
/// SCCs. A symbol found in no explicit scope falls back to a chain of
/// `with` namespaces compiled as attribute selections with defaults.
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
for scope in self.scopes.iter().rev() {
match scope {
&Scope::Global(global_scope) => {
if let Some(&expr) = global_scope.get(&sym) {
return Ok(expr);
}
}
Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) {
// Find which tracker contains this expression
let expr_tracker_idx = self
.dep_tracker_stack
.iter()
.position(|t| t.let_scope_exprs.contains(&expr));
// Find the innermost tracker with a current_binding
let current_tracker_idx = self
.dep_tracker_stack
.iter()
.rposition(|t| t.current_binding.is_some());
// Record dependency if both exist
if let (Some(expr_idx), Some(curr_idx)) =
(expr_tracker_idx, current_tracker_idx)
{
let current_binding = self.dep_tracker_stack[curr_idx]
.current_binding
.expect("current_binding not set");
let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
// If referencing from inner scope to outer scope
if curr_idx >= expr_idx {
let tracker = &mut self.dep_tracker_stack[expr_idx];
let from_node = current_binding;
let to_node = expr;
if curr_idx > expr_idx {
// Cross-scope reference: use owner_binding if available
// NOTE(review): DiGraphMap::add_edge inserts missing nodes,
// so an `owner` from an inner tracker can be added to this
// outer graph even when it is not in its let_scope_exprs —
// confirm SCC consumers tolerate such foreign nodes.
if let Some(owner) = owner_binding {
tracker.graph.add_edge(owner, expr, ());
}
} else {
// Same-level reference: record directly
tracker.graph.add_edge(from_node, to_node, ());
}
}
}
return Ok(self.new_expr(Ir::ExprRef(expr)));
}
}
&Scope::Param(param_sym, expr) => {
if param_sym == sym {
return Ok(expr);
}
}
&Scope::With(_) => (),
}
}
// Not bound anywhere: build a fallback chain over the enclosing `with` scopes.
let namespaces: Vec<ExprId> = self
.scopes
.iter()
.filter_map(|scope| {
if let &Scope::With(namespace) = scope {
Some(namespace)
} else {
None
}
})
.collect();
// Iterating outermost-first while threading `result` through `default`
// makes the innermost `with` the first namespace consulted at runtime.
let mut result = None;
for namespace in namespaces {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
default: result, // Link to outer With or None
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
}
/// Move an IR node out of its local slot, leaving `None` behind.
/// Panics if the id is not local or the slot was already extracted.
fn extract_expr(&mut self, id: ExprId) -> Ir {
// Ids are offset by the shared arena length; convert to a local index.
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.take()
.expect("extract_expr called on an already extracted expr")
}
/// Store `expr` into a (reserved or previously extracted) local slot,
/// discarding whatever the slot held before.
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
// Ids are offset by the shared arena length; convert to a local index.
let local_id = id.0 - self.ctx.irs.len();
let _ = self
.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.insert(expr);
}
/// Reserve `slots` consecutive empty IR slots and yield their future ids,
/// allowing forward references before the exprs are filled via `replace_expr`.
#[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
self.irs.extend(std::iter::repeat_with(|| None).take(slots));
(start..start + slots).map(ExprId)
}
/// Downgrade the whole AST, then commit all local IR nodes into the shared
/// arena. Panics (via `unwrap`) if any reserved slot was left unfilled.
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
let root = root.downgrade(&mut self)?;
self.ctx
.irs
.extend(self.irs.into_iter().map(Option::unwrap));
Ok(root)
}
/// Run `f` with a `Let` scope pushed; the guard pops it on exit.
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Let(bindings));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Run `f` with a single-parameter scope pushed; the guard pops it on exit.
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Param(param, arg));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Run `f` with a `with <namespace>;` scope pushed; the guard pops it on exit.
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::With(namespace));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
/// Start dependency tracking for a new let scope whose bindings live in
/// `slots`; each slot becomes a node in a fresh reference graph.
fn push_dep_tracker(&mut self, slots: &[ExprId]) {
let mut graph = DiGraphMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
graph,
current_binding: None,
let_scope_exprs,
owner_binding: None,
});
}
/// Like `push_dep_tracker`, but additionally records the outer binding that
/// owns this nested scope so cross-scope references can be attributed to it.
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
let mut graph = DiGraphMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
graph.add_node(expr);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
graph,
current_binding: None,
let_scope_exprs,
owner_binding: Some(owner),
});
}
/// The binding currently being downgraded in the innermost tracker, if any.
fn get_current_binding(&self) -> Option<ExprId> {
self.dep_tracker_stack
.last()
.and_then(|t| t.current_binding)
}
/// Mark which binding is being downgraded in the innermost tracker.
/// Silently a no-op when no tracker is active.
fn set_current_binding(&mut self, expr: Option<ExprId>) {
if let Some(tracker) = self.dep_tracker_stack.last_mut() {
tracker.current_binding = expr;
}
}
/// Finish dependency tracking for the innermost let scope: compute the
/// strongly connected components of its reference graph and report, per
/// SCC, whether it is recursive (more than one binding, or a self-edge).
/// Panics if no tracker is active.
fn pop_dep_tracker(&mut self) -> Result<SccInfo> {
let tracker = self
.dep_tracker_stack
.pop()
.expect("pop_dep_tracker without active tracker");
use petgraph::algo::kosaraju_scc;
let sccs = kosaraju_scc(&tracker.graph);
let mut sccs_topo = Vec::new();
for scc_nodes in sccs.iter() {
let mut scc_exprs = Vec::new();
// Multi-node SCCs are mutually recursive by construction.
let mut is_recursive = scc_nodes.len() > 1;
for &expr in scc_nodes {
scc_exprs.push(expr);
// A self-edge makes a single-node SCC recursive too.
if !is_recursive && tracker.graph.contains_edge(expr, expr) {
is_recursive = true;
}
}
sccs_topo.push((scc_exprs, is_recursive));
}
Ok(SccInfo { sccs: sccs_topo })
}
}

View File

@@ -1,315 +0,0 @@
use hashbrown::HashMap;
use hashbrown::HashSet;
use petgraph::Directed;
use petgraph::Graph;
use petgraph::graph::NodeIndex;
use crate::codegen::CodegenContext;
use crate::error::{Error, Result};
use crate::ir::{ArgId, Downgrade, DowngradeContext, ExprId, Ir, SymId, ToIr};
use super::{Ctx, SccInfo};
struct DependencyTracker {
expr_to_node: HashMap<ExprId, NodeIndex>,
graph: Graph<ExprId, (), Directed>,
current_binding: Option<ExprId>,
let_scope_exprs: HashSet<ExprId>,
// The outer binding that owns this tracker (for nested let scopes in function params)
owner_binding: Option<ExprId>,
}
enum Scope<'ctx> {
Global(&'ctx HashMap<SymId, ExprId>),
Let(HashMap<SymId, ExprId>),
Param(SymId, ExprId),
With(ExprId),
}
struct ScopeGuard<'a, 'ctx> {
ctx: &'a mut DowngradeCtx<'ctx>,
}
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
fn drop(&mut self) {
self.ctx.scopes.pop();
}
}
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
self.ctx
}
}
pub struct DowngradeCtx<'ctx> {
ctx: &'ctx mut Ctx,
irs: Vec<Option<Ir>>,
scopes: Vec<Scope<'ctx>>,
arg_id: usize,
dep_tracker_stack: Vec<DependencyTracker>,
}
impl<'ctx> DowngradeCtx<'ctx> {
pub fn new(ctx: &'ctx mut Ctx, global: &'ctx HashMap<SymId, ExprId>) -> Self {
Self {
scopes: vec![Scope::Global(global)],
irs: vec![],
arg_id: 0,
dep_tracker_stack: Vec::new(),
ctx,
}
}
}
impl DowngradeContext for DowngradeCtx<'_> {
fn new_expr(&mut self, expr: Ir) -> ExprId {
self.irs.push(Some(expr));
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_arg(&mut self) -> ExprId {
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
self.arg_id += 1;
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym)
}
fn get_sym(&self, id: SymId) -> &str {
self.ctx.get_sym(id)
}
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
for scope in self.scopes.iter().rev() {
match scope {
&Scope::Global(global_scope) => {
if let Some(&expr) = global_scope.get(&sym) {
return Ok(expr);
}
}
Scope::Let(let_scope) => {
if let Some(&expr) = let_scope.get(&sym) {
// Find which tracker contains this expression
let expr_tracker_idx = self
.dep_tracker_stack
.iter()
.position(|t| t.let_scope_exprs.contains(&expr));
// Find the innermost tracker with a current_binding
let current_tracker_idx = self
.dep_tracker_stack
.iter()
.rposition(|t| t.current_binding.is_some());
// Record dependency if both exist
if let (Some(expr_idx), Some(curr_idx)) =
(expr_tracker_idx, current_tracker_idx)
{
let current_binding =
self.dep_tracker_stack[curr_idx].current_binding.unwrap();
let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
// If referencing from inner scope to outer scope
if curr_idx >= expr_idx {
let tracker = &mut self.dep_tracker_stack[expr_idx];
if let (Some(&from_node), Some(&to_node)) = (
tracker.expr_to_node.get(&current_binding),
tracker.expr_to_node.get(&expr),
) {
// Same-level reference: record directly
tracker.graph.add_edge(from_node, to_node, ());
} else if curr_idx > expr_idx {
// Cross-scope reference: use owner_binding if available
if let Some(owner) = owner_binding
&& let (Some(&from_node), Some(&to_node)) = (
tracker.expr_to_node.get(&owner),
tracker.expr_to_node.get(&expr),
)
{
tracker.graph.add_edge(from_node, to_node, ());
}
}
}
}
return Ok(self.new_expr(Ir::ExprRef(expr)));
}
}
&Scope::Param(param_sym, expr) => {
if param_sym == sym {
return Ok(expr);
}
}
&Scope::With(_) => (),
}
}
let namespaces: Vec<ExprId> = self
.scopes
.iter()
.filter_map(|scope| {
if let &Scope::With(namespace) = scope {
Some(namespace)
} else {
None
}
})
.collect();
let mut result = None;
for namespace in namespaces {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
default: result, // Link to outer With or None
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
}
fn extract_expr(&mut self, id: ExprId) -> Ir {
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.take()
.expect("extract_expr called on an already extracted expr")
}
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len();
let _ = self
.irs
.get_mut(local_id)
.expect("ExprId out of bounds")
.insert(expr);
}
#[allow(refining_impl_trait)]
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
let start = self.ctx.irs.len() + self.irs.len();
self.irs.extend(std::iter::repeat_with(|| None).take(slots));
(start..start + slots).map(ExprId)
}
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
let root = root.downgrade(&mut self)?;
self.ctx
.irs
.extend(self.irs.into_iter().map(Option::unwrap));
Ok(root)
}
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Let(bindings));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::Param(param, arg));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
self.scopes.push(Scope::With(namespace));
let mut guard = ScopeGuard { ctx: self };
f(guard.as_ctx())
}
fn push_dep_tracker(&mut self, slots: &[ExprId]) {
let mut graph = Graph::new();
let mut expr_to_node = HashMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
let node = graph.add_node(expr);
expr_to_node.insert(expr, node);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
expr_to_node,
graph,
current_binding: None,
let_scope_exprs,
owner_binding: None,
});
}
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
let mut graph = Graph::new();
let mut expr_to_node = HashMap::new();
let mut let_scope_exprs = HashSet::new();
for &expr in slots.iter() {
let node = graph.add_node(expr);
expr_to_node.insert(expr, node);
let_scope_exprs.insert(expr);
}
self.dep_tracker_stack.push(DependencyTracker {
expr_to_node,
graph,
current_binding: None,
let_scope_exprs,
owner_binding: Some(owner),
});
}
fn get_current_binding(&self) -> Option<ExprId> {
self.dep_tracker_stack
.last()
.and_then(|t| t.current_binding)
}
fn set_current_binding(&mut self, expr: Option<ExprId>) {
if let Some(tracker) = self.dep_tracker_stack.last_mut() {
tracker.current_binding = expr;
}
}
fn pop_dep_tracker(&mut self) -> Result<SccInfo> {
let tracker = self
.dep_tracker_stack
.pop()
.expect("pop_dep_tracker without active tracker");
use petgraph::algo::kosaraju_scc;
let sccs = kosaraju_scc(&tracker.graph);
let mut sccs_topo = Vec::new();
for scc_nodes in sccs.iter() {
let mut scc_exprs = Vec::new();
let mut is_recursive = scc_nodes.len() > 1;
for &node_idx in scc_nodes {
let expr = tracker.graph[node_idx];
scc_exprs.push(expr);
if !is_recursive && tracker.graph.contains_edge(node_idx, node_idx) {
is_recursive = true;
}
}
sccs_topo.push((scc_exprs, is_recursive));
}
Ok(SccInfo { sccs: sccs_topo })
}
}

View File

@@ -1,41 +0,0 @@
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use crate::error::{Error, Result};
pub trait PathStackProvider {
fn path_stack(&mut self) -> &mut Vec<PathBuf>;
}
pub struct PathDropGuard<'ctx, Ctx: PathStackProvider> {
ctx: &'ctx mut Ctx,
}
impl<'ctx, Ctx: PathStackProvider> PathDropGuard<'ctx, Ctx> {
pub fn new(path: PathBuf, ctx: &'ctx mut Ctx) -> Self {
ctx.path_stack().push(path);
Self { ctx }
}
pub fn new_cwd(ctx: &'ctx mut Ctx) -> Result<Self> {
let cwd = std::env::current_dir()
.map_err(|err| Error::downgrade_error(format!("cannot get cwd: {err}")))?;
let virtual_file = cwd.join("__eval__.nix");
ctx.path_stack().push(virtual_file);
Ok(Self { ctx })
}
pub fn as_ctx(&mut self) -> &mut Ctx {
self.ctx
}
}
impl<Ctx: PathStackProvider> Deref for PathDropGuard<'_, Ctx> {
type Target = Ctx;
fn deref(&self) -> &Self::Target {
self.ctx
}
}
impl<Ctx: PathStackProvider> DerefMut for PathDropGuard<'_, Ctx> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.ctx
}
}

View File

@@ -1,17 +1,21 @@
use std::sync::Arc;
use deno_core::{OpState, op2};
use serde::Serialize;
use tracing::debug;
mod archive;
mod cache;
pub(crate) mod cache;
mod download;
mod git;
mod hg;
mod nar;
pub use cache::FetcherCache;
pub use download::Downloader;
use deno_core::op2;
use serde::Serialize;
use crate::runtime::NixError;
use crate::store::StoreBackend;
use crate::nar;
#[derive(Serialize)]
pub struct FetchUrlResult {
@@ -55,8 +59,7 @@ pub fn op_fetch_url(
#[string] name: Option<String>,
executable: bool,
) -> Result<FetchUrlResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchurl: {}", url);
debug!("fetchurl: {}", url);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new();
@@ -101,13 +104,17 @@ pub fn op_fetch_url(
#[op2]
#[serde]
pub fn op_fetch_tarball(
state: &mut OpState,
#[string] url: String,
#[string] expected_hash: Option<String>,
#[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}", url, expected_hash, expected_nar_hash);
debug!(
"fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}",
url, expected_hash, expected_nar_hash
);
let store = state.borrow::<Arc<StoreBackend>>();
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new();
@@ -116,37 +123,37 @@ pub fn op_fetch_tarball(
// Try cache lookup with narHash if provided
if let Some(ref nar_hash) = expected_nar_hash {
let normalized = normalize_hash(nar_hash);
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: normalized nar_hash={}", normalized);
debug!("fetchTarball: normalized nar_hash={}", normalized);
if let Some(cached) = cache.get_extracted_tarball(&url, &normalized) {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (with expected nar_hash)");
debug!("fetchTarball: cache HIT (with expected nar_hash)");
// Need to compute tarball hash if not cached
let tarball_hash = expected_hash.as_ref()
let tarball_hash = expected_hash
.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
nar_hash: normalized,
});
}
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
debug!("fetchTarball: cache MISS, downloading...");
} else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (by URL, nar_hash={})", cached_nar_hash);
let tarball_hash = expected_hash.as_ref()
debug!(
"fetchTarball: cache HIT (by URL, nar_hash={})",
cached_nar_hash
);
let tarball_hash = expected_hash
.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
.unwrap_or_default();
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
nar_hash: cached_nar_hash,
});
}
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
debug!("fetchTarball: cache MISS, downloading...");
let data = downloader
.download(&url)
@@ -174,8 +181,10 @@ pub fn op_fetch_tarball(
let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: computed tarball_hash={}, nar_hash={}", tarball_hash, nar_hash);
debug!(
"fetchTarball: computed tarball_hash={}, nar_hash={}",
tarball_hash, nar_hash
);
// Verify NAR hash if provided
if let Some(ref expected) = expected_nar_hash {
@@ -189,12 +198,13 @@ pub fn op_fetch_tarball(
}
}
let store_path = cache
.put_tarball_from_extracted(&url, &nar_hash, &extracted_path, &dir_name)
.map_err(|e| NixError::from(e.to_string()))?;
// let store_path = cache
// .put_tarball_from_extracted(&url, &nar_hash, &extracted_path, &dir_name)
// .map_err(|e| NixError::from(e.to_string()))?;
let store_path = store.as_store().put_directory(&dir_name, &extracted_path).unwrap();
Ok(FetchTarballResult {
store_path: store_path.to_string_lossy().to_string(),
store_path,
hash: tarball_hash,
nar_hash,
})
@@ -211,8 +221,7 @@ pub fn op_fetch_git(
all_refs: bool,
#[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
debug!("fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());

View File

@@ -186,7 +186,6 @@ fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
pub enum ArchiveError {
IoError(std::io::Error),
ZipError(zip::result::ZipError),
UnsupportedFormat(String),
}
impl std::fmt::Display for ArchiveError {
@@ -194,9 +193,6 @@ impl std::fmt::Display for ArchiveError {
match self {
ArchiveError::IoError(e) => write!(f, "I/O error: {}", e),
ArchiveError::ZipError(e) => write!(f, "ZIP error: {}", e),
ArchiveError::UnsupportedFormat(fmt) => {
write!(f, "Unsupported archive format: {}", fmt)
}
}
}
}

View File

@@ -1,8 +1,9 @@
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::debug;
use super::archive::ArchiveError;
@@ -168,37 +169,36 @@ impl FetcherCache {
let meta_path = cache_dir.join(&key).join(".meta");
let data_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: url={}, expected_hash={}", url, expected_hash);
debug!("get_tarball: url={}, expected_hash={}", url, expected_hash);
if !meta_path.exists() || !data_dir.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: cache miss - meta or data dir not found");
debug!("get_tarball: cache miss - meta or data dir not found");
return None;
}
let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: cached hash={}, name={}", meta.hash, meta.name);
debug!("get_tarball: cached hash={}, name={}", meta.hash, meta.name);
if meta.hash == expected_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash match, checking store_path={}", store_path.display());
debug!(
"get_tarball: hash match, checking store_path={}",
store_path.display()
);
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: HIT - returning store path");
debug!("get_tarball: HIT - returning store path");
Some(store_path)
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: store path doesn't exist");
debug!("get_tarball: store path doesn't exist");
None
}
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_hash);
debug!(
"get_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_hash
);
None
}
}
@@ -241,37 +241,42 @@ impl FetcherCache {
let meta_path = cache_entry_dir.join(".meta");
let cached_content = cache_entry_dir.join("content");
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: url={}, expected_nar_hash={}", url, expected_nar_hash);
debug!(
"get_extracted_tarball: url={}, expected_nar_hash={}",
url, expected_nar_hash
);
if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: cache miss - meta or content dir not found");
debug!("get_extracted_tarball: cache miss - meta or content dir not found");
return None;
}
let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: cached hash={}, name={}", meta.hash, meta.name);
debug!(
"get_extracted_tarball: cached hash={}, name={}",
meta.hash, meta.name
);
if meta.hash == expected_nar_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: hash match, checking store_path={}", store_path.display());
debug!(
"get_extracted_tarball: hash match, checking store_path={}",
store_path.display()
);
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: HIT - returning store path");
debug!("get_extracted_tarball: HIT - returning store path");
Some(store_path)
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: store path doesn't exist");
debug!("get_extracted_tarball: store path doesn't exist");
None
}
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_nar_hash);
debug!(
"get_extracted_tarball: hash mismatch (cached={}, expected={})",
meta.hash, expected_nar_hash
);
None
}
}
@@ -283,29 +288,27 @@ impl FetcherCache {
let meta_path = cache_entry_dir.join(".meta");
let cached_content = cache_entry_dir.join("content");
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: url={}", url);
debug!("get_extracted_tarball_by_url: url={}", url);
if !meta_path.exists() || !cached_content.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: cache miss - meta or content dir not found");
debug!("get_extracted_tarball_by_url: cache miss - meta or content dir not found");
return None;
}
let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: cached hash={}, name={}", meta.hash, meta.name);
debug!(
"get_extracted_tarball_by_url: cached hash={}, name={}",
meta.hash, meta.name
);
let store_path = self.make_store_path(&meta.hash, &meta.name);
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: HIT - returning store path and hash");
debug!("get_extracted_tarball_by_url: HIT - returning store path and hash");
Some((store_path, meta.hash))
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_extracted_tarball_by_url: store path doesn't exist");
debug!("get_extracted_tarball_by_url: store path doesn't exist");
None
}
}
@@ -314,15 +317,17 @@ impl FetcherCache {
&self,
url: &str,
hash: &str,
extracted_path: &PathBuf,
extracted_path: &Path,
name: &str,
) -> Result<PathBuf, CacheError> {
let cache_dir = self.tarball_cache_dir();
let key = Self::hash_key(url);
let cache_entry_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: url={}, hash={}, name={}", url, hash, name);
debug!(
"put_tarball_from_extracted: url={}, hash={}, name={}",
url, hash, name
);
fs::create_dir_all(&cache_entry_dir)?;
@@ -339,16 +344,16 @@ impl FetcherCache {
fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;
let store_path = self.make_store_path(hash, name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: store_path={}", store_path.display());
debug!(
"put_tarball_from_extracted: store_path={}",
store_path.display()
);
if !store_path.exists() {
fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
copy_dir_recursive(extracted_path, &store_path)?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: copied to store");
debug!("put_tarball_from_extracted: copied to store");
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: store path already exists");
debug!("put_tarball_from_extracted: store path already exists");
}
Ok(store_path)
@@ -372,7 +377,7 @@ impl FetcherCache {
}
}
fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> Result<(), std::io::Error> {
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
fs::create_dir_all(dst)?;
for entry in fs::read_dir(src)? {

View File

@@ -5,6 +5,7 @@ use std::process::Command;
use super::FetchGitResult;
use super::cache::FetcherCache;
#[allow(clippy::too_many_arguments)]
pub fn fetch_git(
cache: &FetcherCache,
url: &str,
@@ -166,8 +167,8 @@ fn checkout_rev(
.output()?;
if !output.status.success() {
eprintln!(
"Warning: failed to initialize submodules: {}",
tracing::warn!(
"failed to initialize submodules: {}",
String::from_utf8_lossy(&output.stderr)
);
}

View File

@@ -1,127 +0,0 @@
use sha2::{Digest, Sha256};
use std::fs;
use std::io::{self, Write};
use std::path::Path;
/// Computes the SHA-256 hash of `path`'s NAR (Nix ARchive) serialization,
/// returned as a lowercase hex string.
pub fn compute_nar_hash(path: &Path) -> Result<String, io::Error> {
    // Sha256 implements `Write`, so the NAR stream is hashed as it is
    // produced, without buffering the whole archive in memory.
    let mut digest = Sha256::new();
    dump_path(&mut digest, path)?;
    let hash_bytes = digest.finalize();
    Ok(hex::encode(hash_bytes))
}
/// Serializes `path` as a complete NAR stream: the magic header string,
/// then the root node wrapped in "(" ... ")" framing tokens.
fn dump_path<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    // NAR magic marker comes first.
    write_string(sink, "nix-archive-1")?;
    // The root node is delimited by parenthesis tokens.
    write_string(sink, "(")?;
    dump_entry(sink, path)?;
    write_string(sink, ")")
}
/// Serializes a single filesystem node (symlink, regular file, or directory)
/// into the NAR stream as a sequence of framed key/value string tokens.
fn dump_entry<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    // symlink_metadata: does NOT follow symlinks — a link is archived as a link.
    let metadata = fs::symlink_metadata(path)?;

    if metadata.is_symlink() {
        let target = fs::read_link(path)?;
        write_string(sink, "type")?;
        write_string(sink, "symlink")?;
        write_string(sink, "target")?;
        // NOTE(review): to_string_lossy() mangles non-UTF-8 link targets;
        // confirm targets are always UTF-8 on supported platforms.
        write_string(sink, &target.to_string_lossy())?;
    } else if metadata.is_file() {
        write_string(sink, "type")?;
        write_string(sink, "regular")?;
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            // Any executable bit (owner/group/other) marks the file executable.
            if metadata.permissions().mode() & 0o111 != 0 {
                write_string(sink, "executable")?;
                write_string(sink, "")?;
            }
        }
        let contents = fs::read(path)?;
        write_string(sink, "contents")?;
        write_contents(sink, &contents)?;
    } else if metadata.is_dir() {
        write_string(sink, "type")?;
        write_string(sink, "directory")?;
        // Sort entry names so the serialization (and therefore the hash) is
        // deterministic regardless of readdir order.
        let mut entries: Vec<_> = fs::read_dir(path)?
            .filter_map(|e| e.ok())
            .map(|e| e.file_name().to_string_lossy().to_string())
            .collect();
        entries.sort();

        for name in entries {
            write_string(sink, "entry")?;
            write_string(sink, "(")?;
            write_string(sink, "name")?;
            write_string(sink, &name)?;
            write_string(sink, "node")?;
            write_string(sink, "(")?;
            // Recurse into the child node.
            dump_entry(sink, &path.join(&name))?;
            write_string(sink, ")")?;
            write_string(sink, ")")?;
        }
    }
    Ok(())
}
/// Writes `s` in NAR wire format: a little-endian u64 byte length, the raw
/// bytes, then zero padding up to the next 8-byte boundary.
fn write_string<W: Write>(sink: &mut W, s: &str) -> io::Result<()> {
    const ZEROS: [u8; 8] = [0; 8];
    let bytes = s.as_bytes();
    let len = bytes.len() as u64;
    sink.write_all(&len.to_le_bytes())?;
    sink.write_all(bytes)?;
    // Pad with zeros so the next field starts on an 8-byte boundary.
    // A single slice write replaces the original byte-at-a-time loop
    // (one write_all call instead of up to seven).
    let padding = (8 - (len % 8) as usize) % 8;
    sink.write_all(&ZEROS[..padding])?;
    Ok(())
}
/// Writes raw `contents` in NAR wire format: a little-endian u64 length,
/// the bytes themselves, then zero padding to the next 8-byte boundary.
/// Mirrors `write_string`, but takes arbitrary bytes rather than UTF-8.
fn write_contents<W: Write>(sink: &mut W, contents: &[u8]) -> io::Result<()> {
    const ZEROS: [u8; 8] = [0; 8];
    let len = contents.len() as u64;
    sink.write_all(&len.to_le_bytes())?;
    sink.write_all(contents)?;
    // Single padded write instead of the original per-byte loop,
    // keeping this consistent with write_string.
    let padding = (8 - (len % 8) as usize) % 8;
    sink.write_all(&ZEROS[..padding])?;
    Ok(())
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// A single regular file hashes to a non-empty, 64-character hex digest.
    #[test]
    fn test_simple_file() {
        let workdir = TempDir::new().unwrap();
        let target = workdir.path().join("test.txt");
        fs::write(&target, "hello").unwrap();

        let digest = compute_nar_hash(&target).unwrap();
        assert!(!digest.is_empty());
        assert_eq!(digest.len(), 64);
    }

    /// A directory containing two files also hashes to a 64-character digest.
    #[test]
    fn test_directory() {
        let workdir = TempDir::new().unwrap();
        fs::write(workdir.path().join("a.txt"), "aaa").unwrap();
        fs::write(workdir.path().join("b.txt"), "bbb").unwrap();

        let digest = compute_nar_hash(workdir.path()).unwrap();
        assert!(!digest.is_empty());
        assert_eq!(digest.len(), 64);
    }
}

View File

@@ -353,6 +353,7 @@ pub struct Assert {
pub assertion: ExprId,
/// The expression to return if the assertion is true.
pub expr: ExprId,
pub assertion_raw: String,
}
/// Represents the concatenation of multiple string expressions.

View File

@@ -3,8 +3,8 @@
use rnix::ast::{self, Expr, HasEntry};
use crate::error::{Error, Result};
use super::*;
use crate::error::{Error, Result};
pub trait Downgrade<Ctx: DowngradeContext> {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
@@ -40,9 +40,18 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let assertion = self.condition().unwrap().downgrade(ctx)?;
let assertion = self.condition().unwrap();
let assertion_raw = assertion.to_string();
let assertion = assertion.downgrade(ctx)?;
let expr = self.body().unwrap().downgrade(ctx)?;
Ok(ctx.new_expr(Assert { assertion, expr }.to_ir()))
Ok(ctx.new_expr(
Assert {
assertion,
expr,
assertion_raw,
}
.to_ir(),
))
}
}

View File

@@ -1,13 +1,16 @@
#![warn(clippy::unwrap_used)]
mod codegen;
pub mod context;
pub mod error;
pub mod value;
mod codegen;
mod fetcher;
mod ir;
mod nix_hash;
mod nar;
mod runtime;
pub mod value;
mod store;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

63
nix-js/src/nar.rs Normal file
View File

@@ -0,0 +1,63 @@
use nix_nar::Encoder;
use sha2::{Digest, Sha256};
use std::io::Read;
use std::path::Path;
use crate::error::{Error, Result};
pub fn compute_nar_hash(path: &Path) -> Result<String> {
let mut hasher = Sha256::new();
std::io::copy(
&mut Encoder::new(path).map_err(|err| Error::internal(err.to_string()))?,
&mut hasher,
)
.map_err(|err| Error::internal(err.to_string()))?;
Ok(hex::encode(hasher.finalize()))
}
pub fn pack_nar(path: &Path) -> Result<Vec<u8>> {
let mut buffer = Vec::new();
Encoder::new(path)
.map_err(|err| Error::internal(err.to_string()))?
.read_to_end(&mut buffer)
.map_err(|err| Error::internal(err.to_string()))?;
Ok(buffer)
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// A single regular file hashes to the known NAR SHA-256 digest.
    /// The exact-value assertion subsumes the old is_empty()/len() checks,
    /// which were dead code after it.
    #[test]
    fn test_simple_file() {
        let temp = TempDir::new().unwrap();
        let file_path = temp.path().join("test.txt");
        fs::write(&file_path, "hello").unwrap();

        let hash = compute_nar_hash(&file_path).unwrap();
        assert_eq!(
            hash,
            "0a430879c266f8b57f4092a0f935cf3facd48bbccde5760d4748ca405171e969"
        );
    }

    /// A directory with two files hashes deterministically to the known digest.
    #[test]
    fn test_directory() {
        let temp = TempDir::new().unwrap();
        fs::write(temp.path().join("a.txt"), "aaa").unwrap();
        fs::write(temp.path().join("b.txt"), "bbb").unwrap();

        let hash = compute_nar_hash(temp.path()).unwrap();
        assert_eq!(
            hash,
            "0036c14209749bc9b9631e2077b108b701c322ab53853cd26f2746268a86fc0f"
        );
    }
}

View File

@@ -1,7 +1,6 @@
use sha2::{Digest, Sha256};
const NIX_BASE32_CHARS: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz";
const STORE_DIR: &str = "/nix/store";
pub fn sha256_hex(data: &str) -> String {
let mut hasher = Sha256::new();
@@ -42,8 +41,8 @@ pub fn nix_base32_encode(bytes: &[u8]) -> String {
result
}
pub fn make_store_path(ty: &str, hash_hex: &str, name: &str) -> String {
let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, STORE_DIR, name);
pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);
let mut hasher = Sha256::new();
hasher.update(s.as_bytes());
@@ -52,7 +51,7 @@ pub fn make_store_path(ty: &str, hash_hex: &str, name: &str) -> String {
let compressed = compress_hash(&hash, 20);
let encoded = nix_base32_encode(&compressed);
format!("{}/{}-{}", STORE_DIR, encoded, name)
format!("{}/{}-{}", store_dir, encoded, name)
}
pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
@@ -111,7 +110,7 @@ mod tests {
#[test]
fn test_make_store_path() {
let path = make_store_path("output:out", "abc123", "hello");
let path = make_store_path("/nix/store", "output:out", "abc123", "hello");
assert!(path.starts_with("/nix/store/"));
assert!(path.ends_with("-hello"));

View File

@@ -1,7 +1,6 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::ops::DerefMut;
use std::path::{Component, PathBuf};
use std::path::{Component, Path, PathBuf};
use std::sync::Once;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
@@ -14,25 +13,27 @@ type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>;
type LocalValue<'a> = v8::Local<'a, v8::Value>;
type LocalSymbol<'a> = v8::Local<'a, v8::Symbol>;
pub(crate) trait RuntimeCtx: 'static {
fn get_current_dir(&self) -> PathBuf;
fn push_path_stack(&mut self, path: PathBuf) -> impl DerefMut<Target = Self>;
pub(crate) trait RuntimeContext: 'static {
fn get_current_dir(&self) -> &Path;
fn set_current_file(&mut self, path: PathBuf);
fn compile_code(&mut self, code: &str) -> Result<String>;
}
fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
const ESM: &[ExtensionFileSource] =
&deno_core::include_js_files!(nix_runtime dir "runtime-ts/dist", "runtime.js");
let mut ops = vec![
op_import::<Ctx>(),
op_read_file(),
op_path_exists(),
op_resolve_path::<Ctx>(),
op_resolve_path(),
op_sha256_hex(),
op_make_store_path(),
op_output_path_name(),
op_make_fixed_output_path(),
op_add_path(),
op_store_path(),
op_to_file(),
];
ops.extend(crate::fetcher::register_ops());
@@ -75,7 +76,7 @@ pub(crate) use private::NixError;
#[deno_core::op2]
#[string]
fn op_import<Ctx: RuntimeCtx>(
fn op_import<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
@@ -93,10 +94,8 @@ fn op_import<Ctx: RuntimeCtx>(
let content = std::fs::read_to_string(&absolute_path)
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
#[cfg(debug_assertions)]
eprintln!("[DEBUG] compiling file: {}", absolute_path.display());
let mut guard = ctx.push_path_stack(absolute_path);
let ctx = guard.deref_mut();
tracing::debug!("compiling file: {}", absolute_path.display());
ctx.set_current_file(absolute_path);
Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?)
}
@@ -114,19 +113,17 @@ fn op_path_exists(#[string] path: String) -> bool {
#[deno_core::op2]
#[string]
fn op_resolve_path<Ctx: RuntimeCtx>(
state: &mut OpState,
fn op_resolve_path(
#[string] current_dir: String,
#[string] path: String,
) -> std::result::Result<String, NixError> {
let ctx = state.borrow::<Ctx>();
// If already absolute, return as-is
if path.starts_with('/') {
return Ok(path);
}
// Resolve relative path against current file directory (or CWD)
let current_dir = ctx.get_current_dir().join(&path);
let current_dir = PathBuf::from(current_dir).join(&path);
let mut normalized = PathBuf::new();
for component in current_dir.components() {
match component {
@@ -151,11 +148,17 @@ fn op_sha256_hex(#[string] data: String) -> String {
#[deno_core::op2]
#[string]
fn op_make_store_path(
state: &mut OpState,
#[string] ty: String,
#[string] hash_hex: String,
#[string] name: String,
) -> String {
crate::nix_hash::make_store_path(&ty, &hash_hex, &name)
use crate::store::StoreBackend;
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
}
#[deno_core::op2]
@@ -167,15 +170,21 @@ fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String
#[deno_core::op2]
#[string]
fn op_make_fixed_output_path(
state: &mut OpState,
#[string] hash_algo: String,
#[string] hash: String,
#[string] hash_mode: String,
#[string] name: String,
) -> String {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256};
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
if hash_algo == "sha256" && hash_mode == "recursive" {
crate::nix_hash::make_store_path("source", &hash, &name)
crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
} else {
let prefix = if hash_mode == "recursive" { "r:" } else { "" };
let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
@@ -183,21 +192,24 @@ fn op_make_fixed_output_path(
hasher.update(inner_input.as_bytes());
let inner_hash = hex::encode(hasher.finalize());
crate::nix_hash::make_store_path("output:out", &inner_hash, &name)
crate::nix_hash::make_store_path(store_dir, "output:out", &inner_hash, &name)
}
}
#[deno_core::op2]
#[string]
fn op_add_path(
state: &mut OpState,
#[string] path: String,
#[string] name: Option<String>,
recursive: bool,
#[string] sha256: Option<String>,
) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256};
use std::fs;
use std::path::Path;
use std::sync::Arc;
let path_obj = Path::new(&path);
@@ -229,16 +241,18 @@ fn op_add_path(
hex::encode(hasher.finalize())
};
if let Some(expected_hash) = sha256 {
if computed_hash != expected_hash {
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
path, expected_hash, computed_hash
)));
}
if let Some(expected_hash) = sha256
&& computed_hash != expected_hash
{
return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}",
path, expected_hash, computed_hash
)));
}
let store_path = crate::nix_hash::make_store_path("source", &computed_hash, &computed_name);
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let store_path = crate::nix_hash::make_store_path(store_dir, "source", &computed_hash, &computed_name);
Ok(store_path)
}
@@ -248,8 +262,8 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
use std::fs;
if path.is_file() {
let contents = fs::read(path)
.map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let contents =
fs::read(path).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
Ok(hex::encode(hasher.finalize()))
@@ -278,8 +292,49 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
}
}
#[deno_core::op2]
#[string]
fn op_store_path(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use crate::store::{validate_store_path, StoreBackend};
use std::sync::Arc;
pub(crate) struct Runtime<Ctx: RuntimeCtx> {
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
validate_store_path(store_dir, &path).map_err(|e| NixError::from(e.to_string()))?;
store
.as_store()
.ensure_path(&path)
.map_err(|e| NixError::from(e.to_string()))?;
Ok(path)
}
/// Backs `builtins.toFile`: stores `contents` as a text file named `name`,
/// recording `references` to other store paths, and returns the resulting
/// store path. Errors are surfaced as "builtins.toFile failed: ...".
#[deno_core::op2]
#[string]
fn op_to_file(
    state: &mut OpState,
    #[string] name: String,
    #[string] contents: String,
    #[serde] references: Vec<String>,
) -> std::result::Result<String, NixError> {
    use crate::store::StoreBackend;
    use std::sync::Arc;

    // NOTE(review): assumes an Arc<StoreBackend> was placed in OpState during
    // runtime setup — `borrow` panics if it is missing. Confirm against the
    // runtime initialization code.
    let store = state.borrow::<Arc<StoreBackend>>();
    let store_path = store
        .as_store()
        .add_text_to_store(&name, &contents, references)
        .map_err(|e| NixError::from(format!("builtins.toFile failed: {}", e)))?;

    Ok(store_path)
}
pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>,
primop_metadata_symbol: v8::Global<v8::Symbol>,
@@ -288,7 +343,7 @@ pub(crate) struct Runtime<Ctx: RuntimeCtx> {
_marker: PhantomData<Ctx>,
}
impl<Ctx: RuntimeCtx> Runtime<Ctx> {
impl<Ctx: RuntimeContext> Runtime<Ctx> {
pub(crate) fn new() -> Result<Self> {
// Initialize V8 once
static INIT: Once = Once::new();
@@ -319,6 +374,10 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
})
}
pub(crate) fn op_state(&mut self) -> std::rc::Rc<std::cell::RefCell<OpState>> {
self.js_runtime.op_state()
}
pub(crate) fn eval(&mut self, script: String, ctx: Ctx) -> Result<Value> {
self.js_runtime.op_state().borrow_mut().put(ctx);
@@ -345,7 +404,8 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
))
}
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT)
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH)
#[allow(clippy::type_complexity)]
fn get_symbols(
scope: &ScopeRef,
) -> Result<(
@@ -365,55 +425,24 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
Error::internal("failed to convert global Nix Value to object".into())
})?;
let is_thunk_sym_key = v8::String::new(scope, "IS_THUNK")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let is_thunk_sym = nix_obj
.get(scope, is_thunk_sym_key.into())
.ok_or_else(|| Error::internal("failed to get IS_THUNK Symbol".into()))?;
let is_thunk = is_thunk_sym.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert IS_THUNK Value to Symbol ({err})"
))
})?;
let is_thunk = v8::Global::new(scope, is_thunk);
let primop_metadata_sym_key = v8::String::new(scope, "PRIMOP_METADATA")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let primop_metadata_sym = nix_obj
.get(scope, primop_metadata_sym_key.into())
.ok_or_else(|| Error::internal("failed to get PRIMOP_METADATA Symbol".into()))?;
let primop_metadata = primop_metadata_sym
.try_cast::<v8::Symbol>()
.map_err(|err| {
let get_symbol = |symbol| {
let key = v8::String::new(scope, symbol)
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let val = nix_obj
.get(scope, key.into())
.ok_or_else(|| Error::internal(format!("failed to get {symbol} Symbol")))?;
let sym = val.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert PRIMOP_METADATA Value to Symbol ({err})"
"failed to convert {symbol} Value to Symbol ({err})"
))
})?;
let primop_metadata = v8::Global::new(scope, primop_metadata);
Ok(v8::Global::new(scope, sym))
};
let has_context_sym_key = v8::String::new(scope, "HAS_CONTEXT")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let has_context_sym = nix_obj
.get(scope, has_context_sym_key.into())
.ok_or_else(|| Error::internal("failed to get HAS_CONTEXT Symbol".into()))?;
let has_context = has_context_sym.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert HAS_CONTEXT Value to Symbol ({err})"
))
})?;
let has_context = v8::Global::new(scope, has_context);
let is_path_sym_key = v8::String::new(scope, "IS_PATH")
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
let is_path_sym = nix_obj
.get(scope, is_path_sym_key.into())
.ok_or_else(|| Error::internal("failed to get IS_PATH Symbol".into()))?;
let is_path = is_path_sym.try_cast::<v8::Symbol>().map_err(|err| {
Error::internal(format!(
"failed to convert IS_PATH Value to Symbol ({err})"
))
})?;
let is_path = v8::Global::new(scope, is_path);
let is_thunk = get_symbol("IS_THUNK")?;
let primop_metadata = get_symbol("PRIMOP_METADATA")?;
let has_context = get_symbol("HAS_CONTEXT")?;
let is_path = get_symbol("IS_PATH")?;
Ok((is_thunk, primop_metadata, has_context, is_path))
}
@@ -616,14 +645,9 @@ mod test {
#[test]
fn to_value_working() {
let mut ctx = Context::new().unwrap();
const EXPR: &str = "({ test: [1., 9223372036854775807n, true, false, 'hello world!'] })";
assert_eq!(
ctx.eval_js(
"({
test: [1., 9223372036854775807n, true, false, 'hello world!']
})"
.into(),
)
.unwrap(),
ctx.eval_js(EXPR.into()).unwrap(),
Value::AttrSet(AttrSet::new(std::collections::BTreeMap::from([(
Symbol::from("test"),
Value::List(List::new(vec![

110
nix-js/src/store.rs Normal file
View File

@@ -0,0 +1,110 @@
mod config;
mod error;
mod validation;
use std::path::Path;
pub use config::{StoreConfig, StoreMode};
pub use validation::validate_store_path;
use crate::error::Result;
/// Abstraction over a Nix store implementation (simulated or daemon-backed).
///
/// Implementors must be usable across threads (`Send + Sync`); all methods
/// take `&self`.
pub trait Store: Send + Sync {
    /// Returns the store directory prefix (e.g. "/nix/store").
    fn get_store_dir(&self) -> &str;

    /// Returns whether `path` is a valid, registered store path.
    fn is_valid_path(&self, path: &str) -> Result<bool>;

    /// Ensures `path` exists in the store, erroring if it cannot be realized.
    fn ensure_path(&self, path: &str) -> Result<()>;

    /// Adds raw `content` to the store under `name`. `recursive` selects
    /// recursive (NAR) vs flat hashing; `references` lists other store paths
    /// the new path refers to. Returns the resulting store path.
    fn add_to_store(
        &self,
        name: &str,
        content: &[u8],
        recursive: bool,
        references: Vec<String>,
    ) -> Result<String>;

    /// Adds a text file to the store (used to implement `builtins.toFile`).
    fn add_text_to_store(
        &self,
        name: &str,
        content: &str,
        references: Vec<String>,
    ) -> Result<String>;

    /// Copies the directory tree at `path` into the store under `name`.
    fn put_directory(&self, name: &str, path: &Path) -> Result<String>;

    /// Computes the fixed-output store path for the given hash parameters.
    /// NOTE(review): exact semantics of `hash_mode` ("recursive" vs flat)
    /// should be confirmed against the implementations.
    fn make_fixed_output_path(
        &self,
        hash_algo: &str,
        hash: &str,
        hash_mode: &str,
        name: &str,
    ) -> Result<String>;
}
/// Concrete store backend: either the in-process simulated store, or a
/// connection to a real nix-daemon (only available when the crate is built
/// with the "daemon" feature).
pub enum StoreBackend {
    Simulated(SimulatedStore),
    #[cfg(feature = "daemon")]
    Daemon(DaemonStore),
}
impl StoreBackend {
    /// Selects and constructs a backend according to `config.mode`:
    /// - `Daemon`: connect to the daemon socket; without the "daemon" feature
    ///   this logs a warning and falls back to the simulated store.
    /// - `Simulated`: always use the in-process store.
    /// - `Auto`: try the daemon first (when compiled in) and fall back to
    ///   the simulated store if the connection fails.
    pub fn new(config: StoreConfig) -> Result<Self> {
        match config.mode {
            #[cfg(feature = "daemon")]
            StoreMode::Daemon => {
                let daemon = DaemonStore::connect(&config.daemon_socket)?;
                Ok(StoreBackend::Daemon(daemon))
            }
            #[cfg(not(feature = "daemon"))]
            StoreMode::Daemon => {
                // An explicit daemon request cannot be honored without the
                // feature; degrade gracefully instead of erroring out.
                tracing::warn!("Daemon mode not available (nix-js not compiled with 'daemon' feature), falling back to simulated store");
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
            StoreMode::Simulated => {
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
            #[cfg(feature = "daemon")]
            StoreMode::Auto => match DaemonStore::connect(&config.daemon_socket) {
                Ok(daemon) => {
                    tracing::debug!(
                        "Using nix-daemon at {}",
                        config.daemon_socket.display()
                    );
                    Ok(StoreBackend::Daemon(daemon))
                }
                Err(e) => {
                    // Auto mode treats a connection failure as non-fatal.
                    tracing::warn!(
                        "Daemon unavailable ({}), using simulated store",
                        e
                    );
                    let simulated = SimulatedStore::new()?;
                    Ok(StoreBackend::Simulated(simulated))
                }
            },
            #[cfg(not(feature = "daemon"))]
            StoreMode::Auto => {
                let simulated = SimulatedStore::new()?;
                Ok(StoreBackend::Simulated(simulated))
            }
        }
    }

    /// Returns the active backend as a `&dyn Store` trait object so callers
    /// don't need to match on the variant themselves.
    pub fn as_store(&self) -> &dyn Store {
        match self {
            StoreBackend::Simulated(s) => s,
            #[cfg(feature = "daemon")]
            StoreBackend::Daemon(d) => d,
        }
    }
}
mod simulated;
pub use simulated::SimulatedStore;
#[cfg(feature = "daemon")]
mod daemon;
#[cfg(feature = "daemon")]
pub use daemon::DaemonStore;

View File

@@ -0,0 +1,50 @@
use std::path::PathBuf;
/// How the evaluator talks to a Nix store.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StoreMode {
    /// Require a connection to a running nix-daemon.
    Daemon,
    /// Use the in-process simulated store only.
    Simulated,
    /// Prefer the daemon, falling back to the simulated store
    /// (see `StoreBackend::new`).
    Auto,
}
/// Store backend selection, typically resolved from environment variables.
#[derive(Debug, Clone)]
pub struct StoreConfig {
    /// Which backend to use (see `StoreMode`).
    pub mode: StoreMode,
    /// Path to the nix-daemon unix socket.
    pub daemon_socket: PathBuf,
}
impl StoreConfig {
    /// Builds a config from the environment:
    /// - `NIX_JS_STORE_MODE`: "daemon", "simulated" or "auto"
    ///   (case-insensitive). Unset defaults to auto; an unrecognized value
    ///   logs a warning and uses auto.
    /// - `NIX_DAEMON_SOCKET`: daemon socket path, defaulting to
    ///   /nix/var/nix/daemon-socket/socket.
    pub fn from_env() -> Self {
        // The double as_deref lets us match on &str after lowercasing the
        // owned String produced by `map`.
        let mode = match std::env::var("NIX_JS_STORE_MODE")
            .as_deref()
            .map(|s| s.to_lowercase())
            .as_deref()
        {
            Ok("daemon") => StoreMode::Daemon,
            Ok("simulated") => StoreMode::Simulated,
            // Err(_) covers the variable being unset (or non-unicode).
            Ok("auto") | Err(_) => StoreMode::Auto,
            Ok(other) => {
                tracing::warn!(
                    "Invalid NIX_JS_STORE_MODE '{}', using 'auto'",
                    other
                );
                StoreMode::Auto
            }
        };

        let daemon_socket = std::env::var("NIX_DAEMON_SOCKET")
            .map(PathBuf::from)
            .unwrap_or_else(|_| PathBuf::from("/nix/var/nix/daemon-socket/socket"));

        Self {
            mode,
            daemon_socket,
        }
    }
}
impl Default for StoreConfig {
fn default() -> Self {
Self::from_env()
}
}

166
nix-js/src/store/daemon.rs Normal file
View File

@@ -0,0 +1,166 @@
use std::path::Path;
use std::sync::Arc;
use nix_daemon::{Progress as _, Store as _, nix};
use tokio::net::UnixStream;
use tokio::sync::Mutex;
use crate::error::{Error, Result};
use crate::nar::pack_nar;
use super::Store;
/// Store backend backed by a real nix-daemon reached over a unix socket.
pub struct DaemonStore {
    /// Dedicated tokio runtime used to drive the async daemon protocol
    /// from this crate's synchronous `Store` API.
    runtime: tokio::runtime::Runtime,
    /// The protocol connection; the Mutex serializes concurrent requests.
    store: Arc<Mutex<nix::DaemonStore<UnixStream>>>,
}
impl DaemonStore {
    /// Connects to the nix-daemon listening at `socket_path`.
    ///
    /// Creates a private tokio runtime and blocks on the connection setup.
    /// Fails if the runtime cannot be created, the socket path is not valid
    /// UTF-8, or the daemon connection cannot be established.
    pub fn connect(socket_path: &Path) -> Result<Self> {
        let runtime = tokio::runtime::Runtime::new()
            .map_err(|e| Error::internal(format!("Failed to create tokio runtime: {}", e)))?;

        let socket_str = socket_path
            .to_str()
            .ok_or_else(|| Error::internal("Invalid socket path: not UTF-8".to_string()))?;

        let store = runtime.block_on(async {
            nix_daemon::nix::DaemonStore::builder()
                .connect_unix(socket_str)
                .await
                .map_err(|e| {
                    Error::internal(format!(
                        "Failed to connect to nix-daemon at {}: {}",
                        socket_str, e
                    ))
                })
        })?;

        Ok(Self {
            runtime,
            store: Arc::new(Mutex::new(store)),
        })
    }

    /// Runs `future` to completion on this store's private runtime, bridging
    /// the async daemon client into the synchronous `Store` trait methods.
    fn block_on<F>(&self, future: F) -> F::Output
    where
        F: std::future::Future,
    {
        self.runtime.block_on(future)
    }
}
impl Store for DaemonStore {
fn get_store_dir(&self) -> &str {
"/nix/store"
}
fn is_valid_path(&self, path: &str) -> Result<bool> {
self.block_on(async {
let mut store = self.store.lock().await;
store
.is_valid_path(path)
.result()
.await
.map_err(|e| Error::internal(format!("Daemon error in is_valid_path: {}", e)))
})
}
fn ensure_path(&self, path: &str) -> Result<()> {
self.block_on(async {
let mut store = self.store.lock().await;
store.ensure_path(path).result().await.map_err(|e| {
Error::eval_error(
format!(
"builtins.storePath: path '{}' is not valid in nix store: {}",
path, e
),
None,
)
})
})
}
fn add_to_store(
&self,
name: &str,
content: &[u8],
recursive: bool,
references: Vec<String>,
) -> Result<String> {
let temp_dir = tempfile::tempdir()
.map_err(|e| Error::internal(format!("Failed to create temp dir: {}", e)))?;
let content_path = temp_dir.path().join(name);
std::fs::write(&content_path, content)
.map_err(|e| Error::internal(format!("Failed to write content: {}", e)))?;
let cam_str = if recursive {
"fixed:r:sha256"
} else {
"fixed:sha256"
};
self.block_on(async {
let mut store = self.store.lock().await;
let (store_path, _path_info) = store
.add_to_store(
name,
cam_str,
references,
false,
content_path.as_os_str().as_encoded_bytes(),
)
.result()
.await
.map_err(|e| Error::internal(format!("Daemon error in add_to_store: {}", e)))?;
Ok(store_path)
})
}
fn add_text_to_store(
&self,
name: &str,
content: &str,
references: Vec<String>,
) -> Result<String> {
self.block_on(async {
let mut store = self.store.lock().await;
let (store_path, _) = store
.add_to_store(name, "text:sha256", references, false, content.as_bytes())
.result()
.await
.map_err(|e| {
Error::internal(format!("Daemon error in add_text_to_store: {}", e))
})?;
Ok(store_path)
})
}
fn put_directory(&self, name: &str, path: &Path) -> Result<String> {
self.block_on(async {
let mut store = self.store.lock().await;
let (store_path, _) = store
.add_to_store(name, "text:sha256", std::iter::empty::<&str>(), false, pack_nar(path).unwrap().as_slice())
.result()
.await
.map_err(|e| {
Error::internal(format!("Daemon error in add_text_to_store: {}", e))
})?;
Ok(store_path)
})
}
fn make_fixed_output_path(
&self,
_hash_algo: &str,
hash: &str,
_hash_mode: &str,
name: &str,
) -> Result<String> {
let short_hash = &hash[..32.min(hash.len())];
Ok(format!("/nix/store/{}-{}", short_hash, name))
}
}

32
nix-js/src/store/error.rs Normal file
View File

@@ -0,0 +1,32 @@
use std::fmt;
/// Errors produced by store backends.
#[derive(Debug)]
pub enum StoreError {
    /// Could not establish a connection to the nix-daemon socket.
    DaemonConnectionFailed(String),
    /// A store operation failed after the connection was established.
    OperationFailed(String),
    /// A path failed store-path validation.
    InvalidPath(String),
    /// The path is well-formed but absent from the store.
    PathNotFound(String),
    /// Underlying I/O failure.
    Io(std::io::Error),
}
impl fmt::Display for StoreError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
StoreError::DaemonConnectionFailed(msg) => {
write!(f, "Failed to connect to nix-daemon: {}", msg)
}
StoreError::OperationFailed(msg) => write!(f, "Store operation failed: {}", msg),
StoreError::InvalidPath(msg) => write!(f, "Invalid store path: {}", msg),
StoreError::PathNotFound(path) => write!(f, "Path not found in store: {}", path),
StoreError::Io(e) => write!(f, "I/O error: {}", e),
}
}
}
impl std::error::Error for StoreError {}
impl From<std::io::Error> for StoreError {
    /// Lets `?` convert std I/O failures into `StoreError::Io`.
    fn from(e: std::io::Error) -> Self {
        StoreError::Io(e)
    }
}

View File

@@ -0,0 +1,100 @@
use super::Store;
use crate::error::{Error, Result};
use crate::fetcher::cache::FetcherCache;
use std::fs;
use std::path::Path;
/// Store backend that fakes a Nix store on the local filesystem, for use
/// when no nix-daemon is available.
pub struct SimulatedStore {
    /// Backing fetcher cache that owns store-path layout and hashing.
    cache: FetcherCache,
    /// Root directory of the simulated store, as a string.
    store_dir: String,
}
impl SimulatedStore {
    /// Creates a simulated store rooted under the user cache directory,
    /// falling back to `/tmp` when no cache dir is available.
    pub fn new() -> Result<Self> {
        let cache = FetcherCache::new()
            .map_err(|e| Error::internal(format!("Failed to create simulated store: {}", e)))?;
        let base = dirs::cache_dir().unwrap_or_else(|| std::path::PathBuf::from("/tmp"));
        let store_dir = base
            .join("nix-js")
            .join("fetchers")
            .join("store")
            .to_string_lossy()
            .into_owned();
        Ok(Self { cache, store_dir })
    }

    /// Read access to the backing fetcher cache.
    pub fn cache(&self) -> &FetcherCache {
        &self.cache
    }
}
impl Store for SimulatedStore {
fn get_store_dir(&self) -> &str {
&self.store_dir
}
fn is_valid_path(&self, path: &str) -> Result<bool> {
Ok(Path::new(path).exists())
}
fn ensure_path(&self, path: &str) -> Result<()> {
if !Path::new(path).exists() {
return Err(Error::eval_error(
format!(
"builtins.storePath: path '{}' does not exist in the simulated store",
path
),
None,
));
}
Ok(())
}
fn add_to_store(
&self,
name: &str,
content: &[u8],
_recursive: bool,
_references: Vec<String>,
) -> Result<String> {
let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(content));
let store_path = self.cache.make_store_path(&hash, name);
if !store_path.exists() {
fs::create_dir_all(store_path.parent().unwrap_or(&store_path))
.map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
fs::write(&store_path, content)
.map_err(|e| Error::internal(format!("Failed to write to store: {}", e)))?;
}
Ok(store_path.to_string_lossy().to_string())
}
fn add_text_to_store(
&self,
name: &str,
content: &str,
references: Vec<String>,
) -> Result<String> {
self.add_to_store(name, content.as_bytes(), false, references)
}
fn make_fixed_output_path(
&self,
_hash_algo: &str,
hash: &str,
_hash_mode: &str,
name: &str,
) -> Result<String> {
let store_path = self.cache.make_store_path(hash, name);
Ok(store_path.to_string_lossy().to_string())
}
fn put_directory(&self, name: &str, path: &Path) -> Result<String> {
todo!()
}
}

View File

@@ -0,0 +1,132 @@
use crate::error::{Error, Result};
/// Checks that `path` is a syntactically valid store path under `store_dir`.
///
/// A valid path has the shape `<store_dir>/<hash>-<name>` where `<hash>` is
/// exactly 32 characters of the nix base32 alphabet (0-9 and a-z excluding
/// e, o, t, u) and `<name>` is non-empty, does not start with '.', and uses
/// only the restricted store-name character set.
pub fn validate_store_path(store_dir: &str, path: &str) -> Result<()> {
    if !path.starts_with(store_dir) {
        return Err(Error::eval_error(
            format!("path '{}' is not in the Nix store", path),
            None,
        ));
    }
    // Everything after "<store_dir>/".
    let relative = path
        .strip_prefix(store_dir)
        .and_then(|s| s.strip_prefix('/'))
        .ok_or_else(|| Error::eval_error(format!("invalid store path format: {}", path), None))?;
    if relative.is_empty() {
        return Err(Error::eval_error(
            format!("store path cannot be store directory itself: {}", path),
            None,
        ));
    }
    // Split "<hash>-<name>" at the first dash only.
    let Some((hash, name)) = relative.split_once('-') else {
        return Err(Error::eval_error(
            format!("invalid store path format (missing name): {}", path),
            None,
        ));
    };
    if hash.len() != 32 {
        return Err(Error::eval_error(
            format!(
                "invalid store path hash length (expected 32, got {}): {}",
                hash.len(),
                hash
            ),
            None,
        ));
    }
    // Nix base32 alphabet: digits plus lowercase letters minus e, o, t, u.
    let is_base32 =
        |c: char| matches!(c, '0'..='9' | 'a'..='d' | 'f'..='n' | 'p'..='s' | 'v'..='z');
    if let Some(ch) = hash.chars().find(|&c| !is_base32(c)) {
        return Err(Error::eval_error(
            format!("invalid character '{}' in store path hash: {}", ch, hash),
            None,
        ));
    }
    if name.is_empty() {
        return Err(Error::eval_error(
            format!("store path has empty name: {}", path),
            None,
        ));
    }
    if name.starts_with('.') {
        return Err(Error::eval_error(
            format!("store path name cannot start with '.': {}", name),
            None,
        ));
    }
    // Restricted store-name alphabet.
    let is_name_char = |c: char| {
        matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '+' | '-' | '.' | '_' | '?' | '=')
    };
    if let Some(ch) = name.chars().find(|&c| !is_name_char(c)) {
        return Err(Error::eval_error(
            format!("invalid character '{}' in store path name: {}", ch, name),
            None,
        ));
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Well-formed paths must be accepted.
    #[test]
    fn test_valid_store_paths() {
        let store_dir = "/nix/store";
        let valid_paths = [
            "/nix/store/0123456789abcdfghijklmnpqrsvwxyz-hello",
            "/nix/store/abcdfghijklmnpqrsvwxyz0123456789-hello-1.0",
            "/nix/store/00000000000000000000000000000000-test_+-.?=",
        ];
        for path in valid_paths {
            assert!(
                validate_store_path(store_dir, path).is_ok(),
                "Expected {} to be valid, got {:?}",
                path,
                validate_store_path(store_dir, path)
            );
        }
    }

    /// Each malformed path must be rejected, for the stated reason.
    #[test]
    fn test_invalid_store_paths() {
        let store_dir = "/nix/store";
        let invalid_paths = [
            ("/tmp/foo", "not in store"),
            ("/nix/store", "empty relative"),
            ("/nix/store/tooshort-name", "hash too short"),
            (
                "/nix/store/abc123defghijklmnopqrstuvwxyz123-name",
                "hash too long",
            ),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123e-name", "e in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123o-name", "o in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123u-name", "u in hash"),
            ("/nix/store/abcd1234abcd1234abcd1234abcd123t-name", "t in hash"),
            (
                "/nix/store/abcd1234abcd1234abcd1234abcd1234-.name",
                "name starts with dot",
            ),
            (
                "/nix/store/abcd1234abcd1234abcd1234abcd1234-na/me",
                "slash in name",
            ),
            ("/nix/store/abcd1234abcd1234abcd1234abcd1234", "missing name"),
        ];
        for (path, reason) in invalid_paths {
            assert!(
                validate_store_path(store_dir, path).is_err(),
                "Expected {} to be invalid ({})",
                path,
                reason
            );
        }
    }
}

View File

@@ -150,40 +150,85 @@ fn builtins_concat_lists() {
#[test]
fn builtins_compare_versions_basic() {
assert_eq!(eval("builtins.compareVersions \"1.0\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.1\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3\" \"2.3\""), Value::Int(0));
assert_eq!(eval("builtins.compareVersions \"2.5\" \"2.3\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"3.1\" \"2.3\""), Value::Int(1));
assert_eq!(
eval("builtins.compareVersions \"1.0\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.1\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3\" \"2.3\""),
Value::Int(0)
);
assert_eq!(
eval("builtins.compareVersions \"2.5\" \"2.3\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"3.1\" \"2.3\""),
Value::Int(1)
);
}
#[test]
fn builtins_compare_versions_components() {
assert_eq!(eval("builtins.compareVersions \"2.3.1\" \"2.3\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"2.3\" \"2.3.1\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3.1\" \"2.3\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3\" \"2.3.1\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_numeric_vs_alpha() {
// Numeric component comes before alpha component
assert_eq!(eval("builtins.compareVersions \"2.3.1\" \"2.3a\""), Value::Int(1));
assert_eq!(eval("builtins.compareVersions \"2.3a\" \"2.3.1\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3.1\" \"2.3a\""),
Value::Int(1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3a\" \"2.3.1\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_pre() {
// "pre" is special: comes before everything except another "pre"
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre3\" \"2.3pre12\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3c\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3pre1\" \"2.3q\""), Value::Int(-1));
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre3\" \"2.3pre12\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3c\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3pre1\" \"2.3q\""),
Value::Int(-1)
);
}
#[test]
fn builtins_compare_versions_alpha() {
// Alphabetic comparison
assert_eq!(eval("builtins.compareVersions \"2.3a\" \"2.3c\""), Value::Int(-1));
assert_eq!(eval("builtins.compareVersions \"2.3c\" \"2.3a\""), Value::Int(1));
assert_eq!(
eval("builtins.compareVersions \"2.3a\" \"2.3c\""),
Value::Int(-1)
);
assert_eq!(
eval("builtins.compareVersions \"2.3c\" \"2.3a\""),
Value::Int(1)
);
}
#[test]

View File

@@ -0,0 +1,228 @@
mod utils;
use std::sync::Once;
use nix_js::value::Value;
use utils::eval_result;
/// Forces the store mode once per test process: `daemon` when the "daemon"
/// feature is enabled, `simulated` otherwise.
fn init() {
    static INIT: Once = Once::new();
    INIT.call_once(|| {
        // NOTE(review): `set_var` is unsafe (edition 2024) because of
        // concurrent environment access; assumed safe here since it runs
        // inside `Once` before any evaluation — confirm no other test
        // thread reads the environment first.
        #[cfg(not(feature = "daemon"))]
        unsafe { std::env::set_var("NIX_JS_STORE_MODE", "simulated") };
        #[cfg(feature = "daemon")]
        unsafe { std::env::set_var("NIX_JS_STORE_MODE", "daemon") };
    });
}
// builtins.toFile should return a store path containing the file name, and
// the file on disk should hold exactly the given contents.
#[test]
fn to_file_simple() {
    init();
    let result =
        eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-hello.txt"));
            assert!(std::path::Path::new(&path).exists());
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "Hello, World!");
        }
        _ => panic!("Expected string, got {:?}", result),
    }
}

// Interpolating one toFile result into another should embed the dependency's
// store path in the referencing file's contents.
#[test]
fn to_file_with_references() {
    init();
    let result = eval_result(
        r#"
        let
          dep = builtins.toFile "dep.txt" "dependency";
        in
          builtins.toFile "main.txt" "Reference: ${dep}"
        "#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-main.txt"));
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert!(contents.contains("Reference: "));
            assert!(contents.contains("-dep.txt"));
        }
        _ => panic!("Expected string"),
    }
}

// Slashes are not allowed in store-object names.
#[test]
fn to_file_invalid_name_with_slash() {
    init();
    let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);
    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("name cannot contain '/'")
    );
}

// "." is rejected as a store-object name.
#[test]
fn to_file_invalid_name_dot() {
    init();
    let result = eval_result(r#"builtins.toFile "." "content""#);
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("invalid name"));
}

// ".." is rejected as a store-object name.
#[test]
fn to_file_invalid_name_dotdot() {
    init();
    let result = eval_result(r#"builtins.toFile ".." "content""#);
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("invalid name"));
}
// builtins.storePath must reject paths outside the store directory.
#[test]
fn store_path_validation_not_in_store() {
    init();
    let result = eval_result(r#"builtins.storePath "/tmp/foo""#);
    assert!(result.is_err());
    assert!(
        result
            .unwrap_err()
            .to_string()
            .contains("not in the Nix store")
    );
}

// A path inside the store dir but with a malformed hash segment must fail
// validation. The store dir is discovered by creating a real file first,
// so the test works in both daemon and simulated modes.
#[test]
fn store_path_validation_malformed_hash() {
    init();
    let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
        .expect("Failed to create dummy file");
    let dummy_path = match dummy_file_result {
        Value::String(ref p) => p.clone(),
        _ => panic!("Expected string"),
    };
    let store_dir = std::path::Path::new(&dummy_path)
        .parent()
        .expect("Failed to get parent dir")
        .to_str()
        .expect("Failed to convert to string");
    // "invalid" is far shorter than the required 32-char base32 hash.
    let test_path = format!("{}/invalid-hash-hello", store_dir);
    let result = eval_result(&format!(r#"builtins.storePath "{}""#, test_path));
    assert!(result.is_err());
    let err_str = result.unwrap_err().to_string();
    assert!(
        err_str.contains("invalid") || err_str.contains("hash"),
        "Expected hash validation error, got: {}",
        err_str
    );
}

// A bare 32-char hash with no "-name" suffix must fail validation.
#[test]
fn store_path_validation_missing_name() {
    init();
    let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
        .expect("Failed to create dummy file");
    let dummy_path = match dummy_file_result {
        Value::String(ref p) => p.clone(),
        _ => panic!("Expected string"),
    };
    let store_dir = std::path::Path::new(&dummy_path)
        .parent()
        .expect("Failed to get parent dir")
        .to_str()
        .expect("Failed to convert to string");
    let test_path = format!("{}/abcd1234abcd1234abcd1234abcd1234", store_dir);
    let result = eval_result(&format!(r#"builtins.storePath "{}""#, test_path));
    assert!(result.is_err());
    let err_str = result.unwrap_err().to_string();
    assert!(
        err_str.contains("missing name") || err_str.contains("format"),
        "Expected missing name error, got: {}",
        err_str
    );
}
// builtins.toFile is curried: partially applying the name and supplying the
// content later must behave the same as a direct two-argument call.
#[test]
fn to_file_curried_application() {
    init();
    let result = eval_result(
        r#"
        let
          makeFile = builtins.toFile "test.txt";
        in
          makeFile "test content"
        "#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            assert!(path.contains("-test.txt"));
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "test content");
        }
        _ => panic!("Expected string"),
    }
}

// Non-string content must be convertible via builtins.toString.
#[test]
fn to_file_number_conversion() {
    init();
    let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
        .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "42");
        }
        _ => panic!("Expected string"),
    }
}

// Lists joined via concatStringsSep round-trip through toFile intact.
#[test]
fn to_file_list_conversion() {
    init();
    let result = eval_result(
        r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,
    )
    .expect("Failed to evaluate");
    match result {
        Value::String(path) => {
            let contents = std::fs::read_to_string(&path).expect("Failed to read file");
            assert_eq!(contents, "line1\nline2\nline3");
        }
        _ => panic!("Expected string"),
    }
}

View File

@@ -1,14 +1,14 @@
use nix_js::context::Context;
#![cfg(feature = "daemon")]
mod utils;
use nix_js::value::Value;
use utils::{eval, eval_result};
#[test]
fn derivation_minimal() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
)
.unwrap();
let result =
eval(r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#);
match result {
Value::AttrSet(attrs) => {
@@ -44,17 +44,14 @@ fn derivation_minimal() {
#[test]
fn derivation_with_args() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
args = ["-c" "echo hello"];
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
args = ["-c" "echo hello"];
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -69,12 +66,9 @@ fn derivation_with_args() {
#[test]
fn derivation_to_string() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
)
.unwrap();
let result = eval(
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
);
match result {
Value::String(s) => assert_eq!(s, "/nix/store/xpcvxsx5sw4rbq666blz6sxqlmsqphmr-foo"),
@@ -84,8 +78,7 @@ fn derivation_to_string() {
#[test]
fn derivation_missing_name() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
let result = eval_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -94,8 +87,7 @@ fn derivation_missing_name() {
#[test]
fn derivation_invalid_name_with_drv_suffix() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(
let result = eval_result(
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
);
@@ -106,8 +98,7 @@ fn derivation_invalid_name_with_drv_suffix() {
#[test]
fn derivation_missing_builder() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
let result = eval_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -116,8 +107,7 @@ fn derivation_missing_builder() {
#[test]
fn derivation_missing_system() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
let result = eval_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
@@ -126,18 +116,15 @@ fn derivation_missing_system() {
#[test]
fn derivation_with_env_vars() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
MY_VAR = "hello";
ANOTHER = "world";
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
MY_VAR = "hello";
ANOTHER = "world";
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -150,12 +137,9 @@ fn derivation_with_env_vars() {
#[test]
fn derivation_strict() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
)
.unwrap();
let result = eval(
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -169,12 +153,10 @@ fn derivation_strict() {
#[test]
fn derivation_deterministic_paths() {
let mut ctx = Context::new().unwrap();
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
let result1 = ctx.eval_code(expr).unwrap();
let result2 = ctx.eval_code(expr).unwrap();
let result1 = eval(expr);
let result2 = eval(expr);
match (result1, result2) {
(Value::AttrSet(attrs1), Value::AttrSet(attrs2)) => {
@@ -187,17 +169,14 @@ fn derivation_deterministic_paths() {
#[test]
fn derivation_escaping_in_aterm() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
args = ["-c" "echo \"hello\nworld\""];
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "test";
builder = "/bin/sh";
system = "x86_64-linux";
args = ["-c" "echo \"hello\nworld\""];
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -210,17 +189,14 @@ fn derivation_escaping_in_aterm() {
#[test]
fn multi_output_two_outputs() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "multi";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "multi";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -271,17 +247,14 @@ fn multi_output_two_outputs() {
#[test]
fn multi_output_three_outputs() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "three";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev" "doc"];
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "three";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev" "doc"];
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -322,17 +295,14 @@ fn multi_output_three_outputs() {
#[test]
fn multi_output_backward_compat() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "compat";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out"];
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "compat";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out"];
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -351,47 +321,39 @@ fn multi_output_backward_compat() {
#[test]
fn multi_output_deterministic() {
let mut ctx = Context::new().unwrap();
let result1 = ctx
.eval_code(
r#"derivation {
name = "determ";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
)
.unwrap();
let result1 = eval(
r#"derivation {
name = "determ";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
);
let result2 = ctx
.eval_code(
r#"derivation {
name = "determ";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
)
.unwrap();
let result2 = eval(
r#"derivation {
name = "determ";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
}"#,
);
assert_eq!(result1, result2);
}
#[test]
fn fixed_output_sha256_flat() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "fixed";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
outputHashAlgo = "sha256";
outputHashMode = "flat";
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "fixed";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
outputHashAlgo = "sha256";
outputHashMode = "flat";
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -419,17 +381,14 @@ fn fixed_output_sha256_flat() {
#[test]
fn fixed_output_default_algo() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "default";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "default";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "0000000000000000000000000000000000000000000000000000000000000000";
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -445,19 +404,16 @@ fn fixed_output_default_algo() {
#[test]
fn fixed_output_recursive_mode() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "recursive";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "1111111111111111111111111111111111111111111111111111111111111111";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "recursive";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "1111111111111111111111111111111111111111111111111111111111111111";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -478,8 +434,7 @@ fn fixed_output_recursive_mode() {
#[test]
fn fixed_output_rejects_multi_output() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(
let result = eval_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -496,8 +451,7 @@ fn fixed_output_rejects_multi_output() {
#[test]
fn fixed_output_invalid_hash_mode() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(
let result = eval_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -514,20 +468,17 @@ fn fixed_output_invalid_hash_mode() {
#[test]
fn structured_attrs_basic() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "struct";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
foo = "bar";
count = 42;
enabled = true;
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "struct";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
foo = "bar";
count = 42;
enabled = true;
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -542,18 +493,15 @@ fn structured_attrs_basic() {
#[test]
fn structured_attrs_nested() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "nested";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
data = { x = 1; y = [2 3]; };
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "nested";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
data = { x = 1; y = [2 3]; };
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -566,8 +514,7 @@ fn structured_attrs_nested() {
#[test]
fn structured_attrs_rejects_functions() {
let mut ctx = Context::new().unwrap();
let result = ctx.eval_code(
let result = eval_result(
r#"derivation {
name = "invalid";
builder = "/bin/sh";
@@ -584,18 +531,15 @@ fn structured_attrs_rejects_functions() {
#[test]
fn structured_attrs_false() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "normal";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = false;
foo = "bar";
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "normal";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = false;
foo = "bar";
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -610,19 +554,16 @@ fn structured_attrs_false() {
#[test]
fn ignore_nulls_true() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "ignore";
builder = "/bin/sh";
system = "x86_64-linux";
__ignoreNulls = true;
foo = "bar";
nullValue = null;
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "ignore";
builder = "/bin/sh";
system = "x86_64-linux";
__ignoreNulls = true;
foo = "bar";
nullValue = null;
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -635,18 +576,15 @@ fn ignore_nulls_true() {
#[test]
fn ignore_nulls_false() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "keep";
builder = "/bin/sh";
system = "x86_64-linux";
__ignoreNulls = false;
nullValue = null;
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "keep";
builder = "/bin/sh";
system = "x86_64-linux";
__ignoreNulls = false;
nullValue = null;
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -661,20 +599,17 @@ fn ignore_nulls_false() {
#[test]
fn ignore_nulls_with_structured_attrs() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "combined";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
__ignoreNulls = true;
foo = "bar";
nullValue = null;
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "combined";
builder = "/bin/sh";
system = "x86_64-linux";
__structuredAttrs = true;
__ignoreNulls = true;
foo = "bar";
nullValue = null;
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -688,21 +623,18 @@ fn ignore_nulls_with_structured_attrs() {
#[test]
fn all_features_combined() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "all";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
__structuredAttrs = true;
__ignoreNulls = true;
data = { x = 1; };
nullValue = null;
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "all";
builder = "/bin/sh";
system = "x86_64-linux";
outputs = ["out" "dev"];
__structuredAttrs = true;
__ignoreNulls = true;
data = { x = 1; };
nullValue = null;
}"#,
);
match result {
Value::AttrSet(attrs) => {
@@ -718,19 +650,16 @@ fn all_features_combined() {
#[test]
fn fixed_output_with_structured_attrs() {
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"derivation {
name = "fixstruct";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "abc123";
__structuredAttrs = true;
data = { key = "value"; };
}"#,
)
.unwrap();
let result = eval(
r#"derivation {
name = "fixstruct";
builder = "/bin/sh";
system = "x86_64-linux";
outputHash = "abc123";
__structuredAttrs = true;
data = { key = "value"; };
}"#,
);
match result {
Value::AttrSet(attrs) => {

View File

@@ -104,21 +104,18 @@ fn import_with_complex_dependency_graph() {
// Tests for builtins.path
#[test]
fn test_path_with_file() {
fn path_with_file() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("test.txt");
std::fs::write(&test_file, "Hello, World!").unwrap();
let expr = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result = ctx.eval_code(&expr).unwrap();
// Should return a store path string
if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/"));
assert!(store_path.starts_with(ctx.get_store_dir()));
assert!(store_path.contains("test.txt"));
} else {
panic!("Expected string, got {:?}", result);
@@ -126,7 +123,7 @@ fn test_path_with_file() {
}
#[test]
fn test_path_with_custom_name() {
fn path_with_custom_name() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("original.txt");
@@ -147,7 +144,7 @@ fn test_path_with_custom_name() {
}
#[test]
fn test_path_with_directory_recursive() {
fn path_with_directory_recursive() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("mydir");
@@ -162,7 +159,7 @@ fn test_path_with_directory_recursive() {
let result = ctx.eval_code(&expr).unwrap();
if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/"));
assert!(store_path.starts_with(ctx.get_store_dir()));
assert!(store_path.contains("mydir"));
} else {
panic!("Expected string, got {:?}", result);
@@ -170,7 +167,7 @@ fn test_path_with_directory_recursive() {
}
#[test]
fn test_path_flat_with_file() {
fn path_flat_with_file() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("flat.txt");
@@ -183,14 +180,14 @@ fn test_path_flat_with_file() {
let result = ctx.eval_code(&expr).unwrap();
if let Value::String(store_path) = result {
assert!(store_path.starts_with("/nix/store/"));
assert!(store_path.starts_with(ctx.get_store_dir()));
} else {
panic!("Expected string, got {:?}", result);
}
}
#[test]
fn test_path_flat_with_directory_fails() {
fn path_flat_with_directory_fails() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_dir = temp_dir.path().join("mydir");
@@ -208,7 +205,7 @@ fn test_path_flat_with_directory_fails() {
}
#[test]
fn test_path_nonexistent_fails() {
fn path_nonexistent_fails() {
let mut ctx = Context::new().unwrap();
let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#;
@@ -220,7 +217,7 @@ fn test_path_nonexistent_fails() {
}
#[test]
fn test_path_missing_path_param() {
fn path_missing_path_param() {
let mut ctx = Context::new().unwrap();
let expr = r#"builtins.path { name = "test"; }"#;
@@ -232,17 +229,14 @@ fn test_path_missing_path_param() {
}
#[test]
fn test_path_with_sha256() {
fn path_with_sha256() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("hash_test.txt");
std::fs::write(&test_file, "Test content for hashing").unwrap();
// First, get the hash by calling without sha256
let expr1 = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr1 = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result1 = ctx.eval_code(&expr1).unwrap();
let store_path1 = match result1 {
Value::String(s) => s,
@@ -251,10 +245,7 @@ fn test_path_with_sha256() {
// Compute the actual hash (for testing, we'll just verify the same path is returned)
// In real usage, the user would know the hash beforehand
let expr2 = format!(
r#"builtins.path {{ path = {}; }}"#,
test_file.display()
);
let expr2 = format!(r#"builtins.path {{ path = {}; }}"#, test_file.display());
let result2 = ctx.eval_code(&expr2).unwrap();
let store_path2 = match result2 {
Value::String(s) => s,
@@ -266,7 +257,7 @@ fn test_path_with_sha256() {
}
#[test]
fn test_path_deterministic() {
fn path_deterministic() {
let mut ctx = Context::new().unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("deterministic.txt");

View File

@@ -4,113 +4,113 @@ use nix_js::value::Value;
use utils::{eval, eval_result};
#[test]
fn test_path_type_of() {
fn path_type_of() {
let result = eval("builtins.typeOf ./foo");
assert_eq!(result, Value::String("path".to_string()));
}
#[test]
fn test_is_path_true() {
fn is_path_true() {
let result = eval("builtins.isPath ./foo");
assert_eq!(result, Value::Bool(true));
}
#[test]
fn test_is_path_false_string() {
fn is_path_false_string() {
let result = eval(r#"builtins.isPath "./foo""#);
assert_eq!(result, Value::Bool(false));
}
#[test]
fn test_is_path_false_number() {
fn is_path_false_number() {
let result = eval("builtins.isPath 42");
assert_eq!(result, Value::Bool(false));
}
#[test]
fn test_path_concat_type() {
fn path_concat_type() {
// path + string = path
let result = eval(r#"builtins.typeOf (./foo + "/bar")"#);
assert_eq!(result, Value::String("path".to_string()));
}
#[test]
fn test_string_path_concat_type() {
fn string_path_concat_type() {
// string + path = string
let result = eval(r#"builtins.typeOf ("prefix-" + ./foo)"#);
assert_eq!(result, Value::String("string".to_string()));
}
#[test]
fn test_basename_of_path() {
fn basename_of_path() {
let result = eval("builtins.baseNameOf ./path/to/file.nix");
assert!(matches!(result, Value::String(s) if s == "file.nix"));
}
#[test]
fn test_basename_of_string() {
fn basename_of_string() {
let result = eval(r#"builtins.baseNameOf "/path/to/file.nix""#);
assert_eq!(result, Value::String("file.nix".to_string()));
}
#[test]
fn test_dir_of_path_type() {
fn dir_of_path_type() {
// dirOf preserves path type
let result = eval("builtins.typeOf (builtins.dirOf ./path/to/file.nix)");
assert_eq!(result, Value::String("path".to_string()));
}
#[test]
fn test_dir_of_string_type() {
fn dir_of_string_type() {
// dirOf preserves string type
let result = eval(r#"builtins.typeOf (builtins.dirOf "/path/to/file.nix")"#);
assert_eq!(result, Value::String("string".to_string()));
}
#[test]
fn test_path_equality() {
fn path_equality() {
// Same path should be equal
let result = eval("./foo == ./foo");
assert_eq!(result, Value::Bool(true));
}
#[test]
fn test_path_not_equal_string() {
fn path_not_equal_string() {
// Paths and strings are different types - should not be equal
let result = eval(r#"./foo == "./foo""#);
assert_eq!(result, Value::Bool(false));
}
#[test]
fn test_to_path_absolute() {
fn to_path_absolute() {
// toPath with absolute path returns string
let result = eval(r#"builtins.toPath "/foo/bar""#);
assert_eq!(result, Value::String("/foo/bar".to_string()));
}
#[test]
fn test_to_path_type_is_string() {
fn to_path_type_is_string() {
// toPath returns a string, not a path
let result = eval(r#"builtins.typeOf (builtins.toPath "/foo")"#);
assert_eq!(result, Value::String("string".to_string()));
}
#[test]
fn test_to_path_relative_fails() {
fn to_path_relative_fails() {
// toPath with relative path should fail
let result = eval_result(r#"builtins.toPath "foo/bar""#);
assert!(result.is_err());
}
#[test]
fn test_to_path_empty_fails() {
fn to_path_empty_fails() {
// toPath with empty string should fail
let result = eval_result(r#"builtins.toPath """#);
assert!(result.is_err());
}
#[test]
fn test_to_path_from_path_value() {
fn to_path_from_path_value() {
// toPath can accept a path value too (coerces to string first)
let result = eval("builtins.toPath ./foo");
// Should succeed and return the absolute path as a string

View File

@@ -4,7 +4,7 @@ use nix_js::value::{List, Value};
use utils::eval;
#[test]
fn test_match_exact_full_string() {
fn match_exact_full_string() {
assert_eq!(
eval(r#"builtins.match "foobar" "foobar""#),
Value::List(List::new(vec![]))
@@ -12,12 +12,12 @@ fn test_match_exact_full_string() {
}
#[test]
fn test_match_partial_returns_null() {
fn match_partial_returns_null() {
assert_eq!(eval(r#"builtins.match "foo" "foobar""#), Value::Null);
}
#[test]
fn test_match_with_capture_groups() {
fn match_with_capture_groups() {
assert_eq!(
eval(r#"builtins.match "(.*)\\.nix" "foobar.nix""#),
Value::List(List::new(vec![Value::String("foobar".into())]))
@@ -25,7 +25,7 @@ fn test_match_with_capture_groups() {
}
#[test]
fn test_match_multiple_capture_groups() {
fn match_multiple_capture_groups() {
assert_eq!(
eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "foobar.nix""#),
Value::List(List::new(vec![
@@ -37,7 +37,7 @@ fn test_match_multiple_capture_groups() {
}
#[test]
fn test_match_with_path() {
fn match_with_path() {
assert_eq!(
eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "/path/to/foobar.nix""#),
Value::List(List::new(vec![
@@ -49,7 +49,7 @@ fn test_match_with_path() {
}
#[test]
fn test_match_posix_space_class() {
fn match_posix_space_class() {
assert_eq!(
eval(r#"builtins.match "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo ""#),
Value::List(List::new(vec![Value::String("foo".into())]))
@@ -57,8 +57,11 @@ fn test_match_posix_space_class() {
}
#[test]
fn test_match_posix_upper_class() {
assert_eq!(eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#), Value::Null);
fn match_posix_upper_class() {
assert_eq!(
eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#),
Value::Null
);
assert_eq!(
eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO ""#),
@@ -67,7 +70,7 @@ fn test_match_posix_upper_class() {
}
#[test]
fn test_match_quantifiers() {
fn match_quantifiers() {
assert_eq!(
eval(r#"builtins.match "fo*" "f""#),
Value::List(List::new(vec![]))
@@ -84,7 +87,7 @@ fn test_match_quantifiers() {
}
#[test]
fn test_split_non_capturing() {
fn split_non_capturing() {
assert_eq!(
eval(r#"builtins.split "foobar" "foobar""#),
Value::List(List::new(vec![
@@ -96,7 +99,7 @@ fn test_split_non_capturing() {
}
#[test]
fn test_split_no_match() {
fn split_no_match() {
assert_eq!(
eval(r#"builtins.split "fo+" "f""#),
Value::List(List::new(vec![Value::String("f".into())]))
@@ -104,7 +107,7 @@ fn test_split_no_match() {
}
#[test]
fn test_split_with_capture_group() {
fn split_with_capture_group() {
assert_eq!(
eval(r#"builtins.split "(fo*)" "foobar""#),
Value::List(List::new(vec![
@@ -116,7 +119,7 @@ fn test_split_with_capture_group() {
}
#[test]
fn test_split_multiple_matches() {
fn split_multiple_matches() {
assert_eq!(
eval(r#"builtins.split "(b)" "foobarbaz""#),
Value::List(List::new(vec![
@@ -130,7 +133,7 @@ fn test_split_multiple_matches() {
}
#[test]
fn test_split_with_multiple_groups() {
fn split_with_multiple_groups() {
assert_eq!(
eval(r#"builtins.split "(f)(o*)" "foo""#),
Value::List(List::new(vec![
@@ -145,7 +148,7 @@ fn test_split_with_multiple_groups() {
}
#[test]
fn test_split_with_optional_groups() {
fn split_with_optional_groups() {
assert_eq!(
eval(r#"builtins.split "(a)|(c)" "abc""#),
Value::List(List::new(vec![
@@ -159,7 +162,7 @@ fn test_split_with_optional_groups() {
}
#[test]
fn test_split_greedy_matching() {
fn split_greedy_matching() {
assert_eq!(
eval(r#"builtins.split "(o+)" "oooofoooo""#),
Value::List(List::new(vec![
@@ -173,7 +176,7 @@ fn test_split_greedy_matching() {
}
#[test]
fn test_split_posix_classes() {
fn split_posix_classes() {
assert_eq!(
eval(r#"builtins.split "([[:upper:]]+)" " FOO ""#),
Value::List(List::new(vec![
@@ -185,7 +188,7 @@ fn test_split_posix_classes() {
}
#[test]
fn test_replace_basic() {
fn replace_basic() {
assert_eq!(
eval(r#"builtins.replaceStrings ["o"] ["a"] "foobar""#),
Value::String("faabar".into())
@@ -193,7 +196,7 @@ fn test_replace_basic() {
}
#[test]
fn test_replace_with_empty() {
fn replace_with_empty() {
assert_eq!(
eval(r#"builtins.replaceStrings ["o"] [""] "foobar""#),
Value::String("fbar".into())
@@ -201,7 +204,7 @@ fn test_replace_with_empty() {
}
#[test]
fn test_replace_multiple_patterns() {
fn replace_multiple_patterns() {
assert_eq!(
eval(r#"builtins.replaceStrings ["oo" "a"] ["a" "oo"] "foobar""#),
Value::String("faboor".into())
@@ -209,7 +212,7 @@ fn test_replace_multiple_patterns() {
}
#[test]
fn test_replace_first_match_wins() {
fn replace_first_match_wins() {
assert_eq!(
eval(r#"builtins.replaceStrings ["oo" "oo"] ["u" "i"] "foobar""#),
Value::String("fubar".into())
@@ -217,7 +220,7 @@ fn test_replace_first_match_wins() {
}
#[test]
fn test_replace_empty_pattern() {
fn replace_empty_pattern() {
assert_eq!(
eval(r#"builtins.replaceStrings [""] ["X"] "abc""#),
Value::String("XaXbXcX".into())
@@ -225,7 +228,7 @@ fn test_replace_empty_pattern() {
}
#[test]
fn test_replace_empty_pattern_empty_string() {
fn replace_empty_pattern_empty_string() {
assert_eq!(
eval(r#"builtins.replaceStrings [""] ["X"] """#),
Value::String("X".into())
@@ -233,7 +236,7 @@ fn test_replace_empty_pattern_empty_string() {
}
#[test]
fn test_replace_simple_char() {
fn replace_simple_char() {
assert_eq!(
eval(r#"builtins.replaceStrings ["-"] ["_"] "a-b""#),
Value::String("a_b".into())
@@ -241,7 +244,7 @@ fn test_replace_simple_char() {
}
#[test]
fn test_replace_longer_pattern() {
fn replace_longer_pattern() {
assert_eq!(
eval(r#"builtins.replaceStrings ["oo"] ["u"] "foobar""#),
Value::String("fubar".into())
@@ -249,15 +252,14 @@ fn test_replace_longer_pattern() {
}
#[test]
fn test_replace_different_lengths() {
let result = std::panic::catch_unwind(|| {
eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#)
});
fn replace_different_lengths() {
let result =
std::panic::catch_unwind(|| eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#));
assert!(result.is_err());
}
#[test]
fn test_split_version_simple() {
fn split_version_simple() {
assert_eq!(
eval(r#"builtins.splitVersion "1.2.3""#),
Value::List(List::new(vec![
@@ -269,7 +271,7 @@ fn test_split_version_simple() {
}
#[test]
fn test_split_version_with_pre() {
fn split_version_with_pre() {
assert_eq!(
eval(r#"builtins.splitVersion "2.3.0pre1234""#),
Value::List(List::new(vec![
@@ -283,7 +285,7 @@ fn test_split_version_with_pre() {
}
#[test]
fn test_split_version_with_letters() {
fn split_version_with_letters() {
assert_eq!(
eval(r#"builtins.splitVersion "2.3a""#),
Value::List(List::new(vec![
@@ -295,7 +297,7 @@ fn test_split_version_with_letters() {
}
#[test]
fn test_split_version_with_dashes() {
fn split_version_with_dashes() {
assert_eq!(
eval(r#"builtins.splitVersion "2.3-beta1""#),
Value::List(List::new(vec![
@@ -308,7 +310,7 @@ fn test_split_version_with_dashes() {
}
#[test]
fn test_split_version_empty() {
fn split_version_empty() {
assert_eq!(
eval(r#"builtins.splitVersion """#),
Value::List(List::new(vec![]))

View File

@@ -152,8 +152,10 @@ fn string_add_merges_context() {
#[test]
fn context_in_derivation_args() {
let result = eval(
r#"
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"
let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv = derivation {
@@ -164,10 +166,11 @@ fn context_in_derivation_args() {
};
in drv.drvPath
"#,
);
)
.unwrap();
match result {
Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path");
assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with(".drv"), "Should be a .drv file");
}
_ => panic!("Expected String, got {:?}", result),
@@ -176,8 +179,10 @@ fn context_in_derivation_args() {
#[test]
fn context_in_derivation_env() {
let result = eval(
r#"
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"
let
dep = derivation { name = "dep"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv = derivation {
@@ -188,10 +193,11 @@ fn context_in_derivation_env() {
};
in drv.drvPath
"#,
);
)
.unwrap();
match result {
Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path");
assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with(".drv"), "Should be a .drv file");
}
_ => panic!("Expected String, got {:?}", result),
@@ -213,16 +219,19 @@ fn tostring_preserves_context() {
#[test]
fn interpolation_derivation_returns_outpath() {
let result = eval(
r#"
let mut ctx = Context::new().unwrap();
let result = ctx
.eval_code(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
in "${drv}"
"#,
);
)
.unwrap();
match result {
Value::String(s) => {
assert!(s.starts_with("/nix/store/"), "Should be a store path");
assert!(s.starts_with(ctx.get_store_dir()), "Should be a store path");
assert!(s.ends_with("-test"), "Should end with derivation name");
}
_ => panic!("Expected String, got {:?}", result),
@@ -288,3 +297,95 @@ fn interpolation_derivation_equals_tostring() {
);
assert_eq!(result, Value::Bool(true));
}
#[test]
fn substring_preserves_context() {
let result = eval(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
str = builtins.toString drv;
sub = builtins.substring 0 10 str;
in builtins.hasContext sub
"#,
);
assert_eq!(result, Value::Bool(true));
}
#[test]
fn substring_zero_length_preserves_context() {
let result = eval(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
str = builtins.toString drv;
empty = builtins.substring 0 0 str;
in builtins.hasContext empty
"#,
);
assert_eq!(result, Value::Bool(true));
}
#[test]
fn substring_zero_length_empty_value() {
let result = eval(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
str = builtins.toString drv;
empty = builtins.substring 0 0 str;
in empty == ""
"#,
);
assert_eq!(result, Value::Bool(true));
}
#[test]
#[allow(non_snake_case)]
fn concatStringsSep_preserves_context() {
let result = eval(
r#"
let
drv1 = derivation { name = "test1"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv2 = derivation { name = "test2"; builder = "/bin/sh"; system = "x86_64-linux"; };
str1 = builtins.toString drv1;
str2 = builtins.toString drv2;
combined = builtins.concatStringsSep ":" [str1 str2];
in builtins.hasContext combined
"#,
);
assert_eq!(result, Value::Bool(true));
}
#[test]
#[allow(non_snake_case)]
fn concatStringsSep_merges_contexts() {
let result = eval(
r#"
let
drv1 = derivation { name = "test1"; builder = "/bin/sh"; system = "x86_64-linux"; };
drv2 = derivation { name = "test2"; builder = "/bin/sh"; system = "x86_64-linux"; };
str1 = builtins.toString drv1;
str2 = builtins.toString drv2;
combined = builtins.concatStringsSep ":" [str1 str2];
ctx = builtins.getContext combined;
in builtins.length (builtins.attrNames ctx)
"#,
);
assert_eq!(result, Value::Int(2));
}
#[test]
#[allow(non_snake_case)]
fn concatStringsSep_separator_has_context() {
let result = eval(
r#"
let
drv = derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; };
sep = builtins.toString drv;
combined = builtins.concatStringsSep sep ["a" "b"];
in builtins.hasContext combined
"#,
);
assert_eq!(result, Value::Bool(true));
}