Compare commits
2 Commits
main
...
182470d92d
| Author | SHA1 | Date | |
|---|---|---|---|
| 182470d92d | |||
| 62e48cabd6 |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -7,6 +7,3 @@ flamegraph*.svg
|
|||||||
perf.data*
|
perf.data*
|
||||||
profile.json.gz
|
profile.json.gz
|
||||||
prof.json
|
prof.json
|
||||||
*.cpuprofile
|
|
||||||
*.cpuprofile.gz
|
|
||||||
*v8.log*
|
|
||||||
|
|||||||
139
Cargo.lock
generated
139
Cargo.lock
generated
@@ -289,12 +289,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bumpalo"
|
name = "bumpalo"
|
||||||
version = "3.20.2"
|
version = "3.19.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb"
|
checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
|
||||||
dependencies = [
|
|
||||||
"allocator-api2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bytes"
|
name = "bytes"
|
||||||
@@ -432,9 +429,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap"
|
name = "clap"
|
||||||
version = "4.5.59"
|
version = "4.5.58"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c5caf74d17c3aec5495110c34cc3f78644bfa89af6c8993ed4de2790e49b6499"
|
checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clap_builder",
|
"clap_builder",
|
||||||
"clap_derive",
|
"clap_derive",
|
||||||
@@ -442,9 +439,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_builder"
|
name = "clap_builder"
|
||||||
version = "4.5.59"
|
version = "4.5.58"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "370daa45065b80218950227371916a1633217ae42b2715b2287b606dcd618e24"
|
checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstream",
|
"anstream",
|
||||||
"anstyle",
|
"anstyle",
|
||||||
@@ -494,15 +491,6 @@ version = "1.0.4"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colored"
|
|
||||||
version = "3.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "faf9468729b8cbcea668e36183cb69d317348c2e08e994829fb56ebfdfbaac34"
|
|
||||||
dependencies = [
|
|
||||||
"windows-sys 0.61.2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "combine"
|
name = "combine"
|
||||||
version = "4.6.7"
|
version = "4.6.7"
|
||||||
@@ -971,27 +959,6 @@ version = "0.1.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
|
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "env_filter"
|
|
||||||
version = "1.0.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f"
|
|
||||||
dependencies = [
|
|
||||||
"log",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "env_logger"
|
|
||||||
version = "0.11.9"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d"
|
|
||||||
dependencies = [
|
|
||||||
"anstream",
|
|
||||||
"anstyle",
|
|
||||||
"env_filter",
|
|
||||||
"log",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "equivalent"
|
name = "equivalent"
|
||||||
version = "1.0.2"
|
version = "1.0.2"
|
||||||
@@ -1175,9 +1142,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures"
|
name = "futures"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
|
checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-channel",
|
"futures-channel",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
@@ -1190,9 +1157,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-channel"
|
name = "futures-channel"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
|
checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-core",
|
"futures-core",
|
||||||
"futures-sink",
|
"futures-sink",
|
||||||
@@ -1200,15 +1167,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-core"
|
name = "futures-core"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
|
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-executor"
|
name = "futures-executor"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
|
checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-core",
|
"futures-core",
|
||||||
"futures-task",
|
"futures-task",
|
||||||
@@ -1217,15 +1184,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-io"
|
name = "futures-io"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
|
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-macro"
|
name = "futures-macro"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
|
checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -1234,21 +1201,21 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-sink"
|
name = "futures-sink"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
|
checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-task"
|
name = "futures-task"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
|
checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-util"
|
name = "futures-util"
|
||||||
version = "0.3.32"
|
version = "0.3.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
|
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-channel",
|
"futures-channel",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
@@ -1258,6 +1225,7 @@ dependencies = [
|
|||||||
"futures-task",
|
"futures-task",
|
||||||
"memchr",
|
"memchr",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
|
"pin-utils",
|
||||||
"slab",
|
"slab",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -1311,12 +1279,6 @@ dependencies = [
|
|||||||
"wasip3",
|
"wasip3",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ghost-cell"
|
|
||||||
version = "0.2.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "d8449d342b1c67f49169e92e71deb7b9b27f30062301a16dbc27a4cc8d2351b7"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "gimli"
|
name = "gimli"
|
||||||
version = "0.32.3"
|
version = "0.32.3"
|
||||||
@@ -2025,7 +1987,7 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "nix-compat"
|
name = "nix-compat"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
source = "git+https://git.snix.dev/snix/snix.git#1b37f68842a7e5e226d9dc009e9a90d400c5fb14"
|
source = "git+https://git.snix.dev/snix/snix.git#9d414bec7c7fff1fca6ba6d14dda6c4295521260"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"bstr",
|
"bstr",
|
||||||
@@ -2048,7 +2010,7 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "nix-compat-derive"
|
name = "nix-compat-derive"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
source = "git+https://git.snix.dev/snix/snix.git#1b37f68842a7e5e226d9dc009e9a90d400c5fb14"
|
source = "git+https://git.snix.dev/snix/snix.git#9d414bec7c7fff1fca6ba6d14dda6c4295521260"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -2061,10 +2023,8 @@ version = "0.1.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"base64 0.22.1",
|
"base64 0.22.1",
|
||||||
"bumpalo",
|
|
||||||
"bzip2",
|
"bzip2",
|
||||||
"clap",
|
"clap",
|
||||||
"colored",
|
|
||||||
"criterion",
|
"criterion",
|
||||||
"deno_core",
|
"deno_core",
|
||||||
"deno_error",
|
"deno_error",
|
||||||
@@ -2073,7 +2033,6 @@ dependencies = [
|
|||||||
"ere",
|
"ere",
|
||||||
"fastwebsockets",
|
"fastwebsockets",
|
||||||
"flate2",
|
"flate2",
|
||||||
"ghost-cell",
|
|
||||||
"hashbrown 0.16.1",
|
"hashbrown 0.16.1",
|
||||||
"hex",
|
"hex",
|
||||||
"http",
|
"http",
|
||||||
@@ -2103,7 +2062,6 @@ dependencies = [
|
|||||||
"tap",
|
"tap",
|
||||||
"tar",
|
"tar",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
"test-log",
|
|
||||||
"thiserror 2.0.18",
|
"thiserror 2.0.18",
|
||||||
"tokio",
|
"tokio",
|
||||||
"toml",
|
"toml",
|
||||||
@@ -2118,7 +2076,6 @@ name = "nix-js-macros"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"convert_case 0.11.0",
|
"convert_case 0.11.0",
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
@@ -2721,9 +2678,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rnix"
|
name = "rnix"
|
||||||
version = "0.14.0"
|
version = "0.13.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c163bd17372eecdf10d351c34584b7de7c1a33be4e92a32f3fb3f5a7fe3f579b"
|
checksum = "0b8276b540c344ec04cd215fb3d35db378c2b1861cc44802c2c097f3490f6e52"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"rowan",
|
"rowan",
|
||||||
]
|
]
|
||||||
@@ -3304,9 +3261,9 @@ checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.116"
|
version = "2.0.115"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3df424c70518695237746f84cede799c9c58fcb37450d7b23716568cc8bc69cb"
|
checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -3315,9 +3272,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn-match"
|
name = "syn-match"
|
||||||
version = "0.3.1"
|
version = "0.3.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "54b8f0a9004d6aafa6a588602a1119e6cdaacec9921aa1605383e6e7d6258fd6"
|
checksum = "783c4140d7ed89f37116e865b49e5a9fdd28608b9071a9dd1e158b50fc0a31fc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -3437,28 +3394,6 @@ dependencies = [
|
|||||||
"windows-sys 0.60.2",
|
"windows-sys 0.60.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "test-log"
|
|
||||||
version = "0.2.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "37d53ac171c92a39e4769491c4b4dde7022c60042254b5fc044ae409d34a24d4"
|
|
||||||
dependencies = [
|
|
||||||
"env_logger",
|
|
||||||
"test-log-macros",
|
|
||||||
"tracing-subscriber",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "test-log-macros"
|
|
||||||
version = "0.2.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "be35209fd0781c5401458ab66e4f98accf63553e8fae7425503e92fdd319783b"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "text-size"
|
name = "text-size"
|
||||||
version = "1.1.1"
|
version = "1.1.1"
|
||||||
@@ -3648,9 +3583,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml_parser"
|
name = "toml_parser"
|
||||||
version = "1.0.9+spec-1.1.0"
|
version = "1.0.8+spec-1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
|
checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"winnow",
|
"winnow",
|
||||||
]
|
]
|
||||||
@@ -3787,9 +3722,9 @@ checksum = "81b79ad29b5e19de4260020f8919b443b2ef0277d242ce532ec7b7a2cc8b6007"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-ident"
|
name = "unicode-ident"
|
||||||
version = "1.0.24"
|
version = "1.0.23"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
|
checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-linebreak"
|
name = "unicode-linebreak"
|
||||||
|
|||||||
8
Justfile
8
Justfile
@@ -21,11 +21,3 @@
|
|||||||
[no-exit-message]
|
[no-exit-message]
|
||||||
@evali expr:
|
@evali expr:
|
||||||
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 eval --expr '{{expr}}'
|
cargo run --release --features inspector -- --inspect-brk 127.0.0.1:9229 eval --expr '{{expr}}'
|
||||||
|
|
||||||
[no-exit-message]
|
|
||||||
@replp:
|
|
||||||
cargo run --release --features prof -- repl
|
|
||||||
|
|
||||||
[no-exit-message]
|
|
||||||
@evalp expr:
|
|
||||||
cargo run --release --features prof -- eval --expr '{{expr}}'
|
|
||||||
|
|||||||
@@ -8,6 +8,5 @@ proc-macro = true
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
convert_case = "0.11"
|
convert_case = "0.11"
|
||||||
proc-macro2 = "1.0"
|
|
||||||
quote = "1.0"
|
quote = "1.0"
|
||||||
syn = { version = "2.0", features = ["full"] }
|
syn = { version = "2.0", features = ["full"] }
|
||||||
|
|||||||
@@ -4,22 +4,22 @@
|
|||||||
//! an Intermediate Representation (IR) that follows a specific pattern. It generates:
|
//! an Intermediate Representation (IR) that follows a specific pattern. It generates:
|
||||||
//! 1. An enum representing the different kinds of IR nodes.
|
//! 1. An enum representing the different kinds of IR nodes.
|
||||||
//! 2. Structs for each of the variants that have fields.
|
//! 2. Structs for each of the variants that have fields.
|
||||||
//! 3. `From` implementations to easily convert from a struct variant (e.g., `BinOp`) to the main enum (`Ir::BinOp`).
|
//! 3. `Ref` and `Mut` versions of the main enum for ergonomic pattern matching on references.
|
||||||
//! 4. A `To[IrName]` trait to provide a convenient `.to_ir()` method on the variant structs.
|
//! 4. `From` implementations to easily convert from a struct variant (e.g., `BinOp`) to the main enum (`Ir::BinOp`).
|
||||||
|
//! 5. A `To[IrName]` trait to provide a convenient `.to_ir()` method on the variant structs.
|
||||||
|
|
||||||
use convert_case::{Case, Casing};
|
use convert_case::{Case, Casing};
|
||||||
use proc_macro::TokenStream;
|
use proc_macro::TokenStream;
|
||||||
use quote::{format_ident, quote};
|
use quote::{format_ident, quote};
|
||||||
use syn::{
|
use syn::{
|
||||||
Expr, ExprPath, FieldsNamed, GenericArgument, GenericParam, Generics, Ident, Path, PathSegment,
|
FieldsNamed, Ident, Token, Type, parenthesized,
|
||||||
Token, Type, TypePath, parenthesized,
|
|
||||||
parse::{Parse, ParseStream, Result},
|
parse::{Parse, ParseStream, Result},
|
||||||
punctuated::Punctuated,
|
punctuated::Punctuated,
|
||||||
token,
|
token,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Represents one of the variants passed to the `ir!` macro.
|
/// Represents one of the variants passed to the `ir!` macro.
|
||||||
enum VariantInput {
|
pub enum VariantInput {
|
||||||
/// A unit-like variant, e.g., `Arg`.
|
/// A unit-like variant, e.g., `Arg`.
|
||||||
Unit(Ident),
|
Unit(Ident),
|
||||||
/// A tuple-like variant with one unnamed field, e.g., `ExprRef(ExprId)`.
|
/// A tuple-like variant with one unnamed field, e.g., `ExprRef(ExprId)`.
|
||||||
@@ -29,12 +29,11 @@ enum VariantInput {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// The top-level input for the `ir!` macro.
|
/// The top-level input for the `ir!` macro.
|
||||||
struct MacroInput {
|
pub struct MacroInput {
|
||||||
/// The name of the main IR enum to be generated (e.g., `Ir`).
|
/// The name of the main IR enum to be generated (e.g., `Ir`).
|
||||||
base_name: Ident,
|
pub base_name: Ident,
|
||||||
generics: Generics,
|
|
||||||
/// The list of variants for the enum.
|
/// The list of variants for the enum.
|
||||||
variants: Punctuated<VariantInput, Token![,]>,
|
pub variants: Punctuated<VariantInput, Token![,]>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Parse for VariantInput {
|
impl Parse for VariantInput {
|
||||||
@@ -65,14 +64,13 @@ impl Parse for VariantInput {
|
|||||||
|
|
||||||
impl Parse for MacroInput {
|
impl Parse for MacroInput {
|
||||||
fn parse(input: ParseStream) -> Result<Self> {
|
fn parse(input: ParseStream) -> Result<Self> {
|
||||||
|
// The macro input is expected to be: `IrName, Variant1, Variant2, ...`
|
||||||
let base_name = input.parse()?;
|
let base_name = input.parse()?;
|
||||||
let generics = Generics::parse(input)?;
|
input.parse::<Token![,]>()?;
|
||||||
input.parse::<Token![;]>()?;
|
|
||||||
let variants = Punctuated::parse_terminated(input)?;
|
let variants = Punctuated::parse_terminated(input)?;
|
||||||
|
|
||||||
Ok(MacroInput {
|
Ok(MacroInput {
|
||||||
base_name,
|
base_name,
|
||||||
generics,
|
|
||||||
variants,
|
variants,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -83,39 +81,17 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
let parsed_input = syn::parse_macro_input!(input as MacroInput);
|
let parsed_input = syn::parse_macro_input!(input as MacroInput);
|
||||||
|
|
||||||
let base_name = &parsed_input.base_name;
|
let base_name = &parsed_input.base_name;
|
||||||
let generic_params = &parsed_input.generics.params;
|
let ref_name = format_ident!("{}Ref", base_name);
|
||||||
let mk_ident_path = |ident| Path {
|
let mut_name = format_ident!("{}Mut", base_name);
|
||||||
leading_colon: None,
|
|
||||||
segments: Punctuated::from_iter(std::iter::once(PathSegment {
|
|
||||||
ident,
|
|
||||||
arguments: Default::default(),
|
|
||||||
})),
|
|
||||||
};
|
|
||||||
let generic_args = {
|
|
||||||
generic_params
|
|
||||||
.iter()
|
|
||||||
.map(|arg| match arg {
|
|
||||||
GenericParam::Lifetime(lifetime) => {
|
|
||||||
GenericArgument::Lifetime(lifetime.lifetime.clone())
|
|
||||||
}
|
|
||||||
GenericParam::Const(cnst) => GenericArgument::Const(Expr::Path(ExprPath {
|
|
||||||
path: mk_ident_path(cnst.ident.clone()),
|
|
||||||
attrs: Vec::new(),
|
|
||||||
qself: None,
|
|
||||||
})),
|
|
||||||
GenericParam::Type(ty) => GenericArgument::Type(Type::Path(TypePath {
|
|
||||||
path: mk_ident_path(ty.ident.clone()),
|
|
||||||
qself: None,
|
|
||||||
})),
|
|
||||||
})
|
|
||||||
.collect::<Punctuated<_, Token![,]>>()
|
|
||||||
};
|
|
||||||
let where_clause = &parsed_input.generics.where_clause;
|
|
||||||
let to_trait_name = format_ident!("To{}", base_name);
|
let to_trait_name = format_ident!("To{}", base_name);
|
||||||
let to_trait_fn_name = format_ident!("to_{}", base_name.to_string().to_case(Case::Snake));
|
let to_trait_fn_name = format_ident!("to_{}", base_name.to_string().to_case(Case::Snake));
|
||||||
|
|
||||||
let mut enum_variants = Vec::new();
|
let mut enum_variants = Vec::new();
|
||||||
let mut struct_defs = Vec::new();
|
let mut struct_defs = Vec::new();
|
||||||
|
let mut ref_variants = Vec::new();
|
||||||
|
let mut mut_variants = Vec::new();
|
||||||
|
let mut as_ref_arms = Vec::new();
|
||||||
|
let mut as_mut_arms = Vec::new();
|
||||||
let mut span_arms = Vec::new();
|
let mut span_arms = Vec::new();
|
||||||
let mut from_impls = Vec::new();
|
let mut from_impls = Vec::new();
|
||||||
let mut to_trait_impls = Vec::new();
|
let mut to_trait_impls = Vec::new();
|
||||||
@@ -133,15 +109,19 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
});
|
});
|
||||||
|
|
||||||
enum_variants.push(quote! { #name(#inner_type) });
|
enum_variants.push(quote! { #name(#inner_type) });
|
||||||
|
ref_variants.push(quote! { #name(&'a #inner_type) });
|
||||||
|
mut_variants.push(quote! { #name(&'a mut #inner_type) });
|
||||||
|
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||||
|
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||||
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
||||||
from_impls.push(quote! {
|
from_impls.push(quote! {
|
||||||
impl <#generic_params> From<#inner_type> for #base_name <#generic_args> #where_clause {
|
impl From<#inner_type> for #base_name {
|
||||||
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
to_trait_impls.push(quote! {
|
to_trait_impls.push(quote! {
|
||||||
impl <#generic_params> #to_trait_name <#generic_args> for #name #where_clause {
|
impl #to_trait_name for #name {
|
||||||
fn #to_trait_fn_name(self) -> #base_name <#generic_args> { #base_name::from(self) }
|
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -158,24 +138,25 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
|
|
||||||
let inner_type = name.clone();
|
let inner_type = name.clone();
|
||||||
enum_variants.push(quote! { #name(#inner_type) });
|
enum_variants.push(quote! { #name(#inner_type) });
|
||||||
|
ref_variants.push(quote! { #name(&'a #inner_type) });
|
||||||
|
mut_variants.push(quote! { #name(&'a mut #inner_type) });
|
||||||
|
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||||
|
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||||
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
||||||
from_impls.push(quote! {
|
from_impls.push(quote! {
|
||||||
impl <#generic_params> From<#inner_type> for #base_name <#generic_args> #where_clause {
|
impl From<#inner_type> for #base_name {
|
||||||
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
to_trait_impls.push(quote! {
|
to_trait_impls.push(quote! {
|
||||||
impl <#generic_params> #to_trait_name <#generic_args> for #name #where_clause {
|
impl #to_trait_name for #name {
|
||||||
fn #to_trait_fn_name(self) -> #base_name <#generic_args> { #base_name::from(self) }
|
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
VariantInput::Struct(name, mut fields) => {
|
VariantInput::Struct(name, mut fields) => {
|
||||||
let inner_type = name.clone();
|
let inner_type = name.clone();
|
||||||
|
|
||||||
fields.named.iter_mut().for_each(|field| {
|
|
||||||
field.vis = syn::Visibility::Public(syn::token::Pub::default());
|
|
||||||
});
|
|
||||||
fields.named.push(syn::Field {
|
fields.named.push(syn::Field {
|
||||||
attrs: vec![],
|
attrs: vec![],
|
||||||
vis: syn::Visibility::Public(syn::token::Pub::default()),
|
vis: syn::Visibility::Public(syn::token::Pub::default()),
|
||||||
@@ -187,18 +168,22 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
|
|
||||||
struct_defs.push(quote! {
|
struct_defs.push(quote! {
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct #name <#generic_params> #where_clause #fields
|
pub struct #name #fields
|
||||||
});
|
});
|
||||||
enum_variants.push(quote! { #name(#inner_type <#generic_args>) });
|
enum_variants.push(quote! { #name(#inner_type) });
|
||||||
|
ref_variants.push(quote! { #name(&'a #inner_type) });
|
||||||
|
mut_variants.push(quote! { #name(&'a mut #inner_type) });
|
||||||
|
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||||
|
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||||
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
span_arms.push(quote! { Self::#name(inner) => inner.span });
|
||||||
from_impls.push(quote! {
|
from_impls.push(quote! {
|
||||||
impl <#generic_params> From<#inner_type <#generic_args>> for #base_name <#generic_args> #where_clause {
|
impl From<#inner_type> for #base_name {
|
||||||
fn from(val: #inner_type <#generic_args>) -> Self { #base_name::#name(val) }
|
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
to_trait_impls.push(quote! {
|
to_trait_impls.push(quote! {
|
||||||
impl <#generic_params> #to_trait_name <#generic_args> for #name <#generic_args> #where_clause {
|
impl #to_trait_name for #name {
|
||||||
fn #to_trait_fn_name(self) -> #base_name <#generic_args> { #base_name::from(self) }
|
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -208,15 +193,41 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
// Assemble the final generated code.
|
// Assemble the final generated code.
|
||||||
let expanded = quote! {
|
let expanded = quote! {
|
||||||
/// The main IR enum, generated by the `ir!` macro.
|
/// The main IR enum, generated by the `ir!` macro.
|
||||||
#[derive(Debug)]
|
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||||
pub enum #base_name <#generic_params> #where_clause {
|
pub enum #base_name {
|
||||||
#( #enum_variants ),*
|
#( #enum_variants ),*
|
||||||
}
|
}
|
||||||
|
|
||||||
// The struct definitions for the enum variants.
|
// The struct definitions for the enum variants.
|
||||||
#( #struct_defs )*
|
#( #struct_defs )*
|
||||||
|
|
||||||
impl <#generic_params> #base_name <#generic_args> #where_clause {
|
/// An immutable reference version of the IR enum.
|
||||||
|
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||||
|
pub enum #ref_name<'a> {
|
||||||
|
#( #ref_variants ),*
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A mutable reference version of the IR enum.
|
||||||
|
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||||
|
pub enum #mut_name<'a> {
|
||||||
|
#( #mut_variants ),*
|
||||||
|
}
|
||||||
|
|
||||||
|
impl #base_name {
|
||||||
|
/// Converts a `&Ir` into a `IrRef`.
|
||||||
|
pub fn as_ref(&self) -> #ref_name<'_> {
|
||||||
|
match self {
|
||||||
|
#( #as_ref_arms ),*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts a `&mut Ir` into a `IrMut`.
|
||||||
|
pub fn as_mut(&mut self) -> #mut_name<'_> {
|
||||||
|
match self {
|
||||||
|
#( #as_mut_arms ),*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn span(&self) -> rnix::TextRange {
|
pub fn span(&self) -> rnix::TextRange {
|
||||||
match self {
|
match self {
|
||||||
#( #span_arms ),*
|
#( #span_arms ),*
|
||||||
@@ -228,9 +239,9 @@ pub fn ir_impl(input: TokenStream) -> TokenStream {
|
|||||||
#( #from_impls )*
|
#( #from_impls )*
|
||||||
|
|
||||||
/// A trait for converting a variant struct into the main IR enum.
|
/// A trait for converting a variant struct into the main IR enum.
|
||||||
pub trait #to_trait_name <#generic_params> #where_clause {
|
pub trait #to_trait_name {
|
||||||
/// Performs the conversion.
|
/// Performs the conversion.
|
||||||
fn #to_trait_fn_name(self) -> #base_name <#generic_args>;
|
fn #to_trait_fn_name(self) -> #base_name;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Implement the `ToIr` trait for each variant struct.
|
// Implement the `ToIr` trait for each variant struct.
|
||||||
|
|||||||
@@ -27,7 +27,6 @@ miette = { version = "7.4", features = ["fancy"] }
|
|||||||
|
|
||||||
hashbrown = "0.16"
|
hashbrown = "0.16"
|
||||||
string-interner = "0.19"
|
string-interner = "0.19"
|
||||||
bumpalo = { version = "3.20", features = ["allocator-api2", "boxed", "collections"] }
|
|
||||||
|
|
||||||
rust-embed="8.11"
|
rust-embed="8.11"
|
||||||
|
|
||||||
@@ -59,7 +58,7 @@ dirs = "6.0"
|
|||||||
tempfile = "3.24"
|
tempfile = "3.24"
|
||||||
rusqlite = { version = "0.38", features = ["bundled"] }
|
rusqlite = { version = "0.38", features = ["bundled"] }
|
||||||
|
|
||||||
rnix = "0.14"
|
rnix = "0.13"
|
||||||
rowan = "0.16"
|
rowan = "0.16"
|
||||||
|
|
||||||
nix-js-macros = { path = "../nix-js-macros" }
|
nix-js-macros = { path = "../nix-js-macros" }
|
||||||
@@ -74,16 +73,12 @@ hyper-util = { version = "0.1", features = ["tokio"], optional = true }
|
|||||||
http-body-util = { version = "0.1", optional = true }
|
http-body-util = { version = "0.1", optional = true }
|
||||||
http = { version = "1", optional = true }
|
http = { version = "1", optional = true }
|
||||||
uuid = { version = "1", features = ["v4"], optional = true }
|
uuid = { version = "1", features = ["v4"], optional = true }
|
||||||
ghost-cell = "0.2.6"
|
|
||||||
colored = "3.1.1"
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
inspector = ["dep:fastwebsockets", "dep:hyper", "dep:hyper-util", "dep:http-body-util", "dep:http", "dep:uuid"]
|
inspector = ["dep:fastwebsockets", "dep:hyper", "dep:hyper-util", "dep:http-body-util", "dep:http", "dep:uuid"]
|
||||||
prof = []
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
criterion = { version = "0.8", features = ["html_reports"] }
|
criterion = { version = "0.8", features = ["html_reports"] }
|
||||||
test-log = { version = "0.2", features = ["trace"] }
|
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "basic_ops"
|
name = "basic_ops"
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
use std::hint::black_box;
|
|
||||||
|
|
||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
use criterion::{Criterion, criterion_group, criterion_main};
|
||||||
|
use std::hint::black_box;
|
||||||
use utils::eval;
|
use utils::eval;
|
||||||
|
|
||||||
fn bench_arithmetic(c: &mut Criterion) {
|
fn bench_arithmetic(c: &mut Criterion) {
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
use std::hint::black_box;
|
|
||||||
|
|
||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
use criterion::{Criterion, criterion_group, criterion_main};
|
||||||
|
use std::hint::black_box;
|
||||||
use utils::eval;
|
use utils::eval;
|
||||||
|
|
||||||
fn bench_builtin_math(c: &mut Criterion) {
|
fn bench_builtin_math(c: &mut Criterion) {
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
use std::hint::black_box;
|
|
||||||
|
|
||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
use criterion::{Criterion, criterion_group, criterion_main};
|
||||||
use nix_js::context::Context;
|
use nix_js::context::Context;
|
||||||
|
use std::hint::black_box;
|
||||||
use utils::compile;
|
use utils::compile;
|
||||||
|
|
||||||
fn bench_parse_and_downgrade(c: &mut Criterion) {
|
fn bench_parse_and_downgrade(c: &mut Criterion) {
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
use std::hint::black_box;
|
|
||||||
|
|
||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
use criterion::{Criterion, criterion_group, criterion_main};
|
||||||
|
use std::hint::black_box;
|
||||||
use utils::eval;
|
use utils::eval;
|
||||||
|
|
||||||
fn bench_non_recursive(c: &mut Criterion) {
|
fn bench_non_recursive(c: &mut Criterion) {
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ export default defineConfig([
|
|||||||
...tseslint.configs.recommended,
|
...tseslint.configs.recommended,
|
||||||
{
|
{
|
||||||
files: ["**/*.{js,mjs,cjs,ts,mts,cts}"],
|
files: ["**/*.{js,mjs,cjs,ts,mts,cts}"],
|
||||||
languageOptions: { globals: globals.es2022 },
|
languageOptions: { globals: globals.browser },
|
||||||
rules: {
|
rules: {
|
||||||
"no-unused-vars": "off",
|
"no-unused-vars": "off",
|
||||||
"@typescript-eslint/no-unused-vars": ["error", { varsIgnorePattern: "^_", argsIgnorePattern: "^_" }],
|
"@typescript-eslint/no-unused-vars": ["error", { varsIgnorePattern: "^_", argsIgnorePattern: "^_" }],
|
||||||
|
|||||||
164
nix-js/runtime-ts/package-lock.json
generated
164
nix-js/runtime-ts/package-lock.json
generated
@@ -8,15 +8,16 @@
|
|||||||
"name": "nix-js-runtime",
|
"name": "nix-js-runtime",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@eslint/json": "^1.0.1",
|
||||||
|
"eslint": "^9.39.2",
|
||||||
"globals": "^17.3.0",
|
"globals": "^17.3.0",
|
||||||
"jiti": "^2.6.1",
|
"jiti": "^2.6.1",
|
||||||
"js-sdsl": "^4.4.2"
|
"js-sdsl": "^4.4.2",
|
||||||
|
"typescript-eslint": "^8.55.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"esbuild": "^0.24.2",
|
"esbuild": "^0.24.2",
|
||||||
"eslint": "^9.39.2",
|
"typescript": "^5.7.2"
|
||||||
"typescript": "^5.7.2",
|
|
||||||
"typescript-eslint": "^8.55.0"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@esbuild/aix-ppc64": {
|
"node_modules/@esbuild/aix-ppc64": {
|
||||||
@@ -448,7 +449,6 @@
|
|||||||
"version": "4.9.1",
|
"version": "4.9.1",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz",
|
||||||
"integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==",
|
"integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"eslint-visitor-keys": "^3.4.3"
|
"eslint-visitor-keys": "^3.4.3"
|
||||||
@@ -467,7 +467,6 @@
|
|||||||
"version": "3.4.3",
|
"version": "3.4.3",
|
||||||
"resolved": "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
|
"resolved": "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
|
||||||
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
|
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||||
@@ -480,7 +479,6 @@
|
|||||||
"version": "4.12.2",
|
"version": "4.12.2",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
|
||||||
"integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
|
"integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
|
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
|
||||||
@@ -490,7 +488,6 @@
|
|||||||
"version": "0.21.1",
|
"version": "0.21.1",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/config-array/-/config-array-0.21.1.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/config-array/-/config-array-0.21.1.tgz",
|
||||||
"integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==",
|
"integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint/object-schema": "^2.1.7",
|
"@eslint/object-schema": "^2.1.7",
|
||||||
@@ -505,7 +502,6 @@
|
|||||||
"version": "0.4.2",
|
"version": "0.4.2",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/config-helpers/-/config-helpers-0.4.2.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/config-helpers/-/config-helpers-0.4.2.tgz",
|
||||||
"integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==",
|
"integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint/core": "^0.17.0"
|
"@eslint/core": "^0.17.0"
|
||||||
@@ -518,7 +514,6 @@
|
|||||||
"version": "0.17.0",
|
"version": "0.17.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/core/-/core-0.17.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/core/-/core-0.17.0.tgz",
|
||||||
"integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
|
"integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/json-schema": "^7.0.15"
|
"@types/json-schema": "^7.0.15"
|
||||||
@@ -527,11 +522,22 @@
|
|||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@eslint/core": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmmirror.com/@eslint/core/-/core-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-/nr9K9wkr3P1EzFTdFdMoLuo1PmIxjmwvPozwoSodjNBdefGujXQUF93u1DDZpEaTuDvMsIQddsd35BwtrW9Xw==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/json-schema": "^7.0.15"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^20.19.0 || ^22.13.0 || >=24"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@eslint/eslintrc": {
|
"node_modules/@eslint/eslintrc": {
|
||||||
"version": "3.3.3",
|
"version": "3.3.3",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/eslintrc/-/eslintrc-3.3.3.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/eslintrc/-/eslintrc-3.3.3.tgz",
|
||||||
"integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==",
|
"integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ajv": "^6.12.4",
|
"ajv": "^6.12.4",
|
||||||
@@ -555,7 +561,6 @@
|
|||||||
"version": "14.0.0",
|
"version": "14.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/globals/-/globals-14.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/globals/-/globals-14.0.0.tgz",
|
||||||
"integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==",
|
"integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18"
|
"node": ">=18"
|
||||||
@@ -568,7 +573,6 @@
|
|||||||
"version": "9.39.2",
|
"version": "9.39.2",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/js/-/js-9.39.2.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/js/-/js-9.39.2.tgz",
|
||||||
"integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==",
|
"integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
@@ -577,21 +581,47 @@
|
|||||||
"url": "https://eslint.org/donate"
|
"url": "https://eslint.org/donate"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@eslint/json": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmmirror.com/@eslint/json/-/json-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-bE2nGv8/U+uRvQEJWOgCsZCa65XsCBgxyyx/sXtTHVv0kqdauACLzyp7A1C3yNn7pRaWjIt5acxY+TAbSyIJXw==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@eslint/core": "^1.1.0",
|
||||||
|
"@eslint/plugin-kit": "^0.6.0",
|
||||||
|
"@humanwhocodes/momoa": "^3.3.10",
|
||||||
|
"natural-compare": "^1.4.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^20.19.0 || ^22.13.0 || >=24"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@eslint/object-schema": {
|
"node_modules/@eslint/object-schema": {
|
||||||
"version": "2.1.7",
|
"version": "2.1.7",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/object-schema/-/object-schema-2.1.7.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/object-schema/-/object-schema-2.1.7.tgz",
|
||||||
"integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==",
|
"integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@eslint/plugin-kit": {
|
||||||
|
"version": "0.6.0",
|
||||||
|
"resolved": "https://registry.npmmirror.com/@eslint/plugin-kit/-/plugin-kit-0.6.0.tgz",
|
||||||
|
"integrity": "sha512-bIZEUzOI1jkhviX2cp5vNyXQc6olzb2ohewQubuYlMXZ2Q/XjBO0x0XhGPvc9fjSIiUN0vw+0hq53BJ4eQSJKQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@eslint/core": "^1.1.0",
|
||||||
|
"levn": "^0.4.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^20.19.0 || ^22.13.0 || >=24"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@humanfs/core": {
|
"node_modules/@humanfs/core": {
|
||||||
"version": "0.19.1",
|
"version": "0.19.1",
|
||||||
"resolved": "https://registry.npmmirror.com/@humanfs/core/-/core-0.19.1.tgz",
|
"resolved": "https://registry.npmmirror.com/@humanfs/core/-/core-0.19.1.tgz",
|
||||||
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
|
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.18.0"
|
"node": ">=18.18.0"
|
||||||
@@ -601,7 +631,6 @@
|
|||||||
"version": "0.16.7",
|
"version": "0.16.7",
|
||||||
"resolved": "https://registry.npmmirror.com/@humanfs/node/-/node-0.16.7.tgz",
|
"resolved": "https://registry.npmmirror.com/@humanfs/node/-/node-0.16.7.tgz",
|
||||||
"integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==",
|
"integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@humanfs/core": "^0.19.1",
|
"@humanfs/core": "^0.19.1",
|
||||||
@@ -615,7 +644,6 @@
|
|||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
|
||||||
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
|
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12.22"
|
"node": ">=12.22"
|
||||||
@@ -625,11 +653,19 @@
|
|||||||
"url": "https://github.com/sponsors/nzakas"
|
"url": "https://github.com/sponsors/nzakas"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@humanwhocodes/momoa": {
|
||||||
|
"version": "3.3.10",
|
||||||
|
"resolved": "https://registry.npmmirror.com/@humanwhocodes/momoa/-/momoa-3.3.10.tgz",
|
||||||
|
"integrity": "sha512-KWiFQpSAqEIyrTXko3hFNLeQvSK8zXlJQzhhxsyVn58WFRYXST99b3Nqnu+ttOtjds2Pl2grUHGpe2NzhPynuQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@humanwhocodes/retry": {
|
"node_modules/@humanwhocodes/retry": {
|
||||||
"version": "0.4.3",
|
"version": "0.4.3",
|
||||||
"resolved": "https://registry.npmmirror.com/@humanwhocodes/retry/-/retry-0.4.3.tgz",
|
"resolved": "https://registry.npmmirror.com/@humanwhocodes/retry/-/retry-0.4.3.tgz",
|
||||||
"integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==",
|
"integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.18"
|
"node": ">=18.18"
|
||||||
@@ -643,21 +679,18 @@
|
|||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz",
|
"resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz",
|
||||||
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
|
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/@types/json-schema": {
|
"node_modules/@types/json-schema": {
|
||||||
"version": "7.0.15",
|
"version": "7.0.15",
|
||||||
"resolved": "https://registry.npmmirror.com/@types/json-schema/-/json-schema-7.0.15.tgz",
|
"resolved": "https://registry.npmmirror.com/@types/json-schema/-/json-schema-7.0.15.tgz",
|
||||||
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
|
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.55.0.tgz",
|
||||||
"integrity": "sha512-1y/MVSz0NglV1ijHC8OT49mPJ4qhPYjiK08YUQVbIOyu+5k862LKUHFkpKHWu//zmr7hDR2rhwUm6gnCGNmGBQ==",
|
"integrity": "sha512-1y/MVSz0NglV1ijHC8OT49mPJ4qhPYjiK08YUQVbIOyu+5k862LKUHFkpKHWu//zmr7hDR2rhwUm6gnCGNmGBQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint-community/regexpp": "^4.12.2",
|
"@eslint-community/regexpp": "^4.12.2",
|
||||||
@@ -686,7 +719,6 @@
|
|||||||
"version": "7.0.5",
|
"version": "7.0.5",
|
||||||
"resolved": "https://registry.npmmirror.com/ignore/-/ignore-7.0.5.tgz",
|
"resolved": "https://registry.npmmirror.com/ignore/-/ignore-7.0.5.tgz",
|
||||||
"integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
|
"integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 4"
|
"node": ">= 4"
|
||||||
@@ -696,7 +728,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/parser/-/parser-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/parser/-/parser-8.55.0.tgz",
|
||||||
"integrity": "sha512-4z2nCSBfVIMnbuu8uinj+f0o4qOeggYJLbjpPHka3KH1om7e+H9yLKTYgksTaHcGco+NClhhY2vyO3HsMH1RGw==",
|
"integrity": "sha512-4z2nCSBfVIMnbuu8uinj+f0o4qOeggYJLbjpPHka3KH1om7e+H9yLKTYgksTaHcGco+NClhhY2vyO3HsMH1RGw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -722,7 +753,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/project-service/-/project-service-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/project-service/-/project-service-8.55.0.tgz",
|
||||||
"integrity": "sha512-zRcVVPFUYWa3kNnjaZGXSu3xkKV1zXy8M4nO/pElzQhFweb7PPtluDLQtKArEOGmjXoRjnUZ29NjOiF0eCDkcQ==",
|
"integrity": "sha512-zRcVVPFUYWa3kNnjaZGXSu3xkKV1zXy8M4nO/pElzQhFweb7PPtluDLQtKArEOGmjXoRjnUZ29NjOiF0eCDkcQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/tsconfig-utils": "^8.55.0",
|
"@typescript-eslint/tsconfig-utils": "^8.55.0",
|
||||||
@@ -744,7 +774,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/scope-manager/-/scope-manager-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/scope-manager/-/scope-manager-8.55.0.tgz",
|
||||||
"integrity": "sha512-fVu5Omrd3jeqeQLiB9f1YsuK/iHFOwb04bCtY4BSCLgjNbOD33ZdV6KyEqplHr+IlpgT0QTZ/iJ+wT7hvTx49Q==",
|
"integrity": "sha512-fVu5Omrd3jeqeQLiB9f1YsuK/iHFOwb04bCtY4BSCLgjNbOD33ZdV6KyEqplHr+IlpgT0QTZ/iJ+wT7hvTx49Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/types": "8.55.0",
|
"@typescript-eslint/types": "8.55.0",
|
||||||
@@ -762,7 +791,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.55.0.tgz",
|
||||||
"integrity": "sha512-1R9cXqY7RQd7WuqSN47PK9EDpgFUK3VqdmbYrvWJZYDd0cavROGn+74ktWBlmJ13NXUQKlZ/iAEQHI/V0kKe0Q==",
|
"integrity": "sha512-1R9cXqY7RQd7WuqSN47PK9EDpgFUK3VqdmbYrvWJZYDd0cavROGn+74ktWBlmJ13NXUQKlZ/iAEQHI/V0kKe0Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
@@ -779,7 +807,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/type-utils/-/type-utils-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/type-utils/-/type-utils-8.55.0.tgz",
|
||||||
"integrity": "sha512-x1iH2unH4qAt6I37I2CGlsNs+B9WGxurP2uyZLRz6UJoZWDBx9cJL1xVN/FiOmHEONEg6RIufdvyT0TEYIgC5g==",
|
"integrity": "sha512-x1iH2unH4qAt6I37I2CGlsNs+B9WGxurP2uyZLRz6UJoZWDBx9cJL1xVN/FiOmHEONEg6RIufdvyT0TEYIgC5g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/types": "8.55.0",
|
"@typescript-eslint/types": "8.55.0",
|
||||||
@@ -804,7 +831,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/types/-/types-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/types/-/types-8.55.0.tgz",
|
||||||
"integrity": "sha512-ujT0Je8GI5BJWi+/mMoR0wxwVEQaxM+pi30xuMiJETlX80OPovb2p9E8ss87gnSVtYXtJoU9U1Cowcr6w2FE0w==",
|
"integrity": "sha512-ujT0Je8GI5BJWi+/mMoR0wxwVEQaxM+pi30xuMiJETlX80OPovb2p9E8ss87gnSVtYXtJoU9U1Cowcr6w2FE0w==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
@@ -818,7 +844,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.55.0.tgz",
|
||||||
"integrity": "sha512-EwrH67bSWdx/3aRQhCoxDaHM+CrZjotc2UCCpEDVqfCE+7OjKAGWNY2HsCSTEVvWH2clYQK8pdeLp42EVs+xQw==",
|
"integrity": "sha512-EwrH67bSWdx/3aRQhCoxDaHM+CrZjotc2UCCpEDVqfCE+7OjKAGWNY2HsCSTEVvWH2clYQK8pdeLp42EVs+xQw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/project-service": "8.55.0",
|
"@typescript-eslint/project-service": "8.55.0",
|
||||||
@@ -846,7 +871,6 @@
|
|||||||
"version": "2.0.2",
|
"version": "2.0.2",
|
||||||
"resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz",
|
"resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz",
|
||||||
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"balanced-match": "^1.0.0"
|
"balanced-match": "^1.0.0"
|
||||||
@@ -856,7 +880,6 @@
|
|||||||
"version": "9.0.5",
|
"version": "9.0.5",
|
||||||
"resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.5.tgz",
|
"resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.5.tgz",
|
||||||
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
|
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"brace-expansion": "^2.0.1"
|
"brace-expansion": "^2.0.1"
|
||||||
@@ -872,7 +895,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/utils/-/utils-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/utils/-/utils-8.55.0.tgz",
|
||||||
"integrity": "sha512-BqZEsnPGdYpgyEIkDC1BadNY8oMwckftxBT+C8W0g1iKPdeqKZBtTfnvcq0nf60u7MkjFO8RBvpRGZBPw4L2ow==",
|
"integrity": "sha512-BqZEsnPGdYpgyEIkDC1BadNY8oMwckftxBT+C8W0g1iKPdeqKZBtTfnvcq0nf60u7MkjFO8RBvpRGZBPw4L2ow==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint-community/eslint-utils": "^4.9.1",
|
"@eslint-community/eslint-utils": "^4.9.1",
|
||||||
@@ -896,7 +918,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.55.0.tgz",
|
||||||
"integrity": "sha512-AxNRwEie8Nn4eFS1FzDMJWIISMGoXMb037sgCBJ3UR6o0fQTzr2tqN9WT+DkWJPhIdQCfV7T6D387566VtnCJA==",
|
"integrity": "sha512-AxNRwEie8Nn4eFS1FzDMJWIISMGoXMb037sgCBJ3UR6o0fQTzr2tqN9WT+DkWJPhIdQCfV7T6D387566VtnCJA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/types": "8.55.0",
|
"@typescript-eslint/types": "8.55.0",
|
||||||
@@ -914,7 +935,6 @@
|
|||||||
"version": "8.15.0",
|
"version": "8.15.0",
|
||||||
"resolved": "https://registry.npmmirror.com/acorn/-/acorn-8.15.0.tgz",
|
"resolved": "https://registry.npmmirror.com/acorn/-/acorn-8.15.0.tgz",
|
||||||
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
|
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"bin": {
|
"bin": {
|
||||||
@@ -928,7 +948,6 @@
|
|||||||
"version": "5.3.2",
|
"version": "5.3.2",
|
||||||
"resolved": "https://registry.npmmirror.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
|
"resolved": "https://registry.npmmirror.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
|
||||||
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
|
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peerDependencies": {
|
"peerDependencies": {
|
||||||
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
|
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
|
||||||
@@ -938,7 +957,6 @@
|
|||||||
"version": "6.12.6",
|
"version": "6.12.6",
|
||||||
"resolved": "https://registry.npmmirror.com/ajv/-/ajv-6.12.6.tgz",
|
"resolved": "https://registry.npmmirror.com/ajv/-/ajv-6.12.6.tgz",
|
||||||
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
|
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"fast-deep-equal": "^3.1.1",
|
"fast-deep-equal": "^3.1.1",
|
||||||
@@ -955,7 +973,6 @@
|
|||||||
"version": "4.3.0",
|
"version": "4.3.0",
|
||||||
"resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
"resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-convert": "^2.0.1"
|
"color-convert": "^2.0.1"
|
||||||
@@ -971,21 +988,18 @@
|
|||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz",
|
||||||
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
|
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "Python-2.0"
|
"license": "Python-2.0"
|
||||||
},
|
},
|
||||||
"node_modules/balanced-match": {
|
"node_modules/balanced-match": {
|
||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz",
|
"resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/brace-expansion": {
|
"node_modules/brace-expansion": {
|
||||||
"version": "1.1.12",
|
"version": "1.1.12",
|
||||||
"resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
"resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"balanced-match": "^1.0.0",
|
"balanced-match": "^1.0.0",
|
||||||
@@ -996,7 +1010,6 @@
|
|||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
"resolved": "https://registry.npmmirror.com/callsites/-/callsites-3.1.0.tgz",
|
"resolved": "https://registry.npmmirror.com/callsites/-/callsites-3.1.0.tgz",
|
||||||
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
|
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
@@ -1006,7 +1019,6 @@
|
|||||||
"version": "4.1.2",
|
"version": "4.1.2",
|
||||||
"resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz",
|
"resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz",
|
||||||
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ansi-styles": "^4.1.0",
|
"ansi-styles": "^4.1.0",
|
||||||
@@ -1023,7 +1035,6 @@
|
|||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz",
|
||||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "~1.1.4"
|
"color-name": "~1.1.4"
|
||||||
@@ -1036,21 +1047,18 @@
|
|||||||
"version": "1.1.4",
|
"version": "1.1.4",
|
||||||
"resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz",
|
"resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz",
|
||||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/concat-map": {
|
"node_modules/concat-map": {
|
||||||
"version": "0.0.1",
|
"version": "0.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz",
|
||||||
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
|
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/cross-spawn": {
|
"node_modules/cross-spawn": {
|
||||||
"version": "7.0.6",
|
"version": "7.0.6",
|
||||||
"resolved": "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
"resolved": "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||||
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
|
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"path-key": "^3.1.0",
|
"path-key": "^3.1.0",
|
||||||
@@ -1065,7 +1073,6 @@
|
|||||||
"version": "4.4.3",
|
"version": "4.4.3",
|
||||||
"resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.3.tgz",
|
"resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.3.tgz",
|
||||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ms": "^2.1.3"
|
"ms": "^2.1.3"
|
||||||
@@ -1083,7 +1090,6 @@
|
|||||||
"version": "0.1.4",
|
"version": "0.1.4",
|
||||||
"resolved": "https://registry.npmmirror.com/deep-is/-/deep-is-0.1.4.tgz",
|
"resolved": "https://registry.npmmirror.com/deep-is/-/deep-is-0.1.4.tgz",
|
||||||
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
|
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/esbuild": {
|
"node_modules/esbuild": {
|
||||||
@@ -1131,7 +1137,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
|
||||||
"integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
|
"integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
@@ -1144,7 +1149,6 @@
|
|||||||
"version": "9.39.2",
|
"version": "9.39.2",
|
||||||
"resolved": "https://registry.npmmirror.com/eslint/-/eslint-9.39.2.tgz",
|
"resolved": "https://registry.npmmirror.com/eslint/-/eslint-9.39.2.tgz",
|
||||||
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -1205,7 +1209,6 @@
|
|||||||
"version": "8.4.0",
|
"version": "8.4.0",
|
||||||
"resolved": "https://registry.npmmirror.com/eslint-scope/-/eslint-scope-8.4.0.tgz",
|
"resolved": "https://registry.npmmirror.com/eslint-scope/-/eslint-scope-8.4.0.tgz",
|
||||||
"integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==",
|
"integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"esrecurse": "^4.3.0",
|
"esrecurse": "^4.3.0",
|
||||||
@@ -1222,7 +1225,6 @@
|
|||||||
"version": "4.2.1",
|
"version": "4.2.1",
|
||||||
"resolved": "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
"resolved": "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||||
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
|
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||||
@@ -1235,7 +1237,6 @@
|
|||||||
"version": "0.17.0",
|
"version": "0.17.0",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/core/-/core-0.17.0.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/core/-/core-0.17.0.tgz",
|
||||||
"integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
|
"integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/json-schema": "^7.0.15"
|
"@types/json-schema": "^7.0.15"
|
||||||
@@ -1248,7 +1249,6 @@
|
|||||||
"version": "0.4.1",
|
"version": "0.4.1",
|
||||||
"resolved": "https://registry.npmmirror.com/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz",
|
"resolved": "https://registry.npmmirror.com/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz",
|
||||||
"integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==",
|
"integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint/core": "^0.17.0",
|
"@eslint/core": "^0.17.0",
|
||||||
@@ -1262,7 +1262,6 @@
|
|||||||
"version": "10.4.0",
|
"version": "10.4.0",
|
||||||
"resolved": "https://registry.npmmirror.com/espree/-/espree-10.4.0.tgz",
|
"resolved": "https://registry.npmmirror.com/espree/-/espree-10.4.0.tgz",
|
||||||
"integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==",
|
"integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"acorn": "^8.15.0",
|
"acorn": "^8.15.0",
|
||||||
@@ -1280,7 +1279,6 @@
|
|||||||
"version": "1.7.0",
|
"version": "1.7.0",
|
||||||
"resolved": "https://registry.npmmirror.com/esquery/-/esquery-1.7.0.tgz",
|
"resolved": "https://registry.npmmirror.com/esquery/-/esquery-1.7.0.tgz",
|
||||||
"integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==",
|
"integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-3-Clause",
|
"license": "BSD-3-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"estraverse": "^5.1.0"
|
"estraverse": "^5.1.0"
|
||||||
@@ -1293,7 +1291,6 @@
|
|||||||
"version": "4.3.0",
|
"version": "4.3.0",
|
||||||
"resolved": "https://registry.npmmirror.com/esrecurse/-/esrecurse-4.3.0.tgz",
|
"resolved": "https://registry.npmmirror.com/esrecurse/-/esrecurse-4.3.0.tgz",
|
||||||
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
|
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"estraverse": "^5.2.0"
|
"estraverse": "^5.2.0"
|
||||||
@@ -1306,7 +1303,6 @@
|
|||||||
"version": "5.3.0",
|
"version": "5.3.0",
|
||||||
"resolved": "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz",
|
"resolved": "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz",
|
||||||
"integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
|
"integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=4.0"
|
"node": ">=4.0"
|
||||||
@@ -1316,7 +1312,6 @@
|
|||||||
"version": "2.0.3",
|
"version": "2.0.3",
|
||||||
"resolved": "https://registry.npmmirror.com/esutils/-/esutils-2.0.3.tgz",
|
"resolved": "https://registry.npmmirror.com/esutils/-/esutils-2.0.3.tgz",
|
||||||
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
|
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
@@ -1326,28 +1321,24 @@
|
|||||||
"version": "3.1.3",
|
"version": "3.1.3",
|
||||||
"resolved": "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
"resolved": "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
||||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/fast-json-stable-stringify": {
|
"node_modules/fast-json-stable-stringify": {
|
||||||
"version": "2.1.0",
|
"version": "2.1.0",
|
||||||
"resolved": "https://registry.npmmirror.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
|
"resolved": "https://registry.npmmirror.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
|
||||||
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
|
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/fast-levenshtein": {
|
"node_modules/fast-levenshtein": {
|
||||||
"version": "2.0.6",
|
"version": "2.0.6",
|
||||||
"resolved": "https://registry.npmmirror.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
|
"resolved": "https://registry.npmmirror.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
|
||||||
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
|
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/fdir": {
|
"node_modules/fdir": {
|
||||||
"version": "6.5.0",
|
"version": "6.5.0",
|
||||||
"resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz",
|
"resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz",
|
||||||
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
|
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12.0.0"
|
"node": ">=12.0.0"
|
||||||
@@ -1365,7 +1356,6 @@
|
|||||||
"version": "8.0.0",
|
"version": "8.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
|
||||||
"integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==",
|
"integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"flat-cache": "^4.0.0"
|
"flat-cache": "^4.0.0"
|
||||||
@@ -1378,7 +1368,6 @@
|
|||||||
"version": "5.0.0",
|
"version": "5.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/find-up/-/find-up-5.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/find-up/-/find-up-5.0.0.tgz",
|
||||||
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
|
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"locate-path": "^6.0.0",
|
"locate-path": "^6.0.0",
|
||||||
@@ -1395,7 +1384,6 @@
|
|||||||
"version": "4.0.1",
|
"version": "4.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/flat-cache/-/flat-cache-4.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/flat-cache/-/flat-cache-4.0.1.tgz",
|
||||||
"integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==",
|
"integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"flatted": "^3.2.9",
|
"flatted": "^3.2.9",
|
||||||
@@ -1409,14 +1397,12 @@
|
|||||||
"version": "3.3.3",
|
"version": "3.3.3",
|
||||||
"resolved": "https://registry.npmmirror.com/flatted/-/flatted-3.3.3.tgz",
|
"resolved": "https://registry.npmmirror.com/flatted/-/flatted-3.3.3.tgz",
|
||||||
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
|
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/glob-parent": {
|
"node_modules/glob-parent": {
|
||||||
"version": "6.0.2",
|
"version": "6.0.2",
|
||||||
"resolved": "https://registry.npmmirror.com/glob-parent/-/glob-parent-6.0.2.tgz",
|
"resolved": "https://registry.npmmirror.com/glob-parent/-/glob-parent-6.0.2.tgz",
|
||||||
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
|
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"is-glob": "^4.0.3"
|
"is-glob": "^4.0.3"
|
||||||
@@ -1441,7 +1427,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz",
|
||||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -1451,7 +1436,6 @@
|
|||||||
"version": "5.3.2",
|
"version": "5.3.2",
|
||||||
"resolved": "https://registry.npmmirror.com/ignore/-/ignore-5.3.2.tgz",
|
"resolved": "https://registry.npmmirror.com/ignore/-/ignore-5.3.2.tgz",
|
||||||
"integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
|
"integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 4"
|
"node": ">= 4"
|
||||||
@@ -1461,7 +1445,6 @@
|
|||||||
"version": "3.3.1",
|
"version": "3.3.1",
|
||||||
"resolved": "https://registry.npmmirror.com/import-fresh/-/import-fresh-3.3.1.tgz",
|
"resolved": "https://registry.npmmirror.com/import-fresh/-/import-fresh-3.3.1.tgz",
|
||||||
"integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
|
"integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"parent-module": "^1.0.0",
|
"parent-module": "^1.0.0",
|
||||||
@@ -1478,7 +1461,6 @@
|
|||||||
"version": "0.1.4",
|
"version": "0.1.4",
|
||||||
"resolved": "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz",
|
"resolved": "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz",
|
||||||
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
|
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.8.19"
|
"node": ">=0.8.19"
|
||||||
@@ -1488,7 +1470,6 @@
|
|||||||
"version": "2.1.1",
|
"version": "2.1.1",
|
||||||
"resolved": "https://registry.npmmirror.com/is-extglob/-/is-extglob-2.1.1.tgz",
|
"resolved": "https://registry.npmmirror.com/is-extglob/-/is-extglob-2.1.1.tgz",
|
||||||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
@@ -1498,7 +1479,6 @@
|
|||||||
"version": "4.0.3",
|
"version": "4.0.3",
|
||||||
"resolved": "https://registry.npmmirror.com/is-glob/-/is-glob-4.0.3.tgz",
|
"resolved": "https://registry.npmmirror.com/is-glob/-/is-glob-4.0.3.tgz",
|
||||||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"is-extglob": "^2.1.1"
|
"is-extglob": "^2.1.1"
|
||||||
@@ -1511,7 +1491,6 @@
|
|||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/isexe/-/isexe-2.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/isexe/-/isexe-2.0.0.tgz",
|
||||||
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
|
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/jiti": {
|
"node_modules/jiti": {
|
||||||
@@ -1538,7 +1517,6 @@
|
|||||||
"version": "4.1.1",
|
"version": "4.1.1",
|
||||||
"resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.1.tgz",
|
"resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.1.tgz",
|
||||||
"integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
|
"integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"argparse": "^2.0.1"
|
"argparse": "^2.0.1"
|
||||||
@@ -1551,28 +1529,24 @@
|
|||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/json-buffer/-/json-buffer-3.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/json-buffer/-/json-buffer-3.0.1.tgz",
|
||||||
"integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
|
"integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/json-schema-traverse": {
|
"node_modules/json-schema-traverse": {
|
||||||
"version": "0.4.1",
|
"version": "0.4.1",
|
||||||
"resolved": "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
"resolved": "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
||||||
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
|
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/json-stable-stringify-without-jsonify": {
|
"node_modules/json-stable-stringify-without-jsonify": {
|
||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
|
||||||
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
|
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/keyv": {
|
"node_modules/keyv": {
|
||||||
"version": "4.5.4",
|
"version": "4.5.4",
|
||||||
"resolved": "https://registry.npmmirror.com/keyv/-/keyv-4.5.4.tgz",
|
"resolved": "https://registry.npmmirror.com/keyv/-/keyv-4.5.4.tgz",
|
||||||
"integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
|
"integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"json-buffer": "3.0.1"
|
"json-buffer": "3.0.1"
|
||||||
@@ -1582,7 +1556,6 @@
|
|||||||
"version": "0.4.1",
|
"version": "0.4.1",
|
||||||
"resolved": "https://registry.npmmirror.com/levn/-/levn-0.4.1.tgz",
|
"resolved": "https://registry.npmmirror.com/levn/-/levn-0.4.1.tgz",
|
||||||
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
|
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"prelude-ls": "^1.2.1",
|
"prelude-ls": "^1.2.1",
|
||||||
@@ -1596,7 +1569,6 @@
|
|||||||
"version": "6.0.0",
|
"version": "6.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-6.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-6.0.0.tgz",
|
||||||
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
|
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"p-locate": "^5.0.0"
|
"p-locate": "^5.0.0"
|
||||||
@@ -1612,14 +1584,12 @@
|
|||||||
"version": "4.6.2",
|
"version": "4.6.2",
|
||||||
"resolved": "https://registry.npmmirror.com/lodash.merge/-/lodash.merge-4.6.2.tgz",
|
"resolved": "https://registry.npmmirror.com/lodash.merge/-/lodash.merge-4.6.2.tgz",
|
||||||
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
|
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/minimatch": {
|
"node_modules/minimatch": {
|
||||||
"version": "3.1.2",
|
"version": "3.1.2",
|
||||||
"resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz",
|
"resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz",
|
||||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"brace-expansion": "^1.1.7"
|
"brace-expansion": "^1.1.7"
|
||||||
@@ -1632,21 +1602,18 @@
|
|||||||
"version": "2.1.3",
|
"version": "2.1.3",
|
||||||
"resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz",
|
"resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz",
|
||||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/natural-compare": {
|
"node_modules/natural-compare": {
|
||||||
"version": "1.4.0",
|
"version": "1.4.0",
|
||||||
"resolved": "https://registry.npmmirror.com/natural-compare/-/natural-compare-1.4.0.tgz",
|
"resolved": "https://registry.npmmirror.com/natural-compare/-/natural-compare-1.4.0.tgz",
|
||||||
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
|
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/optionator": {
|
"node_modules/optionator": {
|
||||||
"version": "0.9.4",
|
"version": "0.9.4",
|
||||||
"resolved": "https://registry.npmmirror.com/optionator/-/optionator-0.9.4.tgz",
|
"resolved": "https://registry.npmmirror.com/optionator/-/optionator-0.9.4.tgz",
|
||||||
"integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
|
"integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"deep-is": "^0.1.3",
|
"deep-is": "^0.1.3",
|
||||||
@@ -1664,7 +1631,6 @@
|
|||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
"resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz",
|
"resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz",
|
||||||
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"yocto-queue": "^0.1.0"
|
"yocto-queue": "^0.1.0"
|
||||||
@@ -1680,7 +1646,6 @@
|
|||||||
"version": "5.0.0",
|
"version": "5.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/p-locate/-/p-locate-5.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/p-locate/-/p-locate-5.0.0.tgz",
|
||||||
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
|
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"p-limit": "^3.0.2"
|
"p-limit": "^3.0.2"
|
||||||
@@ -1696,7 +1661,6 @@
|
|||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmmirror.com/parent-module/-/parent-module-1.0.1.tgz",
|
"resolved": "https://registry.npmmirror.com/parent-module/-/parent-module-1.0.1.tgz",
|
||||||
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
|
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"callsites": "^3.0.0"
|
"callsites": "^3.0.0"
|
||||||
@@ -1709,7 +1673,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/path-exists/-/path-exists-4.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/path-exists/-/path-exists-4.0.0.tgz",
|
||||||
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
|
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -1719,7 +1682,6 @@
|
|||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmmirror.com/path-key/-/path-key-3.1.1.tgz",
|
"resolved": "https://registry.npmmirror.com/path-key/-/path-key-3.1.1.tgz",
|
||||||
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
|
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -1729,7 +1691,6 @@
|
|||||||
"version": "4.0.3",
|
"version": "4.0.3",
|
||||||
"resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz",
|
"resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz",
|
||||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
@@ -1743,7 +1704,6 @@
|
|||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmmirror.com/prelude-ls/-/prelude-ls-1.2.1.tgz",
|
"resolved": "https://registry.npmmirror.com/prelude-ls/-/prelude-ls-1.2.1.tgz",
|
||||||
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.8.0"
|
"node": ">= 0.8.0"
|
||||||
@@ -1753,7 +1713,6 @@
|
|||||||
"version": "2.3.1",
|
"version": "2.3.1",
|
||||||
"resolved": "https://registry.npmmirror.com/punycode/-/punycode-2.3.1.tgz",
|
"resolved": "https://registry.npmmirror.com/punycode/-/punycode-2.3.1.tgz",
|
||||||
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
|
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
@@ -1763,7 +1722,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/resolve-from/-/resolve-from-4.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/resolve-from/-/resolve-from-4.0.0.tgz",
|
||||||
"integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
|
"integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=4"
|
"node": ">=4"
|
||||||
@@ -1773,7 +1731,6 @@
|
|||||||
"version": "7.7.4",
|
"version": "7.7.4",
|
||||||
"resolved": "https://registry.npmmirror.com/semver/-/semver-7.7.4.tgz",
|
"resolved": "https://registry.npmmirror.com/semver/-/semver-7.7.4.tgz",
|
||||||
"integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
|
"integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"bin": {
|
"bin": {
|
||||||
"semver": "bin/semver.js"
|
"semver": "bin/semver.js"
|
||||||
@@ -1786,7 +1743,6 @@
|
|||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/shebang-command/-/shebang-command-2.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/shebang-command/-/shebang-command-2.0.0.tgz",
|
||||||
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
|
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"shebang-regex": "^3.0.0"
|
"shebang-regex": "^3.0.0"
|
||||||
@@ -1799,7 +1755,6 @@
|
|||||||
"version": "3.0.0",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-3.0.0.tgz",
|
"resolved": "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-3.0.0.tgz",
|
||||||
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -1809,7 +1764,6 @@
|
|||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
|
"resolved": "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
|
||||||
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
|
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -1822,7 +1776,6 @@
|
|||||||
"version": "7.2.0",
|
"version": "7.2.0",
|
||||||
"resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz",
|
"resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz",
|
||||||
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"has-flag": "^4.0.0"
|
"has-flag": "^4.0.0"
|
||||||
@@ -1835,7 +1788,6 @@
|
|||||||
"version": "0.2.15",
|
"version": "0.2.15",
|
||||||
"resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz",
|
"resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz",
|
||||||
"integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
|
"integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"fdir": "^6.5.0",
|
"fdir": "^6.5.0",
|
||||||
@@ -1852,7 +1804,6 @@
|
|||||||
"version": "2.4.0",
|
"version": "2.4.0",
|
||||||
"resolved": "https://registry.npmmirror.com/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
"resolved": "https://registry.npmmirror.com/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
||||||
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.12"
|
"node": ">=18.12"
|
||||||
@@ -1865,7 +1816,6 @@
|
|||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmmirror.com/type-check/-/type-check-0.4.0.tgz",
|
"resolved": "https://registry.npmmirror.com/type-check/-/type-check-0.4.0.tgz",
|
||||||
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
|
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"prelude-ls": "^1.2.1"
|
"prelude-ls": "^1.2.1"
|
||||||
@@ -1878,7 +1828,6 @@
|
|||||||
"version": "5.9.3",
|
"version": "5.9.3",
|
||||||
"resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.9.3.tgz",
|
"resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.9.3.tgz",
|
||||||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||||
"dev": true,
|
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"bin": {
|
"bin": {
|
||||||
@@ -1893,7 +1842,6 @@
|
|||||||
"version": "8.55.0",
|
"version": "8.55.0",
|
||||||
"resolved": "https://registry.npmmirror.com/typescript-eslint/-/typescript-eslint-8.55.0.tgz",
|
"resolved": "https://registry.npmmirror.com/typescript-eslint/-/typescript-eslint-8.55.0.tgz",
|
||||||
"integrity": "sha512-HE4wj+r5lmDVS9gdaN0/+iqNvPZwGfnJ5lZuz7s5vLlg9ODw0bIiiETaios9LvFI1U94/VBXGm3CB2Y5cNFMpw==",
|
"integrity": "sha512-HE4wj+r5lmDVS9gdaN0/+iqNvPZwGfnJ5lZuz7s5vLlg9ODw0bIiiETaios9LvFI1U94/VBXGm3CB2Y5cNFMpw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@typescript-eslint/eslint-plugin": "8.55.0",
|
"@typescript-eslint/eslint-plugin": "8.55.0",
|
||||||
@@ -1917,7 +1865,6 @@
|
|||||||
"version": "4.4.1",
|
"version": "4.4.1",
|
||||||
"resolved": "https://registry.npmmirror.com/uri-js/-/uri-js-4.4.1.tgz",
|
"resolved": "https://registry.npmmirror.com/uri-js/-/uri-js-4.4.1.tgz",
|
||||||
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
|
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
|
||||||
"dev": true,
|
|
||||||
"license": "BSD-2-Clause",
|
"license": "BSD-2-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"punycode": "^2.1.0"
|
"punycode": "^2.1.0"
|
||||||
@@ -1927,7 +1874,6 @@
|
|||||||
"version": "2.0.2",
|
"version": "2.0.2",
|
||||||
"resolved": "https://registry.npmmirror.com/which/-/which-2.0.2.tgz",
|
"resolved": "https://registry.npmmirror.com/which/-/which-2.0.2.tgz",
|
||||||
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
|
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"isexe": "^2.0.0"
|
"isexe": "^2.0.0"
|
||||||
@@ -1943,7 +1889,6 @@
|
|||||||
"version": "1.2.5",
|
"version": "1.2.5",
|
||||||
"resolved": "https://registry.npmmirror.com/word-wrap/-/word-wrap-1.2.5.tgz",
|
"resolved": "https://registry.npmmirror.com/word-wrap/-/word-wrap-1.2.5.tgz",
|
||||||
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
|
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
@@ -1953,7 +1898,6 @@
|
|||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"resolved": "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
"resolved": "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||||
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
|
|||||||
@@ -10,13 +10,12 @@
|
|||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"esbuild": "^0.24.2",
|
"esbuild": "^0.24.2",
|
||||||
"eslint": "^9.39.2",
|
"typescript": "^5.7.2"
|
||||||
"typescript": "^5.7.2",
|
|
||||||
"typescript-eslint": "^8.55.0",
|
|
||||||
"jiti": "^2.6.1"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"eslint": "^9.39.2",
|
||||||
"globals": "^17.3.0",
|
"globals": "^17.3.0",
|
||||||
"js-sdsl": "^4.4.2"
|
"js-sdsl": "^4.4.2",
|
||||||
|
"typescript-eslint": "^8.55.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -149,11 +149,13 @@ export const unsafeGetAttrPos =
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const positions = attrs[ATTR_POSITIONS];
|
const positions = (attrs as NixAttrs & Record<symbol, unknown>)[ATTR_POSITIONS] as
|
||||||
if (!positions || !positions.has(name)) {
|
| Record<string, string>
|
||||||
|
| undefined;
|
||||||
|
if (!positions || !(name in positions)) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const span = positions.get(name) as number;
|
const span = positions[name];
|
||||||
return mkPos(span);
|
return mkPos(span);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -2,16 +2,11 @@
|
|||||||
* Conversion and serialization builtin functions
|
* Conversion and serialization builtin functions
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import {
|
import { addBuiltContext, mkStringWithContext, type NixStringContext } from "../string-context";
|
||||||
addBuiltContext,
|
|
||||||
mkStringWithContext,
|
|
||||||
type NixStringContext,
|
|
||||||
StringWithContext,
|
|
||||||
} from "../string-context";
|
|
||||||
import { force, isThunk } from "../thunk";
|
import { force, isThunk } from "../thunk";
|
||||||
import { forceFunction, forceStringNoCtx } from "../type-assert";
|
import { forceFunction, forceStringNoCtx } from "../type-assert";
|
||||||
import type { NixString, NixValue } from "../types";
|
import type { NixString, NixValue } from "../types";
|
||||||
import { isNixPath, isStringWithContext, NixPath } from "../types";
|
import { HAS_CONTEXT, IS_PATH, isNixPath, isStringWithContext } from "../types";
|
||||||
import { isAttrs, isPath, typeOf } from "./type-check";
|
import { isAttrs, isPath, typeOf } from "./type-check";
|
||||||
|
|
||||||
export const fromJSON = (e: NixValue): NixValue => {
|
export const fromJSON = (e: NixValue): NixValue => {
|
||||||
@@ -289,14 +284,13 @@ export const toStringFunc = (value: NixValue): NixString => {
|
|||||||
return coerceToStringWithContext(value, StringCoercionMode.ToString, false);
|
return coerceToStringWithContext(value, StringCoercionMode.ToString, false);
|
||||||
};
|
};
|
||||||
|
|
||||||
export type JsonValue = number | boolean | string | null | { [key: string]: JsonValue } | Array<JsonValue>;
|
|
||||||
export const nixValueToJson = (
|
export const nixValueToJson = (
|
||||||
value: NixValue,
|
value: NixValue,
|
||||||
strict: boolean,
|
strict: boolean,
|
||||||
outContext: NixStringContext,
|
outContext: NixStringContext,
|
||||||
copyToStore: boolean,
|
copyToStore: boolean,
|
||||||
seen: Set<NixValue> = new Set(),
|
seen: Set<NixValue> = new Set(),
|
||||||
): JsonValue => {
|
): unknown => {
|
||||||
const v = strict ? force(value) : value;
|
const v = strict ? force(value) : value;
|
||||||
|
|
||||||
if (isThunk(v) || typeof v === "function")
|
if (isThunk(v) || typeof v === "function")
|
||||||
@@ -312,13 +306,13 @@ export const nixValueToJson = (
|
|||||||
if (typeof v === "number") return v;
|
if (typeof v === "number") return v;
|
||||||
if (typeof v === "boolean") return v;
|
if (typeof v === "boolean") return v;
|
||||||
if (typeof v === "string") return v;
|
if (typeof v === "string") return v;
|
||||||
if (v instanceof StringWithContext) {
|
if (typeof v === "object" && HAS_CONTEXT in v) {
|
||||||
for (const elem of v.context) {
|
for (const elem of v.context) {
|
||||||
outContext.add(elem);
|
outContext.add(elem);
|
||||||
}
|
}
|
||||||
return v.value;
|
return v.value;
|
||||||
}
|
}
|
||||||
if (v instanceof NixPath) {
|
if (typeof v === "object" && IS_PATH in v) {
|
||||||
if (copyToStore) {
|
if (copyToStore) {
|
||||||
const storePath = Deno.core.ops.op_copy_path_to_store(v.value);
|
const storePath = Deno.core.ops.op_copy_path_to_store(v.value);
|
||||||
outContext.add(storePath);
|
outContext.add(storePath);
|
||||||
@@ -359,7 +353,7 @@ export const nixValueToJson = (
|
|||||||
return nixValueToJson(v.get("outPath") as NixValue, strict, outContext, copyToStore, seen);
|
return nixValueToJson(v.get("outPath") as NixValue, strict, outContext, copyToStore, seen);
|
||||||
}
|
}
|
||||||
|
|
||||||
const result: { [key: string]: JsonValue } = {};
|
const result: Record<string, unknown> = {};
|
||||||
const keys = Array.from(v.keys()).sort();
|
const keys = Array.from(v.keys()).sort();
|
||||||
for (const key of keys) {
|
for (const key of keys) {
|
||||||
result[key] = nixValueToJson(v.get(key) as NixValue, strict, outContext, copyToStore, seen);
|
result[key] = nixValueToJson(v.get(key) as NixValue, strict, outContext, copyToStore, seen);
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import {
|
|||||||
import { force } from "../thunk";
|
import { force } from "../thunk";
|
||||||
import { forceAttrs, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
|
import { forceAttrs, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
|
||||||
import type { NixAttrs, NixValue } from "../types";
|
import type { NixAttrs, NixValue } from "../types";
|
||||||
import { coerceToString, type JsonValue, nixValueToJson, StringCoercionMode } from "./conversion";
|
import { coerceToString, nixValueToJson, StringCoercionMode } from "./conversion";
|
||||||
|
|
||||||
export interface OutputInfo {
|
export interface OutputInfo {
|
||||||
path: string;
|
path: string;
|
||||||
@@ -205,9 +205,9 @@ const structuredAttrsExcludedKeys = new Set([
|
|||||||
|
|
||||||
const specialAttrs = new Set(["args", "__ignoreNulls", "__contentAddressed", "__impure"]);
|
const specialAttrs = new Set(["args", "__ignoreNulls", "__contentAddressed", "__impure"]);
|
||||||
|
|
||||||
const sortedJsonStringify = (obj: Record<string, JsonValue>): string => {
|
const sortedJsonStringify = (obj: Record<string, unknown>): string => {
|
||||||
const sortedKeys = Object.keys(obj).sort();
|
const sortedKeys = Object.keys(obj).sort();
|
||||||
const sortedObj: Record<string, JsonValue> = {};
|
const sortedObj: Record<string, unknown> = {};
|
||||||
for (const key of sortedKeys) {
|
for (const key of sortedKeys) {
|
||||||
sortedObj[key] = obj[key];
|
sortedObj[key] = obj[key];
|
||||||
}
|
}
|
||||||
@@ -224,14 +224,14 @@ const extractEnv = (
|
|||||||
const env = new Map<string, string>();
|
const env = new Map<string, string>();
|
||||||
|
|
||||||
if (structuredAttrs) {
|
if (structuredAttrs) {
|
||||||
const jsonAttrs: Record<string, JsonValue> = {};
|
const jsonAttrs: Record<string, unknown> = {};
|
||||||
for (const [key, value] of attrs) {
|
for (const [key, value] of attrs) {
|
||||||
if (!structuredAttrsExcludedKeys.has(key)) {
|
if (!structuredAttrsExcludedKeys.has(key)) {
|
||||||
const forcedValue = force(value);
|
const forcedValue = force(value as NixValue);
|
||||||
if (ignoreNulls && forcedValue === null) {
|
if (ignoreNulls && forcedValue === null) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
jsonAttrs[key] = nixValueToJson(value, true, outContext, true);
|
jsonAttrs[key] = nixValueToJson(value as NixValue, true, outContext, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (key === "allowedReferences") {
|
if (key === "allowedReferences") {
|
||||||
@@ -377,16 +377,16 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
|
|||||||
const rustResult: {
|
const rustResult: {
|
||||||
drvPath: string;
|
drvPath: string;
|
||||||
outputs: [string, string][];
|
outputs: [string, string][];
|
||||||
} = Deno.core.ops.op_finalize_derivation(
|
} = Deno.core.ops.op_finalize_derivation({
|
||||||
drvName,
|
name: drvName,
|
||||||
builder,
|
builder,
|
||||||
platform,
|
platform,
|
||||||
outputs,
|
outputs,
|
||||||
drvArgs,
|
args: drvArgs,
|
||||||
envEntries,
|
env: envEntries,
|
||||||
contextArray,
|
context: contextArray,
|
||||||
fixedOutputInfo,
|
fixedOutput: fixedOutputInfo,
|
||||||
);
|
});
|
||||||
|
|
||||||
const result: NixAttrs = new Map();
|
const result: NixAttrs = new Map();
|
||||||
|
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ export const deepSeq =
|
|||||||
recurse(val);
|
recurse(val);
|
||||||
}
|
}
|
||||||
} else if (isAttrs(forced)) {
|
} else if (isAttrs(forced)) {
|
||||||
for (const [_, val] of forced.entries()) {
|
for (const [_, val] of Object.entries(forced)) {
|
||||||
recurse(val);
|
recurse(val);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -30,5 +30,9 @@ export const convertHash = (args: NixValue): string => {
|
|||||||
|
|
||||||
const toHashFormat = forceStringNoCtx(select(attrs, ["toHashFormat"]));
|
const toHashFormat = forceStringNoCtx(select(attrs, ["toHashFormat"]));
|
||||||
|
|
||||||
return Deno.core.ops.op_convert_hash(hash, hashAlgo, toHashFormat);
|
return Deno.core.ops.op_convert_hash({
|
||||||
|
hash,
|
||||||
|
hashAlgo,
|
||||||
|
toHashFormat,
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { createThunk, force } from "../thunk";
|
import { createThunk, force } from "../thunk";
|
||||||
import type { NixAttrs, NixFunction, NixValue } from "../types";
|
import type { NixAttrs, NixValue } from "../types";
|
||||||
import * as arithmetic from "./arithmetic";
|
import * as arithmetic from "./arithmetic";
|
||||||
import * as attrs from "./attrs";
|
import * as attrs from "./attrs";
|
||||||
import * as conversion from "./conversion";
|
import * as conversion from "./conversion";
|
||||||
@@ -24,31 +24,31 @@ export interface PrimopMetadata {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const mkPrimop = (
|
export const mkPrimop = (
|
||||||
func: NixFunction,
|
func: (...args: NixValue[]) => NixValue,
|
||||||
name: string,
|
name: string,
|
||||||
arity: number,
|
arity: number,
|
||||||
applied: number = 0,
|
applied: number = 0,
|
||||||
): ((...args: NixValue[]) => NixValue) => {
|
): ((...args: NixValue[]) => NixValue) => {
|
||||||
func[PRIMOP_METADATA] = {
|
(func as unknown as Record<symbol, unknown>)[PRIMOP_METADATA] = {
|
||||||
name,
|
name,
|
||||||
arity,
|
arity,
|
||||||
applied,
|
applied,
|
||||||
} satisfies PrimopMetadata;
|
} satisfies PrimopMetadata;
|
||||||
|
|
||||||
if (applied < arity - 1) {
|
if (applied < arity - 1) {
|
||||||
const wrappedFunc: NixFunction = ((arg: NixValue) => {
|
const wrappedFunc = ((...args: NixValue[]) => {
|
||||||
const result = func(arg);
|
const result = func(...args);
|
||||||
if (typeof result === "function") {
|
if (typeof result === "function") {
|
||||||
return mkPrimop(result, name, arity, applied + 1);
|
return mkPrimop(result, name, arity, applied + args.length);
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
});
|
}) as (...args: NixValue[]) => NixValue;
|
||||||
|
|
||||||
wrappedFunc[PRIMOP_METADATA] = {
|
(wrappedFunc as unknown as Record<symbol, unknown>)[PRIMOP_METADATA] = {
|
||||||
name,
|
name,
|
||||||
arity,
|
arity,
|
||||||
applied,
|
applied,
|
||||||
};
|
} satisfies PrimopMetadata;
|
||||||
|
|
||||||
return wrappedFunc;
|
return wrappedFunc;
|
||||||
}
|
}
|
||||||
@@ -57,8 +57,8 @@ export const mkPrimop = (
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const isPrimop = (
|
export const isPrimop = (
|
||||||
value: NixValue,
|
value: unknown,
|
||||||
): value is NixFunction & { [PRIMOP_METADATA]: PrimopMetadata } => {
|
): value is ((...args: never[]) => unknown) & { [PRIMOP_METADATA]: PrimopMetadata } => {
|
||||||
return (
|
return (
|
||||||
typeof value === "function" &&
|
typeof value === "function" &&
|
||||||
PRIMOP_METADATA in value &&
|
PRIMOP_METADATA in value &&
|
||||||
@@ -67,7 +67,7 @@ export const isPrimop = (
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getPrimopMetadata = (func: NixValue): PrimopMetadata | undefined => {
|
export const getPrimopMetadata = (func: unknown): PrimopMetadata | undefined => {
|
||||||
if (isPrimop(func)) {
|
if (isPrimop(func)) {
|
||||||
return func[PRIMOP_METADATA];
|
return func[PRIMOP_METADATA];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,12 +11,11 @@ import {
|
|||||||
forceStringNoCtx,
|
forceStringNoCtx,
|
||||||
forceStringValue,
|
forceStringValue,
|
||||||
} from "../type-assert";
|
} from "../type-assert";
|
||||||
import type { NixAttrs, NixString, NixValue } from "../types";
|
import type { NixAttrs, NixPath, NixString, NixValue } from "../types";
|
||||||
import { CatchableError, isNixPath, NixPath } from "../types";
|
import { CatchableError, IS_PATH, isNixPath } from "../types";
|
||||||
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
|
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
|
||||||
import { baseNameOf } from "./path";
|
import { baseNameOf } from "./path";
|
||||||
import { isAttrs, isPath, isString } from "./type-check";
|
import { isAttrs, isPath, isString } from "./type-check";
|
||||||
import { execBytecode, execBytecodeScoped } from "../vm";
|
|
||||||
|
|
||||||
const importCache = new Map<string, NixValue>();
|
const importCache = new Map<string, NixValue>();
|
||||||
|
|
||||||
@@ -50,8 +49,8 @@ export const importFunc = (path: NixValue): NixValue => {
|
|||||||
return cached;
|
return cached;
|
||||||
}
|
}
|
||||||
|
|
||||||
const [code, currentDir] = Deno.core.ops.op_import(pathStr);
|
const code = Deno.core.ops.op_import(pathStr);
|
||||||
const result = execBytecode(code, currentDir);
|
const result = Function(`return (${code})`)();
|
||||||
|
|
||||||
importCache.set(pathStr, result);
|
importCache.set(pathStr, result);
|
||||||
return result;
|
return result;
|
||||||
@@ -65,8 +64,10 @@ export const scopedImport =
|
|||||||
|
|
||||||
const pathStr = realisePath(path);
|
const pathStr = realisePath(path);
|
||||||
|
|
||||||
const [code, currentDir] = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
|
const code = Deno.core.ops.op_scoped_import(pathStr, scopeKeys);
|
||||||
return execBytecodeScoped(code, currentDir, scopeAttrs);
|
|
||||||
|
const scopedFunc = Function(`return (${code})`)();
|
||||||
|
return scopedFunc(scopeAttrs);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const storePath = (pathArg: NixValue): StringWithContext => {
|
export const storePath = (pathArg: NixValue): StringWithContext => {
|
||||||
@@ -84,24 +85,24 @@ export const fetchClosure = (_args: NixValue): never => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export interface FetchUrlResult {
|
export interface FetchUrlResult {
|
||||||
storePath: string;
|
store_path: string;
|
||||||
hash: string;
|
hash: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface FetchTarballResult {
|
export interface FetchTarballResult {
|
||||||
storePath: string;
|
store_path: string;
|
||||||
narHash: string;
|
nar_hash: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface FetchGitResult {
|
export interface FetchGitResult {
|
||||||
outPath: string;
|
out_path: string;
|
||||||
rev: string;
|
rev: string;
|
||||||
shortRev: string;
|
short_rev: string;
|
||||||
revCount: number;
|
rev_count: number;
|
||||||
lastModified: number;
|
last_modified: number;
|
||||||
lastModifiedDate: string;
|
last_modified_date: string;
|
||||||
submodules: boolean;
|
submodules: boolean;
|
||||||
narHash: string | null;
|
nar_hash: string | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const normalizeUrlInput = (
|
const normalizeUrlInput = (
|
||||||
@@ -153,16 +154,16 @@ export const fetchurl = (args: NixValue): NixString => {
|
|||||||
executable ?? false,
|
executable ?? false,
|
||||||
);
|
);
|
||||||
const context: NixStringContext = new Set();
|
const context: NixStringContext = new Set();
|
||||||
addOpaqueContext(context, result.storePath);
|
addOpaqueContext(context, result.store_path);
|
||||||
return mkStringWithContext(result.storePath, context);
|
return mkStringWithContext(result.store_path, context);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const fetchTarball = (args: NixValue): NixString => {
|
export const fetchTarball = (args: NixValue): NixString => {
|
||||||
const { url, name, sha256 } = normalizeTarballInput(args);
|
const { url, name, sha256 } = normalizeTarballInput(args);
|
||||||
const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, name ?? null, sha256 ?? null);
|
const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, name ?? null, sha256 ?? null);
|
||||||
const context: NixStringContext = new Set();
|
const context: NixStringContext = new Set();
|
||||||
addOpaqueContext(context, result.storePath);
|
addOpaqueContext(context, result.store_path);
|
||||||
return mkStringWithContext(result.storePath, context);
|
return mkStringWithContext(result.store_path, context);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const fetchGit = (args: NixValue): NixAttrs => {
|
export const fetchGit = (args: NixValue): NixAttrs => {
|
||||||
@@ -172,20 +173,20 @@ export const fetchGit = (args: NixValue): NixAttrs => {
|
|||||||
const url = coerceToString(forced, StringCoercionMode.Base, false, disposedContext);
|
const url = coerceToString(forced, StringCoercionMode.Base, false, disposedContext);
|
||||||
const result = Deno.core.ops.op_fetch_git(url, null, null, false, false, false, null);
|
const result = Deno.core.ops.op_fetch_git(url, null, null, false, false, false, null);
|
||||||
const outContext: NixStringContext = new Set();
|
const outContext: NixStringContext = new Set();
|
||||||
addOpaqueContext(outContext, result.outPath);
|
addOpaqueContext(outContext, result.out_path);
|
||||||
return new Map<string, NixValue>([
|
return new Map<string, NixValue>([
|
||||||
["outPath", mkStringWithContext(result.outPath, outContext)],
|
["outPath", mkStringWithContext(result.out_path, outContext)],
|
||||||
["rev", result.rev],
|
["rev", result.rev],
|
||||||
["shortRev", result.shortRev],
|
["shortRev", result.short_rev],
|
||||||
["revCount", BigInt(result.revCount)],
|
["revCount", BigInt(result.rev_count)],
|
||||||
["lastModified", BigInt(result.lastModified)],
|
["lastModified", BigInt(result.last_modified)],
|
||||||
["lastModifiedDate", result.lastModifiedDate],
|
["lastModifiedDate", result.last_modified_date],
|
||||||
["submodules", result.submodules],
|
["submodules", result.submodules],
|
||||||
["narHash", result.narHash],
|
["narHash", result.nar_hash],
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
const attrs = forceAttrs(args);
|
const attrs = forceAttrs(args);
|
||||||
const url = forceStringValue(select(attrs, ["url"]));
|
const url = forceStringValue(select("attrs", ["url"]));
|
||||||
const gitRef = attrs.has("ref") ? forceStringValue(attrs.get("ref") as NixValue) : null;
|
const gitRef = attrs.has("ref") ? forceStringValue(attrs.get("ref") as NixValue) : null;
|
||||||
const rev = attrs.has("rev") ? forceStringValue(attrs.get("rev") as NixValue) : null;
|
const rev = attrs.has("rev") ? forceStringValue(attrs.get("rev") as NixValue) : null;
|
||||||
const shallow = attrs.has("shallow") ? forceBool(attrs.get("shallow") as NixValue) : false;
|
const shallow = attrs.has("shallow") ? forceBool(attrs.get("shallow") as NixValue) : false;
|
||||||
@@ -204,16 +205,16 @@ export const fetchGit = (args: NixValue): NixAttrs => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
const outContext: NixStringContext = new Set();
|
const outContext: NixStringContext = new Set();
|
||||||
addOpaqueContext(outContext, result.outPath);
|
addOpaqueContext(outContext, result.out_path);
|
||||||
return new Map<string, NixValue>([
|
return new Map<string, NixValue>([
|
||||||
["outPath", mkStringWithContext(result.outPath, outContext)],
|
["outPath", mkStringWithContext(result.out_path, outContext)],
|
||||||
["rev", result.rev],
|
["rev", result.rev],
|
||||||
["shortRev", result.shortRev],
|
["shortRev", result.short_rev],
|
||||||
["revCount", BigInt(result.revCount)],
|
["revCount", BigInt(result.rev_count)],
|
||||||
["lastModified", BigInt(result.lastModified)],
|
["lastModified", BigInt(result.last_modified)],
|
||||||
["lastModifiedDate", result.lastModifiedDate],
|
["lastModifiedDate", result.last_modified_date],
|
||||||
["submodules", result.submodules],
|
["submodules", result.submodules],
|
||||||
["narHash", result.narHash],
|
["narHash", result.nar_hash],
|
||||||
]);
|
]);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -306,7 +307,12 @@ const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
|
|||||||
|
|
||||||
export const readDir = (path: NixValue): NixAttrs => {
|
export const readDir = (path: NixValue): NixAttrs => {
|
||||||
const pathStr = realisePath(path);
|
const pathStr = realisePath(path);
|
||||||
return Deno.core.ops.op_read_dir(pathStr);
|
const entries: Record<string, string> = Deno.core.ops.op_read_dir(pathStr);
|
||||||
|
const result: NixAttrs = new Map();
|
||||||
|
for (const [name, type] of Object.entries(entries)) {
|
||||||
|
result.set(name, type);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const readFile = (path: NixValue): string => {
|
export const readFile = (path: NixValue): string => {
|
||||||
@@ -469,13 +475,13 @@ export const findFile =
|
|||||||
suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath;
|
suffix.length > 0 ? Deno.core.ops.op_resolve_path(suffix, resolvedPath) : resolvedPath;
|
||||||
|
|
||||||
if (Deno.core.ops.op_path_exists(candidatePath)) {
|
if (Deno.core.ops.op_path_exists(candidatePath)) {
|
||||||
return new NixPath(candidatePath);
|
return { [IS_PATH]: true, value: candidatePath };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (lookupPathStr.startsWith("nix/")) {
|
if (lookupPathStr.startsWith("nix/")) {
|
||||||
// FIXME: special path type
|
// FIXME: special path type
|
||||||
return new NixPath(`<${lookupPathStr}>`);
|
return { [IS_PATH]: true, value: `<${lookupPathStr}>` };
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`);
|
throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`);
|
||||||
|
|||||||
@@ -146,7 +146,7 @@ export const functionArgs = (f: NixValue): NixAttrs => {
|
|||||||
ret.set(key, true);
|
ret.set(key, true);
|
||||||
}
|
}
|
||||||
const positions = func.args.positions;
|
const positions = func.args.positions;
|
||||||
if (positions) {
|
if (positions && Object.keys(positions).length > 0) {
|
||||||
ret[ATTR_POSITIONS] = positions;
|
ret[ATTR_POSITIONS] = positions;
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
@@ -318,9 +318,7 @@ export const splitVersion = (s: NixValue): NixValue => {
|
|||||||
return components;
|
return components;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const traceVerbose =
|
export const traceVerbose = (_e1: NixValue, e2: NixValue): NixStrictValue => {
|
||||||
(_e1: NixValue) =>
|
|
||||||
(e2: NixValue): NixStrictValue => {
|
|
||||||
// TODO: implement traceVerbose
|
// TODO: implement traceVerbose
|
||||||
return force(e2);
|
return force(e2);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import type { NixAttrs, NixBool, NixPath, NixString, NixValue } from "./types";
|
|||||||
import { CatchableError, isNixPath } from "./types";
|
import { CatchableError, isNixPath } from "./types";
|
||||||
|
|
||||||
interface StackFrame {
|
interface StackFrame {
|
||||||
span: number;
|
span: string;
|
||||||
message: string;
|
message: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -32,7 +32,7 @@ function enrichError(error: unknown): Error {
|
|||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
|
|
||||||
const pushContext = (message: string, span: number): void => {
|
const pushContext = (message: string, span: string): void => {
|
||||||
if (callStack.length >= MAX_STACK_DEPTH) {
|
if (callStack.length >= MAX_STACK_DEPTH) {
|
||||||
callStack.shift();
|
callStack.shift();
|
||||||
}
|
}
|
||||||
@@ -43,7 +43,7 @@ const popContext = (): void => {
|
|||||||
callStack.pop();
|
callStack.pop();
|
||||||
};
|
};
|
||||||
|
|
||||||
export const withContext = <T>(message: string, span: number, fn: () => T): T => {
|
export const withContext = <T>(message: string, span: string, fn: () => T): T => {
|
||||||
pushContext(message, span);
|
pushContext(message, span);
|
||||||
try {
|
try {
|
||||||
return fn();
|
return fn();
|
||||||
@@ -142,22 +142,19 @@ export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
|
|||||||
return mkPath(resolved);
|
return mkPath(resolved);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const select = (obj: NixValue, attrpath: NixValue[], span?: number): NixValue => {
|
export const select = (obj: NixValue, attrpath: NixValue[], span?: string): NixValue => {
|
||||||
if (span !== undefined) {
|
if (span) {
|
||||||
|
const pathStrings = attrpath.map((a) => forceStringValue(a));
|
||||||
|
const path = pathStrings.join(".");
|
||||||
|
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
|
||||||
|
|
||||||
if (callStack.length >= MAX_STACK_DEPTH) {
|
if (callStack.length >= MAX_STACK_DEPTH) {
|
||||||
callStack.shift();
|
callStack.shift();
|
||||||
}
|
}
|
||||||
const frame: StackFrame = { span, message: "while selecting attribute" };
|
callStack.push({ span, message });
|
||||||
callStack.push(frame);
|
|
||||||
try {
|
try {
|
||||||
return selectImpl(obj, attrpath);
|
return selectImpl(obj, attrpath);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
try {
|
|
||||||
const path = attrpath.map((a) => forceStringValue(a)).join(".");
|
|
||||||
if (path) frame.message = `while selecting attribute [${path}]`;
|
|
||||||
} catch {
|
|
||||||
throw enrichError(error);
|
|
||||||
}
|
|
||||||
throw enrichError(error);
|
throw enrichError(error);
|
||||||
} finally {
|
} finally {
|
||||||
callStack.pop();
|
callStack.pop();
|
||||||
@@ -170,8 +167,8 @@ export const select = (obj: NixValue, attrpath: NixValue[], span?: number): NixV
|
|||||||
function selectImpl(obj: NixValue, attrpath: NixValue[]): NixValue {
|
function selectImpl(obj: NixValue, attrpath: NixValue[]): NixValue {
|
||||||
let attrs = forceAttrs(obj);
|
let attrs = forceAttrs(obj);
|
||||||
|
|
||||||
for (let i = 0; i < attrpath.length - 1; i++) {
|
for (const attr of attrpath.slice(0, -1)) {
|
||||||
const key = forceStringValue(attrpath[i]);
|
const key = forceStringValue(attr);
|
||||||
if (!attrs.has(key)) {
|
if (!attrs.has(key)) {
|
||||||
throw new Error(`Attribute '${key}' not found`);
|
throw new Error(`Attribute '${key}' not found`);
|
||||||
}
|
}
|
||||||
@@ -190,23 +187,20 @@ export const selectWithDefault = (
|
|||||||
obj: NixValue,
|
obj: NixValue,
|
||||||
attrpath: NixValue[],
|
attrpath: NixValue[],
|
||||||
defaultVal: NixValue,
|
defaultVal: NixValue,
|
||||||
span?: number,
|
span?: string,
|
||||||
): NixValue => {
|
): NixValue => {
|
||||||
if (span !== undefined) {
|
if (span) {
|
||||||
|
const pathStrings = attrpath.map((a) => forceStringValue(a));
|
||||||
|
const path = pathStrings.join(".");
|
||||||
|
const message = path ? `while selecting attribute [${path}]` : "while selecting attribute";
|
||||||
|
|
||||||
if (callStack.length >= MAX_STACK_DEPTH) {
|
if (callStack.length >= MAX_STACK_DEPTH) {
|
||||||
callStack.shift();
|
callStack.shift();
|
||||||
}
|
}
|
||||||
const frame: StackFrame = { span, message: "while selecting attribute" };
|
callStack.push({ span, message });
|
||||||
callStack.push(frame);
|
|
||||||
try {
|
try {
|
||||||
return selectWithDefaultImpl(obj, attrpath, defaultVal);
|
return selectWithDefaultImpl(obj, attrpath, defaultVal);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
try {
|
|
||||||
const path = attrpath.map((a) => forceStringValue(a)).join(".");
|
|
||||||
if (path) frame.message = `while selecting attribute [${path}]`;
|
|
||||||
} catch {
|
|
||||||
throw enrichError(error);
|
|
||||||
}
|
|
||||||
throw enrichError(error);
|
throw enrichError(error);
|
||||||
} finally {
|
} finally {
|
||||||
callStack.pop();
|
callStack.pop();
|
||||||
@@ -222,8 +216,8 @@ function selectWithDefaultImpl(obj: NixValue, attrpath: NixValue[], defaultVal:
|
|||||||
return defaultVal;
|
return defaultVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < attrpath.length - 1; i++) {
|
for (const attr of attrpath.slice(0, -1)) {
|
||||||
const key = forceStringValue(attrpath[i]);
|
const key = forceStringValue(attr);
|
||||||
if (!attrs.has(key)) {
|
if (!attrs.has(key)) {
|
||||||
return defaultVal;
|
return defaultVal;
|
||||||
}
|
}
|
||||||
@@ -248,8 +242,8 @@ export const hasAttr = (obj: NixValue, attrpath: NixValue[]): NixBool => {
|
|||||||
}
|
}
|
||||||
let attrs = forced;
|
let attrs = forced;
|
||||||
|
|
||||||
for (let i = 0; i < attrpath.length - 1; i++) {
|
for (const attr of attrpath.slice(0, -1)) {
|
||||||
const key = forceStringNoCtx(attrpath[i]);
|
const key = forceStringNoCtx(attr);
|
||||||
if (!attrs.has(key)) {
|
if (!attrs.has(key)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -263,8 +257,8 @@ export const hasAttr = (obj: NixValue, attrpath: NixValue[]): NixBool => {
|
|||||||
return attrs.has(forceStringValue(attrpath[attrpath.length - 1]));
|
return attrs.has(forceStringValue(attrpath[attrpath.length - 1]));
|
||||||
};
|
};
|
||||||
|
|
||||||
export const call = (func: NixValue, arg: NixValue, span?: number): NixValue => {
|
export const call = (func: NixValue, arg: NixValue, span?: string): NixValue => {
|
||||||
if (span !== undefined) {
|
if (span) {
|
||||||
if (callStack.length >= MAX_STACK_DEPTH) {
|
if (callStack.length >= MAX_STACK_DEPTH) {
|
||||||
callStack.shift();
|
callStack.shift();
|
||||||
}
|
}
|
||||||
@@ -282,19 +276,19 @@ export const call = (func: NixValue, arg: NixValue, span?: number): NixValue =>
|
|||||||
};
|
};
|
||||||
|
|
||||||
function callImpl(func: NixValue, arg: NixValue): NixValue {
|
function callImpl(func: NixValue, arg: NixValue): NixValue {
|
||||||
const forced = force(func);
|
const forcedFunc = force(func);
|
||||||
if (typeof forced === "function") {
|
if (typeof forcedFunc === "function") {
|
||||||
forced.args?.check(arg);
|
forcedFunc.args?.check(arg);
|
||||||
return forced(arg);
|
return forcedFunc(arg);
|
||||||
}
|
}
|
||||||
if (forced instanceof Map && forced.has("__functor")) {
|
if (forcedFunc instanceof Map && forcedFunc.has("__functor")) {
|
||||||
const functor = forceFunction(forced.get("__functor") as NixValue);
|
const functor = forceFunction(forcedFunc.get("__functor") as NixValue);
|
||||||
return call(callImpl(functor, forced), arg);
|
return call(functor(forcedFunc), arg);
|
||||||
}
|
}
|
||||||
throw new Error(`attempt to call something which is not a function but ${typeOf(forced)}`);
|
throw new Error(`attempt to call something which is not a function but ${typeOf(forcedFunc)}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string, span: number): NixValue => {
|
export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string, span: string): NixValue => {
|
||||||
if (forceBool(assertion)) {
|
if (forceBool(assertion)) {
|
||||||
return expr;
|
return expr;
|
||||||
}
|
}
|
||||||
@@ -304,8 +298,15 @@ export const assert = (assertion: NixValue, expr: NixValue, assertionRaw: string
|
|||||||
throw "unreachable";
|
throw "unreachable";
|
||||||
};
|
};
|
||||||
|
|
||||||
export const mkPos = (span: number): NixAttrs => {
|
export const ifFunc = (cond: NixValue, consq: NixValue, alter: NixValue) => {
|
||||||
return Deno.core.ops.op_decode_span(span);
|
if (forceBool(cond)) {
|
||||||
|
return consq;
|
||||||
|
}
|
||||||
|
return alter;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const mkPos = (span: string): NixAttrs => {
|
||||||
|
return new Map(Object.entries(Deno.core.ops.op_decode_span(span)));
|
||||||
};
|
};
|
||||||
|
|
||||||
interface WithScope {
|
interface WithScope {
|
||||||
@@ -313,8 +314,8 @@ interface WithScope {
|
|||||||
last: WithScope | null;
|
last: WithScope | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const lookupWith = (name: string, withScope: WithScope): NixValue => {
|
export const lookupWith = (name: string, withScope: WithScope | null): NixValue => {
|
||||||
let current: WithScope | null = withScope;
|
let current = withScope;
|
||||||
while (current !== null) {
|
while (current !== null) {
|
||||||
const attrs = forceAttrs(current.env);
|
const attrs = forceAttrs(current.env);
|
||||||
if (attrs.has(name)) {
|
if (attrs.has(name)) {
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { builtins, PRIMOP_METADATA } from "./builtins";
|
import { builtins, PRIMOP_METADATA } from "./builtins";
|
||||||
|
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
|
||||||
import {
|
import {
|
||||||
assert,
|
assert,
|
||||||
call,
|
call,
|
||||||
@@ -15,32 +16,40 @@ import {
|
|||||||
resolvePath,
|
resolvePath,
|
||||||
select,
|
select,
|
||||||
selectWithDefault,
|
selectWithDefault,
|
||||||
|
withContext,
|
||||||
} from "./helpers";
|
} from "./helpers";
|
||||||
import { op } from "./operators";
|
import { op } from "./operators";
|
||||||
import { HAS_CONTEXT } from "./string-context";
|
import { HAS_CONTEXT } from "./string-context";
|
||||||
import { createThunk, DEBUG_THUNKS, force, forceDeep, forceShallow, IS_CYCLE, IS_THUNK } from "./thunk";
|
import {
|
||||||
|
createThunk,
|
||||||
|
DEBUG_THUNKS,
|
||||||
|
force,
|
||||||
|
forceDeep,
|
||||||
|
forceShallow,
|
||||||
|
IS_CYCLE,
|
||||||
|
IS_THUNK,
|
||||||
|
isThunk,
|
||||||
|
} from "./thunk";
|
||||||
import { forceBool } from "./type-assert";
|
import { forceBool } from "./type-assert";
|
||||||
import { IS_PATH, mkAttrs, mkFunction, type NixValue } from "./types";
|
import { ATTR_POSITIONS, IS_PATH, mkAttrs, mkAttrsWithPos, mkFunction, type NixValue } from "./types";
|
||||||
import { execBytecode, execBytecodeScoped, vmStrings, vmConstants } from "./vm";
|
|
||||||
|
|
||||||
export type NixRuntime = typeof Nix;
|
export type NixRuntime = typeof Nix;
|
||||||
|
|
||||||
const replBindings: Map<string, NixValue> = new Map();
|
const replBindings: Record<string, NixValue> = {};
|
||||||
|
|
||||||
export const Nix = {
|
export const Nix = {
|
||||||
|
createThunk,
|
||||||
|
force,
|
||||||
|
forceShallow,
|
||||||
|
forceDeep,
|
||||||
|
forceBool,
|
||||||
|
isThunk,
|
||||||
IS_THUNK,
|
IS_THUNK,
|
||||||
IS_CYCLE,
|
IS_CYCLE,
|
||||||
HAS_CONTEXT,
|
HAS_CONTEXT,
|
||||||
IS_PATH,
|
IS_PATH,
|
||||||
PRIMOP_METADATA,
|
|
||||||
DEBUG_THUNKS,
|
DEBUG_THUNKS,
|
||||||
|
|
||||||
createThunk,
|
|
||||||
force,
|
|
||||||
forceBool,
|
|
||||||
forceShallow,
|
|
||||||
forceDeep,
|
|
||||||
|
|
||||||
assert,
|
assert,
|
||||||
call,
|
call,
|
||||||
hasAttr,
|
hasAttr,
|
||||||
@@ -48,51 +57,26 @@ export const Nix = {
|
|||||||
selectWithDefault,
|
selectWithDefault,
|
||||||
lookupWith,
|
lookupWith,
|
||||||
resolvePath,
|
resolvePath,
|
||||||
|
coerceToString,
|
||||||
concatStringsWithContext,
|
concatStringsWithContext,
|
||||||
|
StringCoercionMode,
|
||||||
mkAttrs,
|
mkAttrs,
|
||||||
|
mkAttrsWithPos,
|
||||||
mkFunction,
|
mkFunction,
|
||||||
mkPos,
|
mkPos,
|
||||||
|
ATTR_POSITIONS,
|
||||||
|
|
||||||
|
withContext,
|
||||||
|
|
||||||
op,
|
op,
|
||||||
builtins,
|
builtins,
|
||||||
|
PRIMOP_METADATA,
|
||||||
strings: vmStrings,
|
|
||||||
constants: vmConstants,
|
|
||||||
execBytecode,
|
|
||||||
execBytecodeScoped,
|
|
||||||
|
|
||||||
replBindings,
|
replBindings,
|
||||||
setReplBinding: (name: string, value: NixValue) => {
|
setReplBinding: (name: string, value: NixValue) => {
|
||||||
replBindings.set(name, value);
|
replBindings[name] = value;
|
||||||
},
|
},
|
||||||
getReplBinding: (name: string) => replBindings.get(name),
|
getReplBinding: (name: string) => replBindings[name],
|
||||||
};
|
};
|
||||||
|
|
||||||
globalThis.Nix = Nix;
|
globalThis.Nix = Nix;
|
||||||
globalThis.$t = createThunk;
|
|
||||||
globalThis.$f = force;
|
|
||||||
globalThis.$fb = forceBool;
|
|
||||||
globalThis.$a = assert;
|
|
||||||
globalThis.$c = call;
|
|
||||||
globalThis.$h = hasAttr;
|
|
||||||
globalThis.$s = select;
|
|
||||||
globalThis.$sd = selectWithDefault;
|
|
||||||
globalThis.$l = lookupWith;
|
|
||||||
globalThis.$r = resolvePath;
|
|
||||||
globalThis.$cs = concatStringsWithContext;
|
|
||||||
globalThis.$ma = mkAttrs;
|
|
||||||
globalThis.$mf = mkFunction;
|
|
||||||
globalThis.$mp = mkPos;
|
|
||||||
globalThis.$gb = Nix.getReplBinding;
|
|
||||||
|
|
||||||
globalThis.$oa = op.add;
|
|
||||||
globalThis.$os = op.sub;
|
|
||||||
globalThis.$om = op.mul;
|
|
||||||
globalThis.$od = op.div;
|
|
||||||
globalThis.$oe = op.eq;
|
|
||||||
globalThis.$ol = op.lt;
|
|
||||||
globalThis.$og = op.gt;
|
|
||||||
globalThis.$oc = op.concat;
|
|
||||||
globalThis.$ou = op.update;
|
|
||||||
globalThis.$b = builtins;
|
|
||||||
globalThis.$e = new Map();
|
|
||||||
|
|||||||
@@ -270,12 +270,6 @@ export const op = {
|
|||||||
update: (a: NixValue, b: NixValue): NixAttrs => {
|
update: (a: NixValue, b: NixValue): NixAttrs => {
|
||||||
const mapA = forceAttrs(a);
|
const mapA = forceAttrs(a);
|
||||||
const mapB = forceAttrs(b);
|
const mapB = forceAttrs(b);
|
||||||
if (mapA.size === 0) {
|
|
||||||
return mapB;
|
|
||||||
}
|
|
||||||
if (mapB.size === 0) {
|
|
||||||
return mapA;
|
|
||||||
}
|
|
||||||
const result: NixAttrs = new Map(mapA);
|
const result: NixAttrs = new Map(mapA);
|
||||||
for (const [k, v] of mapB) {
|
for (const [k, v] of mapB) {
|
||||||
result.set(k, v);
|
result.set(k, v);
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { NixPath } from "./types";
|
import { IS_PATH, type NixPath } from "./types";
|
||||||
|
|
||||||
const canonicalizePath = (path: string): string => {
|
const canonicalizePath = (path: string): string => {
|
||||||
const parts: string[] = [];
|
const parts: string[] = [];
|
||||||
@@ -30,7 +30,7 @@ const canonicalizePath = (path: string): string => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const mkPath = (value: string): NixPath => {
|
export const mkPath = (value: string): NixPath => {
|
||||||
return new NixPath(canonicalizePath(value));
|
return { [IS_PATH]: true, value: canonicalizePath(value) };
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getPathValue = (p: NixPath): string => {
|
export const getPathValue = (p: NixPath): string => {
|
||||||
|
|||||||
@@ -39,10 +39,16 @@ export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet(
|
|||||||
return "<LAMBDA>";
|
return "<LAMBDA>";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (IS_CYCLE in value) {
|
if (typeof value === "object") {
|
||||||
|
if (IS_CYCLE in value && (value as Record<symbol, unknown>)[IS_CYCLE] === true) {
|
||||||
return "«repeated»";
|
return "«repeated»";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (seen.has(value)) {
|
||||||
|
return "«repeated»";
|
||||||
|
}
|
||||||
|
seen.add(value);
|
||||||
|
|
||||||
if (isNixPath(value)) {
|
if (isNixPath(value)) {
|
||||||
return value.value;
|
return value.value;
|
||||||
}
|
}
|
||||||
@@ -52,27 +58,17 @@ export const printValue = (value: NixValue, seen: WeakSet<object> = new WeakSet(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (Array.isArray(value)) {
|
if (Array.isArray(value)) {
|
||||||
if (value.length > 0) {
|
|
||||||
if (seen.has(value)) {
|
|
||||||
return "«repeated»";
|
|
||||||
}
|
|
||||||
seen.add(value);
|
|
||||||
}
|
|
||||||
const items = value.map((v) => printValue(v, seen)).join(" ");
|
const items = value.map((v) => printValue(v, seen)).join(" ");
|
||||||
return `[ ${items} ]`;
|
return `[ ${items} ]`;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (seen.has(value)) {
|
const entries = Object.entries(value)
|
||||||
return "«repeated»";
|
|
||||||
}
|
|
||||||
if (value.size > 0) {
|
|
||||||
seen.add(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
const entries = [...value.entries()]
|
|
||||||
.map(([k, v]) => `${printSymbol(k)} = ${printValue(v, seen)};`)
|
.map(([k, v]) => `${printSymbol(k)} = ${printValue(v, seen)};`)
|
||||||
.join(" ");
|
.join(" ");
|
||||||
return `{${entries ? ` ${entries} ` : " "}}`;
|
return `{${entries ? ` ${entries} ` : " "}}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return "<unknown>";
|
||||||
};
|
};
|
||||||
|
|
||||||
const printString = (s: string): string => {
|
const printString = (s: string): string => {
|
||||||
|
|||||||
@@ -22,22 +22,18 @@ export type StringContextElem = StringContextOpaque | StringContextDrvDeep | Str
|
|||||||
|
|
||||||
export type NixStringContext = Set<string>;
|
export type NixStringContext = Set<string>;
|
||||||
|
|
||||||
export class StringWithContext {
|
export interface StringWithContext {
|
||||||
readonly [HAS_CONTEXT] = true as const;
|
readonly [HAS_CONTEXT]: true;
|
||||||
value: string;
|
value: string;
|
||||||
context: NixStringContext;
|
context: NixStringContext;
|
||||||
constructor(value: string, context: NixStringContext) {
|
|
||||||
this.value = value;
|
|
||||||
this.context = context;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const isStringWithContext = (v: NixStrictValue): v is StringWithContext => {
|
export const isStringWithContext = (v: NixStrictValue): v is StringWithContext => {
|
||||||
return v instanceof StringWithContext;
|
return typeof v === "object" && v !== null && HAS_CONTEXT in v;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const mkStringWithContext = (value: string, context: NixStringContext): StringWithContext => {
|
export const mkStringWithContext = (value: string, context: NixStringContext): StringWithContext => {
|
||||||
return new StringWithContext(value, context);
|
return { [HAS_CONTEXT]: true, value, context };
|
||||||
};
|
};
|
||||||
|
|
||||||
export const mkPlainString = (value: string): string => value;
|
export const mkPlainString = (value: string): string => value;
|
||||||
@@ -49,12 +45,11 @@ export const getStringValue = (s: string | StringWithContext): string => {
|
|||||||
return s;
|
return s;
|
||||||
};
|
};
|
||||||
|
|
||||||
const emptyContext: NixStringContext = new Set();
|
|
||||||
export const getStringContext = (s: string | StringWithContext): NixStringContext => {
|
export const getStringContext = (s: string | StringWithContext): NixStringContext => {
|
||||||
if (isStringWithContext(s)) {
|
if (isStringWithContext(s)) {
|
||||||
return s.context;
|
return s.context;
|
||||||
}
|
}
|
||||||
return emptyContext;
|
return new Set();
|
||||||
};
|
};
|
||||||
|
|
||||||
export const mergeContexts = (...contexts: NixStringContext[]): NixStringContext => {
|
export const mergeContexts = (...contexts: NixStringContext[]): NixStringContext => {
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { isAttrs, isList } from "./builtins/type-check";
|
import { isAttrs, isList } from "./builtins/type-check";
|
||||||
import { StringWithContext } from "./string-context";
|
import { HAS_CONTEXT } from "./string-context";
|
||||||
import type { NixAttrs, NixStrictValue, NixValue } from "./types";
|
import type { NixAttrs, NixStrictValue, NixThunkInterface, NixValue } from "./types";
|
||||||
import { NixPath } from "./types";
|
import { IS_PATH } from "./types";
|
||||||
|
|
||||||
export const IS_THUNK = Symbol("is_thunk");
|
export const IS_THUNK = Symbol("is_thunk");
|
||||||
|
|
||||||
@@ -21,7 +21,8 @@ export const DEBUG_THUNKS = { enabled: true };
|
|||||||
* - Evaluating (blackhole): func is undefined, result is undefined
|
* - Evaluating (blackhole): func is undefined, result is undefined
|
||||||
* - Evaluated: func is undefined, result is defined
|
* - Evaluated: func is undefined, result is defined
|
||||||
*/
|
*/
|
||||||
export class NixThunk {
|
export class NixThunk implements NixThunkInterface {
|
||||||
|
[key: symbol]: unknown;
|
||||||
readonly [IS_THUNK] = true as const;
|
readonly [IS_THUNK] = true as const;
|
||||||
func: (() => NixValue) | undefined;
|
func: (() => NixValue) | undefined;
|
||||||
result: NixStrictValue | undefined;
|
result: NixStrictValue | undefined;
|
||||||
@@ -41,8 +42,8 @@ export class NixThunk {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const isThunk = (value: NixValue): value is NixThunk => {
|
export const isThunk = (value: NixValue): value is NixThunkInterface => {
|
||||||
return value instanceof NixThunk;
|
return value !== null && typeof value === "object" && IS_THUNK in value && value[IS_THUNK] === true;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -111,7 +112,7 @@ export const force = (value: NixValue): NixStrictValue => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const createThunk = (func: () => NixValue, label?: string): NixThunk => {
|
export const createThunk = (func: () => NixValue, label?: string): NixThunkInterface => {
|
||||||
return new NixThunk(func, label);
|
return new NixThunk(func, label);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -132,13 +133,16 @@ export const forceDeep = (value: NixValue, seen: WeakSet<object> = new WeakSet()
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (seen.has(forced)) {
|
if (seen.has(forced)) {
|
||||||
|
if (Array.isArray(forced)) {
|
||||||
|
return [CYCLE_MARKER];
|
||||||
|
}
|
||||||
return CYCLE_MARKER;
|
return CYCLE_MARKER;
|
||||||
}
|
}
|
||||||
if ((isAttrs(forced) && forced.size > 0) || (isList(forced) && forced.length > 0)) {
|
if (isAttrs(forced) || isList(forced)) {
|
||||||
seen.add(forced);
|
seen.add(forced);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (forced instanceof StringWithContext || forced instanceof NixPath) {
|
if (HAS_CONTEXT in forced || IS_PATH in forced) {
|
||||||
return forced;
|
return forced;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,22 +1,18 @@
|
|||||||
import { PRIMOP_METADATA, type PrimopMetadata } from "./builtins";
|
|
||||||
import { HAS_CONTEXT, isStringWithContext, type StringWithContext } from "./string-context";
|
import { HAS_CONTEXT, isStringWithContext, type StringWithContext } from "./string-context";
|
||||||
import { type CYCLE_MARKER, force, type NixThunk } from "./thunk";
|
import { type CYCLE_MARKER, force, IS_THUNK } from "./thunk";
|
||||||
import { forceAttrs, forceStringNoCtx } from "./type-assert";
|
import { forceAttrs, forceStringNoCtx } from "./type-assert";
|
||||||
export { HAS_CONTEXT, isStringWithContext };
|
export { HAS_CONTEXT, isStringWithContext };
|
||||||
export type { StringWithContext };
|
export type { StringWithContext };
|
||||||
|
|
||||||
export const IS_PATH = Symbol("IS_PATH");
|
export const IS_PATH = Symbol("IS_PATH");
|
||||||
|
|
||||||
export class NixPath {
|
export interface NixPath {
|
||||||
readonly [IS_PATH] = true as const;
|
readonly [IS_PATH]: true;
|
||||||
value: string;
|
value: string;
|
||||||
constructor(value: string) {
|
|
||||||
this.value = value;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const isNixPath = (v: NixStrictValue): v is NixPath => {
|
export const isNixPath = (v: NixStrictValue): v is NixPath => {
|
||||||
return v instanceof NixPath;
|
return typeof v === "object" && v !== null && IS_PATH in v;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type NixInt = bigint;
|
export type NixInt = bigint;
|
||||||
@@ -28,18 +24,15 @@ export type NixNull = null;
|
|||||||
|
|
||||||
export const ATTR_POSITIONS = Symbol("attrPositions");
|
export const ATTR_POSITIONS = Symbol("attrPositions");
|
||||||
export type NixList = NixValue[];
|
export type NixList = NixValue[];
|
||||||
export type NixAttrs = Map<string, NixValue> & { [ATTR_POSITIONS]?: Map<string, number> };
|
export type NixAttrs = Map<string, NixValue> & { [ATTR_POSITIONS]?: Record<string, string> };
|
||||||
export type NixFunction = ((arg: NixValue) => NixValue) & {
|
export type NixFunction = ((arg: NixValue) => NixValue) & { args?: NixArgs };
|
||||||
args?: NixArgs;
|
|
||||||
[PRIMOP_METADATA]?: PrimopMetadata;
|
|
||||||
};
|
|
||||||
export class NixArgs {
|
export class NixArgs {
|
||||||
required: string[];
|
required: string[];
|
||||||
optional: string[];
|
optional: string[];
|
||||||
allowed: Set<string>;
|
allowed: Set<string>;
|
||||||
ellipsis: boolean;
|
ellipsis: boolean;
|
||||||
positions: Map<string, number>;
|
positions: Record<string, string>;
|
||||||
constructor(required: string[], optional: string[], positions: Map<string, number>, ellipsis: boolean) {
|
constructor(required: string[], optional: string[], positions: Record<string, string>, ellipsis: boolean) {
|
||||||
this.required = required;
|
this.required = required;
|
||||||
this.optional = optional;
|
this.optional = optional;
|
||||||
this.positions = positions;
|
this.positions = positions;
|
||||||
@@ -68,7 +61,7 @@ export const mkFunction = (
|
|||||||
f: (arg: NixValue) => NixValue,
|
f: (arg: NixValue) => NixValue,
|
||||||
required: string[],
|
required: string[],
|
||||||
optional: string[],
|
optional: string[],
|
||||||
positions: Map<string, number>,
|
positions: Record<string, string>,
|
||||||
ellipsis: boolean,
|
ellipsis: boolean,
|
||||||
): NixFunction => {
|
): NixFunction => {
|
||||||
const func: NixFunction = f;
|
const func: NixFunction = f;
|
||||||
@@ -76,11 +69,26 @@ export const mkFunction = (
|
|||||||
return func;
|
return func;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const mkAttrs = (
|
export const mkAttrs = (attrs: NixAttrs, keys: NixValue[], values: NixValue[]): NixAttrs => {
|
||||||
attrs: NixAttrs,
|
const len = keys.length;
|
||||||
positions: Map<string, number>,
|
for (let i = 0; i < len; i++) {
|
||||||
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: number[] },
|
const key = force(keys[i]);
|
||||||
|
if (key === null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const str = forceStringNoCtx(key);
|
||||||
|
attrs.set(str, values[i]);
|
||||||
|
}
|
||||||
|
return attrs;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const mkAttrsWithPos = (
|
||||||
|
obj: Record<string, NixValue>,
|
||||||
|
positions: Record<string, string>,
|
||||||
|
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] },
|
||||||
): NixAttrs => {
|
): NixAttrs => {
|
||||||
|
const attrs: NixAttrs = new Map(Object.entries(obj));
|
||||||
|
|
||||||
if (dyns) {
|
if (dyns) {
|
||||||
const len = dyns.dynKeys.length;
|
const len = dyns.dynKeys.length;
|
||||||
for (let i = 0; i < len; i++) {
|
for (let i = 0; i < len; i++) {
|
||||||
@@ -90,17 +98,23 @@ export const mkAttrs = (
|
|||||||
}
|
}
|
||||||
const str = forceStringNoCtx(key);
|
const str = forceStringNoCtx(key);
|
||||||
attrs.set(str, dyns.dynVals[i]);
|
attrs.set(str, dyns.dynVals[i]);
|
||||||
positions.set(str, dyns.dynSpans[i]);
|
positions[str] = dyns.dynSpans[i];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (positions.size > 0) {
|
if (Object.keys(positions).length > 0) {
|
||||||
attrs[ATTR_POSITIONS] = positions;
|
attrs[ATTR_POSITIONS] = positions;
|
||||||
}
|
}
|
||||||
|
|
||||||
return attrs;
|
return attrs;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export interface NixThunkInterface {
|
||||||
|
readonly [IS_THUNK]: true;
|
||||||
|
func: (() => NixValue) | undefined;
|
||||||
|
result: NixStrictValue | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
export type NixPrimitive = NixNull | NixBool | NixInt | NixFloat | NixString;
|
export type NixPrimitive = NixNull | NixBool | NixInt | NixFloat | NixString;
|
||||||
export type NixValue =
|
export type NixValue =
|
||||||
| NixPrimitive
|
| NixPrimitive
|
||||||
@@ -108,8 +122,8 @@ export type NixValue =
|
|||||||
| NixList
|
| NixList
|
||||||
| NixAttrs
|
| NixAttrs
|
||||||
| NixFunction
|
| NixFunction
|
||||||
| NixThunk
|
| NixThunkInterface
|
||||||
| typeof CYCLE_MARKER;
|
| typeof CYCLE_MARKER;
|
||||||
export type NixStrictValue = Exclude<NixValue, NixThunk>;
|
export type NixStrictValue = Exclude<NixValue, NixThunkInterface>;
|
||||||
|
|
||||||
export class CatchableError extends Error {}
|
export class CatchableError extends Error {}
|
||||||
|
|||||||
85
nix-js/runtime-ts/src/types/global.d.ts
vendored
85
nix-js/runtime-ts/src/types/global.d.ts
vendored
@@ -1,69 +1,30 @@
|
|||||||
import type { NixRuntime } from "..";
|
import type { NixRuntime } from "..";
|
||||||
import type { builtins } from "../builtins";
|
|
||||||
import type { FetchGitResult, FetchTarballResult, FetchUrlResult } from "../builtins/io";
|
import type { FetchGitResult, FetchTarballResult, FetchUrlResult } from "../builtins/io";
|
||||||
import type {
|
|
||||||
assert,
|
|
||||||
call,
|
|
||||||
concatStringsWithContext,
|
|
||||||
hasAttr,
|
|
||||||
lookupWith,
|
|
||||||
mkPos,
|
|
||||||
resolvePath,
|
|
||||||
select,
|
|
||||||
selectWithDefault,
|
|
||||||
} from "../helpers";
|
|
||||||
import type { op } from "../operators";
|
|
||||||
import type { createThunk, force } from "../thunk";
|
|
||||||
import type { forceBool } from "../type-assert";
|
|
||||||
import type { mkAttrs, mkFunction, NixAttrs, NixStrictValue } from "../types";
|
|
||||||
|
|
||||||
declare global {
|
declare global {
|
||||||
var Nix: NixRuntime;
|
var Nix: NixRuntime;
|
||||||
var $t: typeof createThunk;
|
|
||||||
var $f: typeof force;
|
|
||||||
var $fb: typeof forceBool;
|
|
||||||
var $a: typeof assert;
|
|
||||||
var $c: typeof call;
|
|
||||||
var $h: typeof hasAttr;
|
|
||||||
var $s: typeof select;
|
|
||||||
var $sd: typeof selectWithDefault;
|
|
||||||
var $l: typeof lookupWith;
|
|
||||||
var $r: typeof resolvePath;
|
|
||||||
var $cs: typeof concatStringsWithContext;
|
|
||||||
var $ma: typeof mkAttrs;
|
|
||||||
var $mf: typeof mkFunction;
|
|
||||||
var $mp: typeof mkPos;
|
|
||||||
var $oa: typeof op.add;
|
|
||||||
var $os: typeof op.sub;
|
|
||||||
var $om: typeof op.mul;
|
|
||||||
var $od: typeof op.div;
|
|
||||||
var $oe: typeof op.eq;
|
|
||||||
var $ol: typeof op.lt;
|
|
||||||
var $og: typeof op.gt;
|
|
||||||
var $oc: typeof op.concat;
|
|
||||||
var $ou: typeof op.update;
|
|
||||||
var $b: typeof builtins;
|
|
||||||
var $e: NixAttrs;
|
|
||||||
var $gb: typeof Nix.getReplBinding;
|
|
||||||
|
|
||||||
namespace Deno {
|
namespace Deno {
|
||||||
namespace core {
|
namespace core {
|
||||||
namespace ops {
|
namespace ops {
|
||||||
function op_import(path: string): [Uint8Array, string];
|
function op_import(path: string): string;
|
||||||
function op_scoped_import(path: string, scopeKeys: string[]): [Uint8Array, string];
|
function op_scoped_import(path: string, scopeKeys: string[]): string;
|
||||||
|
|
||||||
function op_resolve_path(currentDir: string, path: string): string;
|
function op_resolve_path(currentDir: string, path: string): string;
|
||||||
|
|
||||||
function op_read_file(path: string): string;
|
function op_read_file(path: string): string;
|
||||||
function op_read_file_type(path: string): string;
|
function op_read_file_type(path: string): string;
|
||||||
function op_read_dir(path: string): Map<string, string>;
|
function op_read_dir(path: string): Record<string, string>;
|
||||||
function op_path_exists(path: string): boolean;
|
function op_path_exists(path: string): boolean;
|
||||||
function op_walk_dir(path: string): [string, string][];
|
function op_walk_dir(path: string): [string, string][];
|
||||||
|
|
||||||
function op_make_placeholder(output: string): string;
|
function op_make_placeholder(output: string): string;
|
||||||
function op_store_path(path: string): string;
|
function op_store_path(path: string): string;
|
||||||
|
|
||||||
function op_convert_hash(hash: string, hashAlgo: string | null, toHashFormat: string): string;
|
function op_convert_hash(input: {
|
||||||
|
hash: string;
|
||||||
|
hashAlgo: string | null;
|
||||||
|
toHashFormat: string;
|
||||||
|
}): string;
|
||||||
function op_hash_string(algo: string, data: string): string;
|
function op_hash_string(algo: string, data: string): string;
|
||||||
function op_hash_file(algo: string, path: string): string;
|
function op_hash_file(algo: string, path: string): string;
|
||||||
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
|
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
|
||||||
@@ -82,7 +43,11 @@ declare global {
|
|||||||
includePaths: string[],
|
includePaths: string[],
|
||||||
): string;
|
): string;
|
||||||
|
|
||||||
function op_decode_span(span: number): NixAttrs;
|
function op_decode_span(span: string): {
|
||||||
|
file: string | null;
|
||||||
|
line: number | null;
|
||||||
|
column: number | null;
|
||||||
|
};
|
||||||
|
|
||||||
function op_to_file(name: string, contents: string, references: string[]): string;
|
function op_to_file(name: string, contents: string, references: string[]): string;
|
||||||
|
|
||||||
@@ -93,20 +58,20 @@ declare global {
|
|||||||
function op_match(regex: string, text: string): (string | null)[] | null;
|
function op_match(regex: string, text: string): (string | null)[] | null;
|
||||||
function op_split(regex: string, text: string): (string | (string | null)[])[];
|
function op_split(regex: string, text: string): (string | (string | null)[])[];
|
||||||
|
|
||||||
function op_from_json(json: string): NixStrictValue;
|
function op_from_json(json: string): unknown;
|
||||||
function op_from_toml(toml: string): NixStrictValue;
|
function op_from_toml(toml: string): unknown;
|
||||||
function op_to_xml(e: NixValue): [string, string[]];
|
function op_to_xml(e: NixValue): [string, string[]];
|
||||||
|
|
||||||
function op_finalize_derivation(
|
function op_finalize_derivation(input: {
|
||||||
name: string,
|
name: string;
|
||||||
builder: string,
|
builder: string;
|
||||||
platform: string,
|
platform: string;
|
||||||
outputs: string[],
|
outputs: string[];
|
||||||
args: string[],
|
args: string[];
|
||||||
env: [string, string][],
|
env: [string, string][];
|
||||||
context: string[],
|
context: string[];
|
||||||
fixedOutput: { hashAlgo: string; hash: string; hashMode: string } | null,
|
fixedOutput: { hashAlgo: string; hash: string; hashMode: string } | null;
|
||||||
): { drvPath: string; outputs: [string, string][] };
|
}): { drvPath: string; outputs: [string, string][] };
|
||||||
|
|
||||||
function op_fetch_url(
|
function op_fetch_url(
|
||||||
url: string,
|
url: string,
|
||||||
|
|||||||
@@ -1,617 +0,0 @@
|
|||||||
import {
|
|
||||||
assert,
|
|
||||||
call,
|
|
||||||
concatStringsWithContext,
|
|
||||||
hasAttr,
|
|
||||||
lookupWith,
|
|
||||||
mkPos,
|
|
||||||
resolvePath,
|
|
||||||
select,
|
|
||||||
selectWithDefault,
|
|
||||||
} from "./helpers";
|
|
||||||
import { op } from "./operators";
|
|
||||||
import { NixThunk } from "./thunk";
|
|
||||||
import { forceBool } from "./type-assert";
|
|
||||||
import { mkAttrs, NixArgs, type NixAttrs, type NixFunction, type NixValue } from "./types";
|
|
||||||
import { builtins } from "./builtins";
|
|
||||||
|
|
||||||
enum Op {
|
|
||||||
PushConst = 0x01,
|
|
||||||
PushString = 0x02,
|
|
||||||
PushNull = 0x03,
|
|
||||||
PushTrue = 0x04,
|
|
||||||
PushFalse = 0x05,
|
|
||||||
|
|
||||||
LoadLocal = 0x06,
|
|
||||||
LoadOuter = 0x07,
|
|
||||||
StoreLocal = 0x08,
|
|
||||||
AllocLocals = 0x09,
|
|
||||||
|
|
||||||
MakeThunk = 0x0A,
|
|
||||||
MakeClosure = 0x0B,
|
|
||||||
MakePatternClosure = 0x0C,
|
|
||||||
|
|
||||||
Call = 0x0D,
|
|
||||||
CallNoSpan = 0x0E,
|
|
||||||
|
|
||||||
MakeAttrs = 0x0F,
|
|
||||||
MakeAttrsDyn = 0x10,
|
|
||||||
MakeEmptyAttrs = 0x11,
|
|
||||||
Select = 0x12,
|
|
||||||
SelectDefault = 0x13,
|
|
||||||
HasAttr = 0x14,
|
|
||||||
|
|
||||||
MakeList = 0x15,
|
|
||||||
|
|
||||||
OpAdd = 0x16,
|
|
||||||
OpSub = 0x17,
|
|
||||||
OpMul = 0x18,
|
|
||||||
OpDiv = 0x19,
|
|
||||||
OpEq = 0x20,
|
|
||||||
OpNeq = 0x21,
|
|
||||||
OpLt = 0x22,
|
|
||||||
OpGt = 0x23,
|
|
||||||
OpLeq = 0x24,
|
|
||||||
OpGeq = 0x25,
|
|
||||||
OpConcat = 0x26,
|
|
||||||
OpUpdate = 0x27,
|
|
||||||
|
|
||||||
OpNeg = 0x28,
|
|
||||||
OpNot = 0x29,
|
|
||||||
|
|
||||||
ForceBool = 0x30,
|
|
||||||
JumpIfFalse = 0x31,
|
|
||||||
JumpIfTrue = 0x32,
|
|
||||||
Jump = 0x33,
|
|
||||||
|
|
||||||
ConcatStrings = 0x34,
|
|
||||||
ResolvePath = 0x35,
|
|
||||||
|
|
||||||
Assert = 0x36,
|
|
||||||
|
|
||||||
PushWith = 0x37,
|
|
||||||
PopWith = 0x38,
|
|
||||||
WithLookup = 0x39,
|
|
||||||
|
|
||||||
LoadBuiltins = 0x40,
|
|
||||||
LoadBuiltin = 0x41,
|
|
||||||
|
|
||||||
MkPos = 0x43,
|
|
||||||
|
|
||||||
LoadReplBinding = 0x44,
|
|
||||||
LoadScopedBinding = 0x45,
|
|
||||||
|
|
||||||
Return = 0x46,
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ScopeChain {
|
|
||||||
locals: NixValue[];
|
|
||||||
parent: ScopeChain | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface WithScope {
|
|
||||||
env: NixValue;
|
|
||||||
last: WithScope | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const strings: string[] = [];
|
|
||||||
const constants: NixValue[] = [];
|
|
||||||
|
|
||||||
const $e: NixAttrs = new Map();
|
|
||||||
|
|
||||||
function readU16(code: Uint8Array, offset: number): number {
|
|
||||||
return code[offset] | (code[offset + 1] << 8);
|
|
||||||
}
|
|
||||||
|
|
||||||
function readU32(code: Uint8Array, offset: number): number {
|
|
||||||
return (
|
|
||||||
code[offset] |
|
|
||||||
(code[offset + 1] << 8) |
|
|
||||||
(code[offset + 2] << 16) |
|
|
||||||
(code[offset + 3] << 24)
|
|
||||||
) >>> 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function readI32(code: Uint8Array, offset: number): number {
|
|
||||||
return code[offset] | (code[offset + 1] << 8) | (code[offset + 2] << 16) | (code[offset + 3] << 24);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function execBytecode(code: Uint8Array, currentDir: string): NixValue {
|
|
||||||
const chain: ScopeChain = { locals: [], parent: null };
|
|
||||||
return execFrame(code, 0, chain, currentDir, null, null);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function execBytecodeScoped(
|
|
||||||
code: Uint8Array,
|
|
||||||
currentDir: string,
|
|
||||||
scopeMap: NixAttrs,
|
|
||||||
): NixValue {
|
|
||||||
const chain: ScopeChain = { locals: [], parent: null };
|
|
||||||
return execFrame(code, 0, chain, currentDir, null, scopeMap);
|
|
||||||
}
|
|
||||||
|
|
||||||
function execFrame(
|
|
||||||
code: Uint8Array,
|
|
||||||
startPc: number,
|
|
||||||
chain: ScopeChain,
|
|
||||||
currentDir: string,
|
|
||||||
withScope: WithScope | null,
|
|
||||||
scopeMap: NixAttrs | null,
|
|
||||||
): NixValue {
|
|
||||||
const locals = chain.locals;
|
|
||||||
const stack: NixValue[] = [];
|
|
||||||
let pc = startPc;
|
|
||||||
|
|
||||||
for (;;) {
|
|
||||||
const opcode = code[pc++];
|
|
||||||
switch (opcode) {
|
|
||||||
case Op.PushConst: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(constants[idx]);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.PushString: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(strings[idx]);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.PushNull:
|
|
||||||
stack.push(null);
|
|
||||||
break;
|
|
||||||
case Op.PushTrue:
|
|
||||||
stack.push(true);
|
|
||||||
break;
|
|
||||||
case Op.PushFalse:
|
|
||||||
stack.push(false);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case Op.LoadLocal: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(locals[idx]);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.LoadOuter: {
|
|
||||||
const layer = code[pc++];
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
let c: ScopeChain = chain;
|
|
||||||
for (let i = 0; i < layer; i++) c = c.parent!;
|
|
||||||
stack.push(c.locals[idx]);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.StoreLocal: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
locals[idx] = stack.pop()!;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.AllocLocals: {
|
|
||||||
const n = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
for (let i = 0; i < n; i++) locals.push(null);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.MakeThunk: {
|
|
||||||
const bodyPc = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const labelIdx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const label = strings[labelIdx];
|
|
||||||
const scopeChain = chain;
|
|
||||||
const scopeCode = code;
|
|
||||||
const scopeDir = currentDir;
|
|
||||||
const scopeWith = withScope;
|
|
||||||
stack.push(
|
|
||||||
new NixThunk(
|
|
||||||
() => execFrame(scopeCode, bodyPc, scopeChain, scopeDir, scopeWith, null),
|
|
||||||
label,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.MakeClosure: {
|
|
||||||
const bodyPc = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const nSlots = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const closureChain = chain;
|
|
||||||
const closureCode = code;
|
|
||||||
const closureDir = currentDir;
|
|
||||||
const closureWith = withScope;
|
|
||||||
const func: NixFunction = (arg: NixValue) => {
|
|
||||||
const innerLocals = new Array<NixValue>(1 + nSlots).fill(null);
|
|
||||||
innerLocals[0] = arg;
|
|
||||||
const innerChain: ScopeChain = { locals: innerLocals, parent: closureChain };
|
|
||||||
return execFrame(closureCode, bodyPc, innerChain, closureDir, closureWith, null);
|
|
||||||
};
|
|
||||||
stack.push(func);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.MakePatternClosure: {
|
|
||||||
const bodyPc = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const nSlots = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const nRequired = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const nOptional = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const hasEllipsis = code[pc++] !== 0;
|
|
||||||
|
|
||||||
const required: string[] = [];
|
|
||||||
for (let i = 0; i < nRequired; i++) {
|
|
||||||
required.push(strings[readU32(code, pc)]);
|
|
||||||
pc += 4;
|
|
||||||
}
|
|
||||||
const optional: string[] = [];
|
|
||||||
for (let i = 0; i < nOptional; i++) {
|
|
||||||
optional.push(strings[readU32(code, pc)]);
|
|
||||||
pc += 4;
|
|
||||||
}
|
|
||||||
const positions = new Map<string, number>();
|
|
||||||
const nTotal = nRequired + nOptional;
|
|
||||||
for (let i = 0; i < nTotal; i++) {
|
|
||||||
const nameIdx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
positions.set(strings[nameIdx], spanId);
|
|
||||||
}
|
|
||||||
|
|
||||||
const closureChain = chain;
|
|
||||||
const closureCode = code;
|
|
||||||
const closureDir = currentDir;
|
|
||||||
const closureWith = withScope;
|
|
||||||
const func: NixFunction = (arg: NixValue) => {
|
|
||||||
const innerLocals = new Array<NixValue>(1 + nSlots).fill(null);
|
|
||||||
innerLocals[0] = arg;
|
|
||||||
const innerChain: ScopeChain = { locals: innerLocals, parent: closureChain };
|
|
||||||
return execFrame(closureCode, bodyPc, innerChain, closureDir, closureWith, null);
|
|
||||||
};
|
|
||||||
func.args = new NixArgs(required, optional, positions, hasEllipsis);
|
|
||||||
stack.push(func);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.Call: {
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const arg = stack.pop()!;
|
|
||||||
const func = stack.pop()!;
|
|
||||||
stack.push(call(func, arg, spanId));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.CallNoSpan: {
|
|
||||||
const arg = stack.pop()!;
|
|
||||||
const func = stack.pop()!;
|
|
||||||
stack.push(call(func, arg));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.MakeAttrs: {
|
|
||||||
const n = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const spanValues: number[] = [];
|
|
||||||
for (let i = 0; i < n; i++) {
|
|
||||||
spanValues.push(stack.pop() as number);
|
|
||||||
}
|
|
||||||
spanValues.reverse();
|
|
||||||
const map: NixAttrs = new Map();
|
|
||||||
const posMap = new Map<string, number>();
|
|
||||||
const pairs: [string, NixValue][] = [];
|
|
||||||
for (let i = 0; i < n; i++) {
|
|
||||||
const val = stack.pop()!;
|
|
||||||
const key = stack.pop() as string;
|
|
||||||
pairs.push([key, val]);
|
|
||||||
}
|
|
||||||
pairs.reverse();
|
|
||||||
for (let i = 0; i < n; i++) {
|
|
||||||
map.set(pairs[i][0], pairs[i][1]);
|
|
||||||
posMap.set(pairs[i][0], spanValues[i]);
|
|
||||||
}
|
|
||||||
stack.push(mkAttrs(map, posMap));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.MakeAttrsDyn: {
|
|
||||||
const nStatic = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const nDyn = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
|
|
||||||
const dynTriples: [NixValue, NixValue, number][] = [];
|
|
||||||
for (let i = 0; i < nDyn; i++) {
|
|
||||||
const dynSpan = stack.pop() as number;
|
|
||||||
const dynVal = stack.pop()!;
|
|
||||||
const dynKey = stack.pop()!;
|
|
||||||
dynTriples.push([dynKey, dynVal, dynSpan]);
|
|
||||||
}
|
|
||||||
dynTriples.reverse();
|
|
||||||
|
|
||||||
const spanValues: number[] = [];
|
|
||||||
for (let i = 0; i < nStatic; i++) {
|
|
||||||
spanValues.push(stack.pop() as number);
|
|
||||||
}
|
|
||||||
spanValues.reverse();
|
|
||||||
|
|
||||||
const map: NixAttrs = new Map();
|
|
||||||
const posMap = new Map<string, number>();
|
|
||||||
const pairs: [string, NixValue][] = [];
|
|
||||||
for (let i = 0; i < nStatic; i++) {
|
|
||||||
const val = stack.pop()!;
|
|
||||||
const key = stack.pop() as string;
|
|
||||||
pairs.push([key, val]);
|
|
||||||
}
|
|
||||||
pairs.reverse();
|
|
||||||
for (let i = 0; i < nStatic; i++) {
|
|
||||||
map.set(pairs[i][0], pairs[i][1]);
|
|
||||||
posMap.set(pairs[i][0], spanValues[i]);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dynKeys: NixValue[] = [];
|
|
||||||
const dynVals: NixValue[] = [];
|
|
||||||
const dynSpans: number[] = [];
|
|
||||||
for (const [k, v, s] of dynTriples) {
|
|
||||||
dynKeys.push(k);
|
|
||||||
dynVals.push(v);
|
|
||||||
dynSpans.push(s);
|
|
||||||
}
|
|
||||||
|
|
||||||
stack.push(mkAttrs(map, posMap, { dynKeys, dynVals, dynSpans }));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.MakeEmptyAttrs:
|
|
||||||
stack.push($e);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case Op.Select: {
|
|
||||||
const nKeys = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const keys: NixValue[] = [];
|
|
||||||
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
|
|
||||||
keys.reverse();
|
|
||||||
const obj = stack.pop()!;
|
|
||||||
stack.push(select(obj, keys, spanId));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.SelectDefault: {
|
|
||||||
const nKeys = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const defaultVal = stack.pop()!;
|
|
||||||
const keys: NixValue[] = [];
|
|
||||||
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
|
|
||||||
keys.reverse();
|
|
||||||
const obj = stack.pop()!;
|
|
||||||
stack.push(selectWithDefault(obj, keys, defaultVal, spanId));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.HasAttr: {
|
|
||||||
const nKeys = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const keys: NixValue[] = [];
|
|
||||||
for (let i = 0; i < nKeys; i++) keys.push(stack.pop()!);
|
|
||||||
keys.reverse();
|
|
||||||
const obj = stack.pop()!;
|
|
||||||
stack.push(hasAttr(obj, keys));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.MakeList: {
|
|
||||||
const count = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const items: NixValue[] = new Array(count);
|
|
||||||
for (let i = count - 1; i >= 0; i--) {
|
|
||||||
items[i] = stack.pop()!;
|
|
||||||
}
|
|
||||||
stack.push(items);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.OpAdd: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.add(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpSub: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.sub(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpMul: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.mul(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpDiv: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.div(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpEq: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.eq(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpNeq: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(!op.eq(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpLt: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.lt(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpGt: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.gt(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpLeq: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(!op.gt(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpGeq: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(!op.lt(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpConcat: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.concat(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpUpdate: {
|
|
||||||
const b = stack.pop()!;
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.update(a, b));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.OpNeg: {
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(op.sub(0n, a));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.OpNot: {
|
|
||||||
const a = stack.pop()!;
|
|
||||||
stack.push(!forceBool(a));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.ForceBool: {
|
|
||||||
const val = stack.pop()!;
|
|
||||||
stack.push(forceBool(val));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.JumpIfFalse: {
|
|
||||||
const offset = readI32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const val = stack.pop()!;
|
|
||||||
if (val === false) {
|
|
||||||
pc += offset;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.JumpIfTrue: {
|
|
||||||
const offset = readI32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const val = stack.pop()!;
|
|
||||||
if (val === true) {
|
|
||||||
pc += offset;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.Jump: {
|
|
||||||
const offset = readI32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
pc += offset;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.ConcatStrings: {
|
|
||||||
const nParts = readU16(code, pc);
|
|
||||||
pc += 2;
|
|
||||||
const forceString = code[pc++] !== 0;
|
|
||||||
const parts: NixValue[] = new Array(nParts);
|
|
||||||
for (let i = nParts - 1; i >= 0; i--) {
|
|
||||||
parts[i] = stack.pop()!;
|
|
||||||
}
|
|
||||||
stack.push(concatStringsWithContext(parts, forceString));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.ResolvePath: {
|
|
||||||
const pathExpr = stack.pop()!;
|
|
||||||
stack.push(resolvePath(currentDir, pathExpr));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.Assert: {
|
|
||||||
const rawIdx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
const expr = stack.pop()!;
|
|
||||||
const assertion = stack.pop()!;
|
|
||||||
stack.push(assert(assertion, expr, strings[rawIdx], spanId));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.PushWith: {
|
|
||||||
const namespace = stack.pop()!;
|
|
||||||
withScope = { env: namespace, last: withScope };
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.PopWith:
|
|
||||||
withScope = withScope!.last;
|
|
||||||
break;
|
|
||||||
case Op.WithLookup: {
|
|
||||||
const nameIdx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(lookupWith(strings[nameIdx], withScope!));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.LoadBuiltins:
|
|
||||||
stack.push(builtins);
|
|
||||||
break;
|
|
||||||
case Op.LoadBuiltin: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(builtins.get(strings[idx])!);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.MkPos: {
|
|
||||||
const spanId = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(mkPos(spanId));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.LoadReplBinding: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(Nix.getReplBinding(strings[idx]));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case Op.LoadScopedBinding: {
|
|
||||||
const idx = readU32(code, pc);
|
|
||||||
pc += 4;
|
|
||||||
stack.push(scopeMap!.get(strings[idx])!);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
case Op.Return:
|
|
||||||
return stack.pop()!;
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown bytecode opcode: ${opcode ? `0x${opcode.toString(16)}` : "undefined"} at pc=${pc - 1}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
declare const Nix: {
|
|
||||||
getReplBinding: (name: string) => NixValue;
|
|
||||||
};
|
|
||||||
|
|
||||||
export { strings as vmStrings, constants as vmConstants };
|
|
||||||
@@ -1,906 +0,0 @@
|
|||||||
use std::ops::Deref;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
|
||||||
use num_enum::TryFromPrimitive;
|
|
||||||
use rnix::TextRange;
|
|
||||||
|
|
||||||
use crate::ir::{ArgId, Attr, BinOpKind, Ir, Param, RawIrRef, SymId, ThunkId, UnOpKind};
|
|
||||||
|
|
||||||
#[derive(Clone, Hash, Eq, PartialEq)]
|
|
||||||
pub(crate) enum Constant {
|
|
||||||
Int(i64),
|
|
||||||
Float(u64),
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Bytecode {
|
|
||||||
pub code: Box<[u8]>,
|
|
||||||
pub current_dir: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) trait BytecodeContext {
|
|
||||||
fn intern_string(&mut self, s: &str) -> u32;
|
|
||||||
fn intern_constant(&mut self, c: Constant) -> u32;
|
|
||||||
fn register_span(&self, range: TextRange) -> u32;
|
|
||||||
fn get_sym(&self, id: SymId) -> &str;
|
|
||||||
fn get_current_dir(&self) -> &Path;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[repr(u8)]
|
|
||||||
#[derive(Clone, Copy, TryFromPrimitive)]
|
|
||||||
#[allow(clippy::enum_variant_names)]
|
|
||||||
pub enum Op {
|
|
||||||
PushConst = 0x01,
|
|
||||||
PushString = 0x02,
|
|
||||||
PushNull = 0x03,
|
|
||||||
PushTrue = 0x04,
|
|
||||||
PushFalse = 0x05,
|
|
||||||
|
|
||||||
LoadLocal = 0x06,
|
|
||||||
LoadOuter = 0x07,
|
|
||||||
StoreLocal = 0x08,
|
|
||||||
AllocLocals = 0x09,
|
|
||||||
|
|
||||||
MakeThunk = 0x0A,
|
|
||||||
MakeClosure = 0x0B,
|
|
||||||
MakePatternClosure = 0x0C,
|
|
||||||
|
|
||||||
Call = 0x0D,
|
|
||||||
CallNoSpan = 0x0E,
|
|
||||||
|
|
||||||
MakeAttrs = 0x0F,
|
|
||||||
MakeAttrsDyn = 0x10,
|
|
||||||
MakeEmptyAttrs = 0x11,
|
|
||||||
Select = 0x12,
|
|
||||||
SelectDefault = 0x13,
|
|
||||||
HasAttr = 0x14,
|
|
||||||
|
|
||||||
MakeList = 0x15,
|
|
||||||
|
|
||||||
OpAdd = 0x16,
|
|
||||||
OpSub = 0x17,
|
|
||||||
OpMul = 0x18,
|
|
||||||
OpDiv = 0x19,
|
|
||||||
OpEq = 0x20,
|
|
||||||
OpNeq = 0x21,
|
|
||||||
OpLt = 0x22,
|
|
||||||
OpGt = 0x23,
|
|
||||||
OpLeq = 0x24,
|
|
||||||
OpGeq = 0x25,
|
|
||||||
OpConcat = 0x26,
|
|
||||||
OpUpdate = 0x27,
|
|
||||||
|
|
||||||
OpNeg = 0x28,
|
|
||||||
OpNot = 0x29,
|
|
||||||
|
|
||||||
ForceBool = 0x30,
|
|
||||||
JumpIfFalse = 0x31,
|
|
||||||
JumpIfTrue = 0x32,
|
|
||||||
Jump = 0x33,
|
|
||||||
|
|
||||||
ConcatStrings = 0x34,
|
|
||||||
ResolvePath = 0x35,
|
|
||||||
|
|
||||||
Assert = 0x36,
|
|
||||||
|
|
||||||
PushWith = 0x37,
|
|
||||||
PopWith = 0x38,
|
|
||||||
WithLookup = 0x39,
|
|
||||||
|
|
||||||
LoadBuiltins = 0x40,
|
|
||||||
LoadBuiltin = 0x41,
|
|
||||||
|
|
||||||
MkPos = 0x43,
|
|
||||||
|
|
||||||
LoadReplBinding = 0x44,
|
|
||||||
LoadScopedBinding = 0x45,
|
|
||||||
|
|
||||||
Return = 0x46,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct ScopeInfo {
|
|
||||||
depth: u16,
|
|
||||||
arg_id: Option<ArgId>,
|
|
||||||
thunk_map: HashMap<ThunkId, u32>,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct BytecodeEmitter<'a, Ctx: BytecodeContext> {
|
|
||||||
ctx: &'a mut Ctx,
|
|
||||||
code: Vec<u8>,
|
|
||||||
scope_stack: Vec<ScopeInfo>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn compile_bytecode(ir: RawIrRef<'_>, ctx: &mut impl BytecodeContext) -> Bytecode {
|
|
||||||
let current_dir = ctx.get_current_dir().to_string_lossy().to_string();
|
|
||||||
let mut emitter = BytecodeEmitter::new(ctx);
|
|
||||||
emitter.emit_toplevel(ir);
|
|
||||||
Bytecode {
|
|
||||||
code: emitter.code.into_boxed_slice(),
|
|
||||||
current_dir,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn compile_bytecode_scoped(
|
|
||||||
ir: RawIrRef<'_>,
|
|
||||||
ctx: &mut impl BytecodeContext,
|
|
||||||
) -> Bytecode {
|
|
||||||
let current_dir = ctx.get_current_dir().to_string_lossy().to_string();
|
|
||||||
let mut emitter = BytecodeEmitter::new(ctx);
|
|
||||||
emitter.emit_toplevel_scoped(ir);
|
|
||||||
Bytecode {
|
|
||||||
code: emitter.code.into_boxed_slice(),
|
|
||||||
current_dir,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, Ctx: BytecodeContext> BytecodeEmitter<'a, Ctx> {
|
|
||||||
fn new(ctx: &'a mut Ctx) -> Self {
|
|
||||||
Self {
|
|
||||||
ctx,
|
|
||||||
code: Vec::with_capacity(4096),
|
|
||||||
scope_stack: Vec::with_capacity(32),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Appends a single opcode byte to the instruction stream.
    #[inline]
    fn emit_op(&mut self, op: Op) {
        self.code.push(op as u8);
    }
|
|
||||||
|
|
||||||
    /// Appends a raw one-byte operand.
    #[inline]
    fn emit_u8(&mut self, val: u8) {
        self.code.push(val);
    }
|
|
||||||
|
|
||||||
    /// Appends a two-byte operand in little-endian order.
    #[inline]
    fn emit_u16(&mut self, val: u16) {
        self.code.extend_from_slice(&val.to_le_bytes());
    }
|
|
||||||
|
|
||||||
    /// Appends a four-byte operand in little-endian order.
    #[inline]
    fn emit_u32(&mut self, val: u32) {
        self.code.extend_from_slice(&val.to_le_bytes());
    }
|
|
||||||
|
|
||||||
    /// Reserves four zero bytes to be filled in later by `patch_i32` and
    /// returns the offset of the reserved slot.
    #[inline]
    fn emit_i32_placeholder(&mut self) -> usize {
        let offset = self.code.len();
        self.code.extend_from_slice(&[0u8; 4]);
        offset
    }
|
|
||||||
    /// Overwrites a previously reserved 4-byte slot at `offset` with `val`
    /// (little-endian).
    #[inline]
    fn patch_i32(&mut self, offset: usize, val: i32) {
        self.code[offset..offset + 4].copy_from_slice(&val.to_le_bytes());
    }
|
|
||||||
|
|
||||||
    /// Emits an unconditional `Jump` with an unresolved displacement and
    /// returns the offset of its 4-byte operand slot for later patching.
    #[inline]
    fn emit_jump_placeholder(&mut self) -> usize {
        self.emit_op(Op::Jump);
        self.emit_i32_placeholder()
    }
|
|
||||||
|
|
||||||
    /// Points a pending jump at the current end of the code stream.
    ///
    /// Displacements are relative to the first byte *after* the 4-byte
    /// operand, hence the `- 4`.
    #[inline]
    fn patch_jump_target(&mut self, placeholder_offset: usize) {
        let current_pos = self.code.len();
        let relative_offset = (current_pos as i32) - (placeholder_offset as i32) - 4;
        self.patch_i32(placeholder_offset, relative_offset);
    }
|
|
||||||
|
|
||||||
    /// Depth of the innermost scope, or 0 when no scope has been pushed yet.
    fn current_depth(&self) -> u16 {
        self.scope_stack.last().map_or(0, |s| s.depth)
    }
|
|
||||||
|
|
||||||
fn resolve_thunk(&self, id: ThunkId) -> (u16, u32) {
|
|
||||||
for scope in self.scope_stack.iter().rev() {
|
|
||||||
if let Some(&local_idx) = scope.thunk_map.get(&id) {
|
|
||||||
let layer = self.current_depth() - scope.depth;
|
|
||||||
return (layer, local_idx);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
panic!("ThunkId {:?} not found in any scope", id);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn resolve_arg(&self, id: ArgId) -> (u16, u32) {
|
|
||||||
for scope in self.scope_stack.iter().rev() {
|
|
||||||
if scope.arg_id == Some(id) {
|
|
||||||
let layer = self.current_depth() - scope.depth;
|
|
||||||
return (layer, 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
panic!("ArgId {:?} not found in any scope", id);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_load(&mut self, layer: u16, local: u32) {
|
|
||||||
if layer == 0 {
|
|
||||||
self.emit_op(Op::LoadLocal);
|
|
||||||
self.emit_u32(local);
|
|
||||||
} else {
|
|
||||||
self.emit_op(Op::LoadOuter);
|
|
||||||
self.emit_u8(layer as u8);
|
|
||||||
self.emit_u32(local);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Counts how many `with`-introduced thunks occur anywhere inside `ir`,
    /// so the enclosing frame can reserve local slots for them up front.
    ///
    /// NOTE(review): this walks only the expression tree itself — it does
    /// not descend into the bodies of the thunks it counts, whereas
    /// `collect_all_thunks` also processes collected thunk bodies via its
    /// worklist. Confirm the two agree on the number of required slots.
    fn count_with_thunks(&self, ir: RawIrRef<'_>) -> usize {
        match ir.deref() {
            // `With` and `TopLevel` are the nodes that actually own thunks;
            // everything else just recurses into child expressions.
            Ir::With { thunks, body, .. } => thunks.len() + self.count_with_thunks(*body),
            Ir::TopLevel { thunks, body } => thunks.len() + self.count_with_thunks(*body),
            Ir::If { cond, consq, alter } => {
                self.count_with_thunks(*cond)
                    + self.count_with_thunks(*consq)
                    + self.count_with_thunks(*alter)
            }
            Ir::BinOp { lhs, rhs, .. } => {
                self.count_with_thunks(*lhs) + self.count_with_thunks(*rhs)
            }
            Ir::UnOp { rhs, .. } => self.count_with_thunks(*rhs),
            Ir::Call { func, arg, .. } => {
                self.count_with_thunks(*func) + self.count_with_thunks(*arg)
            }
            Ir::Assert {
                assertion, expr, ..
            } => self.count_with_thunks(*assertion) + self.count_with_thunks(*expr),
            Ir::Select { expr, default, .. } => {
                self.count_with_thunks(*expr) + default.map_or(0, |d| self.count_with_thunks(d))
            }
            Ir::HasAttr { lhs, .. } => self.count_with_thunks(*lhs),
            Ir::ConcatStrings { parts, .. } => {
                parts.iter().map(|p| self.count_with_thunks(*p)).sum()
            }
            Ir::Path(p) => self.count_with_thunks(*p),
            Ir::List { items } => items.iter().map(|item| self.count_with_thunks(*item)).sum(),
            Ir::AttrSet { stcs, dyns } => {
                // Static attrs contribute their values; dynamic attrs
                // contribute both key and value expressions.
                stcs.iter()
                    .map(|(_, &(val, _))| self.count_with_thunks(val))
                    .sum::<usize>()
                    + dyns
                        .iter()
                        .map(|&(k, v, _)| self.count_with_thunks(k) + self.count_with_thunks(v))
                        .sum::<usize>()
            }
            // Leaf nodes (literals, variable references, …) own no thunks.
            _ => 0,
        }
    }
|
|
||||||
|
|
||||||
fn collect_all_thunks<'ir>(
|
|
||||||
&self,
|
|
||||||
own_thunks: &[(ThunkId, RawIrRef<'ir>)],
|
|
||||||
body: RawIrRef<'ir>,
|
|
||||||
) -> Vec<(ThunkId, RawIrRef<'ir>)> {
|
|
||||||
let mut all = Vec::from(own_thunks);
|
|
||||||
self.collect_with_thunks_recursive(body, &mut all);
|
|
||||||
let mut i = 0;
|
|
||||||
while i < all.len() {
|
|
||||||
let thunk_body = all[i].1;
|
|
||||||
self.collect_with_thunks_recursive(thunk_body, &mut all);
|
|
||||||
i += 1;
|
|
||||||
}
|
|
||||||
all
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Appends every `(ThunkId, body)` pair introduced by `with` (or nested
    /// `TopLevel`) nodes inside `ir` to `out`, recursing through all child
    /// expressions. Does not recurse into the collected thunk bodies —
    /// `collect_all_thunks` drives that via its worklist.
    fn collect_with_thunks_recursive<'ir>(
        &self,
        ir: RawIrRef<'ir>,
        out: &mut Vec<(ThunkId, RawIrRef<'ir>)>,
    ) {
        match ir.deref() {
            // Thunk-owning nodes: record their thunks, then keep walking.
            Ir::With { thunks, body, .. } => {
                for &(id, inner) in thunks.iter() {
                    out.push((id, inner));
                }
                self.collect_with_thunks_recursive(*body, out);
            }
            Ir::TopLevel { thunks, body } => {
                for &(id, inner) in thunks.iter() {
                    out.push((id, inner));
                }
                self.collect_with_thunks_recursive(*body, out);
            }
            // Pure traversal below: visit every child expression.
            Ir::If { cond, consq, alter } => {
                self.collect_with_thunks_recursive(*cond, out);
                self.collect_with_thunks_recursive(*consq, out);
                self.collect_with_thunks_recursive(*alter, out);
            }
            Ir::BinOp { lhs, rhs, .. } => {
                self.collect_with_thunks_recursive(*lhs, out);
                self.collect_with_thunks_recursive(*rhs, out);
            }
            Ir::UnOp { rhs, .. } => self.collect_with_thunks_recursive(*rhs, out),
            Ir::Call { func, arg, .. } => {
                self.collect_with_thunks_recursive(*func, out);
                self.collect_with_thunks_recursive(*arg, out);
            }
            Ir::Assert {
                assertion, expr, ..
            } => {
                self.collect_with_thunks_recursive(*assertion, out);
                self.collect_with_thunks_recursive(*expr, out);
            }
            Ir::Select { expr, default, .. } => {
                self.collect_with_thunks_recursive(*expr, out);
                if let Some(d) = default {
                    self.collect_with_thunks_recursive(*d, out);
                }
            }
            Ir::HasAttr { lhs, .. } => self.collect_with_thunks_recursive(*lhs, out),
            Ir::ConcatStrings { parts, .. } => {
                for p in parts.iter() {
                    self.collect_with_thunks_recursive(*p, out);
                }
            }
            Ir::Path(p) => self.collect_with_thunks_recursive(*p, out),
            Ir::List { items } => {
                for item in items.iter() {
                    self.collect_with_thunks_recursive(*item, out);
                }
            }
            Ir::AttrSet { stcs, dyns } => {
                for (_, &(val, _)) in stcs.iter() {
                    self.collect_with_thunks_recursive(val, out);
                }
                // Dynamic attrs: both the key expression and the value can
                // contain `with` thunks.
                for &(key, val, _) in dyns.iter() {
                    self.collect_with_thunks_recursive(key, out);
                    self.collect_with_thunks_recursive(val, out);
                }
            }
            // Leaf nodes carry no thunks.
            _ => {}
        }
    }
|
|
||||||
|
|
||||||
fn push_scope(&mut self, has_arg: bool, arg_id: Option<ArgId>, thunk_ids: &[ThunkId]) {
|
|
||||||
let depth = self.scope_stack.len() as u16;
|
|
||||||
let thunk_base = if has_arg { 1u32 } else { 0u32 };
|
|
||||||
let thunk_map = thunk_ids
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, &id)| (id, thunk_base + i as u32))
|
|
||||||
.collect();
|
|
||||||
self.scope_stack.push(ScopeInfo {
|
|
||||||
depth,
|
|
||||||
arg_id,
|
|
||||||
thunk_map,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Leaves the innermost lexical scope.
    fn pop_scope(&mut self) {
        self.scope_stack.pop();
    }
|
|
||||||
|
|
||||||
    /// Emits a complete program for a top-level IR node: allocates local
    /// slots for every thunk the frame will hold, materializes the scope's
    /// own thunks, evaluates the body, and terminates with `Return`.
    fn emit_toplevel(&mut self, ir: RawIrRef<'_>) {
        match ir.deref() {
            Ir::TopLevel { body, thunks } => {
                // Reserve one slot per declared thunk plus every `with`
                // thunk reachable from the body.
                let with_thunk_count = self.count_with_thunks(*body);
                let total_slots = thunks.len() + with_thunk_count;

                // All thunks (own + with) must be registered in the scope
                // so later `resolve_thunk` calls find their slots.
                let all_thunks = self.collect_all_thunks(thunks, *body);
                let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();

                self.push_scope(false, None, &thunk_ids);

                if total_slots > 0 {
                    self.emit_op(Op::AllocLocals);
                    self.emit_u32(total_slots as u32);
                }

                self.emit_scope_thunks(thunks);
                self.emit_expr(*body);
                self.emit_op(Op::Return);

                self.pop_scope();
            }
            // Bare expression without a TopLevel wrapper: emit it in an
            // empty scope.
            _ => {
                self.push_scope(false, None, &[]);
                self.emit_expr(ir);
                self.emit_op(Op::Return);
                self.pop_scope();
            }
        }
    }
|
|
||||||
|
|
||||||
fn emit_toplevel_scoped(&mut self, ir: RawIrRef<'_>) {
|
|
||||||
match ir.deref() {
|
|
||||||
Ir::TopLevel { body, thunks } => {
|
|
||||||
let with_thunk_count = self.count_with_thunks(*body);
|
|
||||||
let total_slots = thunks.len() + with_thunk_count;
|
|
||||||
|
|
||||||
let all_thunks = self.collect_all_thunks(thunks, *body);
|
|
||||||
let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();
|
|
||||||
|
|
||||||
self.push_scope(false, None, &thunk_ids);
|
|
||||||
|
|
||||||
if total_slots > 0 {
|
|
||||||
self.emit_op(Op::AllocLocals);
|
|
||||||
self.emit_u32(total_slots as u32);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.emit_scope_thunks(thunks);
|
|
||||||
self.emit_expr(*body);
|
|
||||||
self.emit_op(Op::Return);
|
|
||||||
|
|
||||||
self.pop_scope();
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
self.push_scope(false, None, &[]);
|
|
||||||
self.emit_expr(ir);
|
|
||||||
self.emit_op(Op::Return);
|
|
||||||
self.pop_scope();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Emits the code for each thunk in `thunks` and stores the resulting
    /// thunk value into its local slot.
    ///
    /// Layout per thunk: a `Jump` skips over the inlined thunk body (which
    /// ends in `Return`), then `MakeThunk` captures the body's entry point
    /// and a debug label, and `StoreLocal` saves the thunk into the
    /// current frame.
    fn emit_scope_thunks(&mut self, thunks: &[(ThunkId, RawIrRef<'_>)]) {
        for &(id, inner) in thunks {
            // Debug label ("e<id>") interned for diagnostics.
            let label = format!("e{}", id.0);
            let label_idx = self.ctx.intern_string(&label);

            let skip_patch = self.emit_jump_placeholder();
            let entry_point = self.code.len() as u32;
            self.emit_expr(inner);
            self.emit_op(Op::Return);
            self.patch_jump_target(skip_patch);
            self.emit_op(Op::MakeThunk);
            self.emit_u32(entry_point);
            self.emit_u32(label_idx);
            // Thunks are always stored into the frame that declared them,
            // so only the local index of the resolution is needed.
            let (_, local_idx) = self.resolve_thunk(id);
            self.emit_op(Op::StoreLocal);
            self.emit_u32(local_idx);
        }
    }
|
|
||||||
|
|
||||||
    /// Emits the instructions that leave the value of `ir` on the stack.
    ///
    /// Dispatches on every IR node kind; the order in which operands are
    /// pushed always matches what the corresponding opcode pops at runtime.
    fn emit_expr(&mut self, ir: RawIrRef<'_>) {
        match ir.deref() {
            // Literals: interned constants or dedicated push opcodes.
            &Ir::Int(x) => {
                let idx = self.ctx.intern_constant(Constant::Int(x));
                self.emit_op(Op::PushConst);
                self.emit_u32(idx);
            }
            &Ir::Float(x) => {
                // Floats are interned by bit pattern so equal-bit values
                // share one constant slot.
                let idx = self.ctx.intern_constant(Constant::Float(x.to_bits()));
                self.emit_op(Op::PushConst);
                self.emit_u32(idx);
            }
            &Ir::Bool(true) => self.emit_op(Op::PushTrue),
            &Ir::Bool(false) => self.emit_op(Op::PushFalse),
            Ir::Null => self.emit_op(Op::PushNull),
            Ir::Str(s) => {
                let idx = self.ctx.intern_string(s.deref());
                self.emit_op(Op::PushString);
                self.emit_u32(idx);
            }
            // Paths: evaluate the inner expression, then resolve at runtime
            // (relative to the program's current dir).
            &Ir::Path(p) => {
                self.emit_expr(p);
                self.emit_op(Op::ResolvePath);
            }
            &Ir::If { cond, consq, alter } => {
                self.emit_expr(cond);
                self.emit_op(Op::ForceBool);

                // Conditional jump to the else branch; displacements are
                // relative to the byte after the 4-byte operand.
                self.emit_op(Op::JumpIfFalse);
                let else_placeholder = self.emit_i32_placeholder();
                let after_jif = self.code.len();

                self.emit_expr(consq);

                // Unconditional jump over the else branch.
                self.emit_op(Op::Jump);
                let end_placeholder = self.emit_i32_placeholder();
                let after_jump = self.code.len();

                let else_offset = (after_jump as i32) - (after_jif as i32);
                self.patch_i32(else_placeholder, else_offset);

                self.emit_expr(alter);

                let end_offset = (self.code.len() as i32) - (after_jump as i32);
                self.patch_i32(end_placeholder, end_offset);
            }
            &Ir::BinOp { lhs, rhs, kind } => {
                self.emit_binop(lhs, rhs, kind);
            }
            &Ir::UnOp { rhs, kind } => match kind {
                UnOpKind::Neg => {
                    self.emit_expr(rhs);
                    self.emit_op(Op::OpNeg);
                }
                UnOpKind::Not => {
                    self.emit_expr(rhs);
                    self.emit_op(Op::OpNot);
                }
            },
            &Ir::Func {
                body,
                ref param,
                arg,
                ref thunks,
            } => {
                self.emit_func(arg, thunks, param, body);
            }
            Ir::AttrSet { stcs, dyns } => {
                self.emit_attrset(stcs, dyns);
            }
            Ir::List { items } => {
                // Items are pushed in order; MakeList pops `len` of them.
                for &item in items.iter() {
                    self.emit_expr(item);
                }
                self.emit_op(Op::MakeList);
                self.emit_u32(items.len() as u32);
            }
            &Ir::Call { func, arg, span } => {
                // Callee first, then the argument; the span id feeds
                // call-site error reporting.
                self.emit_expr(func);
                self.emit_expr(arg);
                let span_id = self.ctx.register_span(span);
                self.emit_op(Op::Call);
                self.emit_u32(span_id);
            }
            &Ir::Arg(id) => {
                let (layer, local) = self.resolve_arg(id);
                self.emit_load(layer, local);
            }
            // Nested TopLevel (e.g. `let` bodies) reuses the current frame.
            &Ir::TopLevel { body, ref thunks } => {
                self.emit_toplevel_inner(body, thunks);
            }
            &Ir::Select {
                expr,
                ref attrpath,
                default,
                span,
            } => {
                self.emit_select(expr, attrpath, default, span);
            }
            &Ir::Thunk(id) => {
                let (layer, local) = self.resolve_thunk(id);
                self.emit_load(layer, local);
            }
            Ir::Builtins => {
                self.emit_op(Op::LoadBuiltins);
            }
            &Ir::Builtin(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadBuiltin);
                self.emit_u32(idx);
            }
            &Ir::ConcatStrings {
                ref parts,
                force_string,
            } => {
                for &part in parts.iter() {
                    self.emit_expr(part);
                }
                self.emit_op(Op::ConcatStrings);
                self.emit_u16(parts.len() as u16);
                // Flag byte: nonzero forces the result to a string.
                self.emit_u8(if force_string { 1 } else { 0 });
            }
            &Ir::HasAttr { lhs, ref rhs } => {
                self.emit_has_attr(lhs, rhs);
            }
            Ir::Assert {
                assertion,
                expr,
                assertion_raw,
                span,
            } => {
                // The raw assertion source text is kept for error messages.
                let raw_idx = self.ctx.intern_string(assertion_raw);
                let span_id = self.ctx.register_span(*span);
                self.emit_expr(*assertion);
                self.emit_expr(*expr);
                self.emit_op(Op::Assert);
                self.emit_u32(raw_idx);
                self.emit_u32(span_id);
            }
            &Ir::CurPos(span) => {
                let span_id = self.ctx.register_span(span);
                self.emit_op(Op::MkPos);
                self.emit_u32(span_id);
            }
            &Ir::ReplBinding(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadReplBinding);
                self.emit_u32(idx);
            }
            &Ir::ScopedImportBinding(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::LoadScopedBinding);
                self.emit_u32(idx);
            }
            &Ir::With {
                namespace,
                body,
                ref thunks,
            } => {
                self.emit_with(namespace, body, thunks);
            }
            &Ir::WithLookup(name) => {
                let sym = self.ctx.get_sym(name).to_string();
                let idx = self.ctx.intern_string(&sym);
                self.emit_op(Op::WithLookup);
                self.emit_u32(idx);
            }
        }
    }
|
|
||||||
|
|
||||||
fn emit_binop(&mut self, lhs: RawIrRef<'_>, rhs: RawIrRef<'_>, kind: BinOpKind) {
|
|
||||||
use BinOpKind::*;
|
|
||||||
match kind {
|
|
||||||
And => {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::JumpIfFalse);
|
|
||||||
let skip_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jif = self.code.len();
|
|
||||||
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::Jump);
|
|
||||||
let end_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jump = self.code.len();
|
|
||||||
|
|
||||||
let false_offset = (after_jump as i32) - (after_jif as i32);
|
|
||||||
self.patch_i32(skip_placeholder, false_offset);
|
|
||||||
|
|
||||||
self.emit_op(Op::PushFalse);
|
|
||||||
|
|
||||||
let end_offset = (self.code.len() as i32) - (after_jump as i32);
|
|
||||||
self.patch_i32(end_placeholder, end_offset);
|
|
||||||
}
|
|
||||||
Or => {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::JumpIfTrue);
|
|
||||||
let skip_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jit = self.code.len();
|
|
||||||
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::Jump);
|
|
||||||
let end_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jump = self.code.len();
|
|
||||||
|
|
||||||
let true_offset = (after_jump as i32) - (after_jit as i32);
|
|
||||||
self.patch_i32(skip_placeholder, true_offset);
|
|
||||||
|
|
||||||
self.emit_op(Op::PushTrue);
|
|
||||||
|
|
||||||
let end_offset = (self.code.len() as i32) - (after_jump as i32);
|
|
||||||
self.patch_i32(end_placeholder, end_offset);
|
|
||||||
}
|
|
||||||
Impl => {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::JumpIfFalse);
|
|
||||||
let skip_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jif = self.code.len();
|
|
||||||
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_op(Op::ForceBool);
|
|
||||||
self.emit_op(Op::Jump);
|
|
||||||
let end_placeholder = self.emit_i32_placeholder();
|
|
||||||
let after_jump = self.code.len();
|
|
||||||
|
|
||||||
let true_offset = (after_jump as i32) - (after_jif as i32);
|
|
||||||
self.patch_i32(skip_placeholder, true_offset);
|
|
||||||
|
|
||||||
self.emit_op(Op::PushTrue);
|
|
||||||
|
|
||||||
let end_offset = (self.code.len() as i32) - (after_jump as i32);
|
|
||||||
self.patch_i32(end_placeholder, end_offset);
|
|
||||||
}
|
|
||||||
PipeL => {
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_op(Op::CallNoSpan);
|
|
||||||
}
|
|
||||||
PipeR => {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_op(Op::CallNoSpan);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
self.emit_expr(rhs);
|
|
||||||
self.emit_op(match kind {
|
|
||||||
Add => Op::OpAdd,
|
|
||||||
Sub => Op::OpSub,
|
|
||||||
Mul => Op::OpMul,
|
|
||||||
Div => Op::OpDiv,
|
|
||||||
Eq => Op::OpEq,
|
|
||||||
Neq => Op::OpNeq,
|
|
||||||
Lt => Op::OpLt,
|
|
||||||
Gt => Op::OpGt,
|
|
||||||
Leq => Op::OpLeq,
|
|
||||||
Geq => Op::OpGeq,
|
|
||||||
Con => Op::OpConcat,
|
|
||||||
Upd => Op::OpUpdate,
|
|
||||||
_ => unreachable!(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Emits a function value: the body is compiled inline (jumped over at
    /// definition time) and wrapped in either a plain closure or a
    /// pattern-matching closure, depending on whether `param` is present.
    fn emit_func(
        &mut self,
        arg: ArgId,
        thunks: &[(ThunkId, RawIrRef<'_>)],
        param: &Option<Param<'_>>,
        body: RawIrRef<'_>,
    ) {
        // The new frame needs a slot for every declared thunk plus every
        // `with` thunk reachable from the body.
        let with_thunk_count = self.count_with_thunks(body);
        let total_slots = thunks.len() + with_thunk_count;

        let all_thunks = self.collect_all_thunks(thunks, body);
        let thunk_ids: Vec<ThunkId> = all_thunks.iter().map(|&(id, _)| id).collect();

        // Jump over the inlined function body; `entry_point` is recorded in
        // the closure so calls can jump back into it.
        let skip_patch = self.emit_jump_placeholder();
        let entry_point = self.code.len() as u32;
        self.push_scope(true, Some(arg), &thunk_ids);
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
        self.emit_op(Op::Return);
        self.pop_scope();
        self.patch_jump_target(skip_patch);

        if let Some(Param {
            required,
            optional,
            ellipsis,
        }) = param
        {
            // Pattern-closure operand layout: entry point, slot count,
            // required/optional counts, ellipsis flag, then three name
            // tables — required names, optional names, and (name, span)
            // pairs for all parameters.
            self.emit_op(Op::MakePatternClosure);
            self.emit_u32(entry_point);
            self.emit_u32(total_slots as u32);
            self.emit_u16(required.len() as u16);
            self.emit_u16(optional.len() as u16);
            self.emit_u8(if *ellipsis { 1 } else { 0 });

            for &(sym, _) in required.iter() {
                let name = self.ctx.get_sym(sym).to_string();
                let idx = self.ctx.intern_string(&name);
                self.emit_u32(idx);
            }
            for &(sym, _) in optional.iter() {
                let name = self.ctx.get_sym(sym).to_string();
                let idx = self.ctx.intern_string(&name);
                self.emit_u32(idx);
            }
            // Span table for diagnostics, required params first.
            for &(sym, span) in required.iter().chain(optional.iter()) {
                let name = self.ctx.get_sym(sym).to_string();
                let name_idx = self.ctx.intern_string(&name);
                let span_id = self.ctx.register_span(span);
                self.emit_u32(name_idx);
                self.emit_u32(span_id);
            }
        } else {
            self.emit_op(Op::MakeClosure);
            self.emit_u32(entry_point);
            self.emit_u32(total_slots as u32);
        }
    }
|
|
||||||
|
|
||||||
fn emit_attrset(
|
|
||||||
&mut self,
|
|
||||||
stcs: &crate::ir::HashMap<'_, SymId, (RawIrRef<'_>, TextRange)>,
|
|
||||||
dyns: &[(RawIrRef<'_>, RawIrRef<'_>, TextRange)],
|
|
||||||
) {
|
|
||||||
if stcs.is_empty() && dyns.is_empty() {
|
|
||||||
self.emit_op(Op::MakeEmptyAttrs);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !dyns.is_empty() {
|
|
||||||
for (&sym, &(val, _)) in stcs.iter() {
|
|
||||||
let key = self.ctx.get_sym(sym).to_string();
|
|
||||||
let idx = self.ctx.intern_string(&key);
|
|
||||||
self.emit_op(Op::PushString);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
self.emit_expr(val);
|
|
||||||
}
|
|
||||||
for (_, &(_, span)) in stcs.iter() {
|
|
||||||
let span_id = self.ctx.register_span(span);
|
|
||||||
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
|
|
||||||
self.emit_op(Op::PushConst);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
}
|
|
||||||
for &(key, val, span) in dyns.iter() {
|
|
||||||
self.emit_expr(key);
|
|
||||||
self.emit_expr(val);
|
|
||||||
let span_id = self.ctx.register_span(span);
|
|
||||||
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
|
|
||||||
self.emit_op(Op::PushConst);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
}
|
|
||||||
self.emit_op(Op::MakeAttrsDyn);
|
|
||||||
self.emit_u32(stcs.len() as u32);
|
|
||||||
self.emit_u32(dyns.len() as u32);
|
|
||||||
} else {
|
|
||||||
for (&sym, &(val, _)) in stcs.iter() {
|
|
||||||
let key = self.ctx.get_sym(sym).to_string();
|
|
||||||
let idx = self.ctx.intern_string(&key);
|
|
||||||
self.emit_op(Op::PushString);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
self.emit_expr(val);
|
|
||||||
}
|
|
||||||
for (_, &(_, span)) in stcs.iter() {
|
|
||||||
let span_id = self.ctx.register_span(span);
|
|
||||||
let idx = self.ctx.intern_constant(Constant::Int(span_id as i64));
|
|
||||||
self.emit_op(Op::PushConst);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
}
|
|
||||||
self.emit_op(Op::MakeAttrs);
|
|
||||||
self.emit_u32(stcs.len() as u32);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_select(
|
|
||||||
&mut self,
|
|
||||||
expr: RawIrRef<'_>,
|
|
||||||
attrpath: &[Attr<RawIrRef<'_>>],
|
|
||||||
default: Option<RawIrRef<'_>>,
|
|
||||||
span: TextRange,
|
|
||||||
) {
|
|
||||||
self.emit_expr(expr);
|
|
||||||
for attr in attrpath.iter() {
|
|
||||||
match attr {
|
|
||||||
Attr::Str(sym, _) => {
|
|
||||||
let key = self.ctx.get_sym(*sym).to_string();
|
|
||||||
let idx = self.ctx.intern_string(&key);
|
|
||||||
self.emit_op(Op::PushString);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
}
|
|
||||||
Attr::Dynamic(expr, _) => {
|
|
||||||
self.emit_expr(*expr);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(default) = default {
|
|
||||||
self.emit_expr(default);
|
|
||||||
let span_id = self.ctx.register_span(span);
|
|
||||||
self.emit_op(Op::SelectDefault);
|
|
||||||
self.emit_u16(attrpath.len() as u16);
|
|
||||||
self.emit_u32(span_id);
|
|
||||||
} else {
|
|
||||||
let span_id = self.ctx.register_span(span);
|
|
||||||
self.emit_op(Op::Select);
|
|
||||||
self.emit_u16(attrpath.len() as u16);
|
|
||||||
self.emit_u32(span_id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_has_attr(&mut self, lhs: RawIrRef<'_>, rhs: &[Attr<RawIrRef<'_>>]) {
|
|
||||||
self.emit_expr(lhs);
|
|
||||||
for attr in rhs.iter() {
|
|
||||||
match attr {
|
|
||||||
Attr::Str(sym, _) => {
|
|
||||||
let key = self.ctx.get_sym(*sym).to_string();
|
|
||||||
let idx = self.ctx.intern_string(&key);
|
|
||||||
self.emit_op(Op::PushString);
|
|
||||||
self.emit_u32(idx);
|
|
||||||
}
|
|
||||||
Attr::Dynamic(expr, _) => {
|
|
||||||
self.emit_expr(*expr);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.emit_op(Op::HasAttr);
|
|
||||||
self.emit_u16(rhs.len() as u16);
|
|
||||||
}
|
|
||||||
|
|
||||||
    /// Emits `with namespace; body`: the namespace value is pushed onto the
    /// `with` stack for the duration of the body, and the `with` scope's
    /// thunks are materialized (into slots reserved by the enclosing
    /// `AllocLocals`) before the body runs.
    fn emit_with(
        &mut self,
        namespace: RawIrRef<'_>,
        body: RawIrRef<'_>,
        thunks: &[(ThunkId, RawIrRef<'_>)],
    ) {
        self.emit_expr(namespace);
        self.emit_op(Op::PushWith);
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
        self.emit_op(Op::PopWith);
    }
|
|
||||||
|
|
||||||
    /// Emits a nested `TopLevel` node in-place: its thunks are materialized
    /// into the current frame (their slots were reserved by the enclosing
    /// `AllocLocals`), then the body is evaluated.
    fn emit_toplevel_inner(&mut self, body: RawIrRef<'_>, thunks: &[(ThunkId, RawIrRef<'_>)]) {
        self.emit_scope_thunks(thunks);
        self.emit_expr(body);
    }
|
|
||||||
}
|
|
||||||
@@ -1,9 +1,6 @@
|
|||||||
use std::fmt::{self, Write as _};
|
use std::fmt::{self, Write as _};
|
||||||
use std::ops::Deref;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
use rnix::TextRange;
|
|
||||||
|
|
||||||
use crate::ir::*;
|
use crate::ir::*;
|
||||||
use crate::value::Symbol;
|
use crate::value::Symbol;
|
||||||
|
|
||||||
@@ -29,23 +26,46 @@ macro_rules! code {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn compile<const SCOPED: bool>(expr: RawIrRef<'_>, ctx: &impl CodegenContext) -> String {
|
pub(crate) fn compile(expr: &Ir, ctx: &impl CodegenContext) -> String {
|
||||||
let mut buf = CodeBuffer::with_capacity(8192);
|
let mut buf = CodeBuffer::with_capacity(8192);
|
||||||
|
|
||||||
code!(
|
code!(&mut buf, ctx; "(()=>{");
|
||||||
&mut buf, ctx;
|
|
||||||
"((" { if SCOPED { "_s" } else { "" } } ")=>{"
|
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
|
||||||
"const _d="
|
code!(&mut buf, ctx; "Nix.DEBUG_THUNKS.enabled=true;");
|
||||||
|
}
|
||||||
|
|
||||||
|
code!(&mut buf, ctx;
|
||||||
|
"const __currentDir="
|
||||||
quoted(&ctx.get_current_dir().display().to_string())
|
quoted(&ctx.get_current_dir().display().to_string())
|
||||||
",_w=null;"
|
";const __with=null;return "
|
||||||
"return " expr
|
expr
|
||||||
"})" { if SCOPED { "" } else { "()" } }
|
"})()");
|
||||||
|
|
||||||
|
buf.into_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn compile_scoped(expr: &Ir, ctx: &impl CodegenContext) -> String {
|
||||||
|
let mut buf = CodeBuffer::with_capacity(8192);
|
||||||
|
|
||||||
|
code!(&mut buf, ctx; "((__scope)=>{");
|
||||||
|
|
||||||
|
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
|
||||||
|
code!(&mut buf, ctx; "Nix.DEBUG_THUNKS.enabled=true;");
|
||||||
|
}
|
||||||
|
|
||||||
|
code!(&mut buf, ctx;
|
||||||
|
"const __currentDir="
|
||||||
|
quoted(&ctx.get_current_dir().display().to_string())
|
||||||
|
";return "
|
||||||
|
expr
|
||||||
|
"})"
|
||||||
);
|
);
|
||||||
|
|
||||||
buf.into_string()
|
buf.into_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
struct CodeBuffer {
|
pub(crate) struct CodeBuffer {
|
||||||
buf: String,
|
buf: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -171,16 +191,29 @@ where
|
|||||||
|
|
||||||
impl<Ctx: CodegenContext> Compile<Ctx> for rnix::TextRange {
|
impl<Ctx: CodegenContext> Compile<Ctx> for rnix::TextRange {
|
||||||
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
code!(buf, "{}", ctx.register_span(*self));
|
code!(
|
||||||
|
buf,
|
||||||
|
"\"{}:{}:{}\"",
|
||||||
|
ctx.get_current_source_id(),
|
||||||
|
usize::from(self.start()),
|
||||||
|
usize::from(self.end())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) trait CodegenContext {
|
pub(crate) trait CodegenContext {
|
||||||
|
fn get_ir(&self, id: ExprId) -> &Ir;
|
||||||
fn get_sym(&self, id: SymId) -> Symbol<'_>;
|
fn get_sym(&self, id: SymId) -> Symbol<'_>;
|
||||||
fn get_current_dir(&self) -> &Path;
|
fn get_current_dir(&self) -> &Path;
|
||||||
fn get_store_dir(&self) -> &str;
|
fn get_store_dir(&self) -> &str;
|
||||||
fn get_current_source_id(&self) -> usize;
|
fn get_current_source_id(&self) -> usize;
|
||||||
fn register_span(&self, range: rnix::TextRange) -> usize;
|
fn get_current_source(&self) -> crate::error::Source;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<Ctx: CodegenContext> Compile<Ctx> for ExprId {
|
||||||
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
|
ctx.get_ir(*self).compile(ctx, buf);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
|
impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
|
||||||
@@ -189,90 +222,70 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Symbol<'_> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Ctx: CodegenContext> Compile<Ctx> for RawIrRef<'_> {
|
impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
|
||||||
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
match self.deref() {
|
match self {
|
||||||
Ir::Int(int) => {
|
Ir::Int(int) => {
|
||||||
code!(buf, "{}n", int);
|
code!(buf, "{}n", int.inner);
|
||||||
}
|
}
|
||||||
Ir::Float(float) => {
|
Ir::Float(float) => {
|
||||||
code!(buf, "{}", float);
|
code!(buf, "{}", float.inner);
|
||||||
}
|
}
|
||||||
Ir::Bool(bool) => {
|
Ir::Bool(bool) => {
|
||||||
code!(buf, "{}", bool);
|
code!(buf, "{}", bool.inner);
|
||||||
}
|
}
|
||||||
Ir::Null => {
|
Ir::Null(_) => {
|
||||||
code!(buf, ctx; "null");
|
code!(buf, ctx; "null");
|
||||||
}
|
}
|
||||||
Ir::Str(s) => {
|
Ir::Str(s) => {
|
||||||
code!(buf, ctx; quoted(s));
|
code!(buf, ctx; quoted(&s.val));
|
||||||
}
|
}
|
||||||
Ir::Path(p) => {
|
Ir::Path(p) => {
|
||||||
// Nix.resolvePath
|
code!(buf, ctx; "Nix.resolvePath(__currentDir," ctx.get_ir(p.expr) ")");
|
||||||
code!(buf, ctx; "$r(_d," p ")");
|
|
||||||
}
|
}
|
||||||
Ir::If { cond, consq, alter } => {
|
Ir::If(x) => x.compile(ctx, buf),
|
||||||
code!(buf, ctx; "$fb(" cond ")?(" consq "):(" alter ")");
|
Ir::BinOp(x) => x.compile(ctx, buf),
|
||||||
|
Ir::UnOp(x) => x.compile(ctx, buf),
|
||||||
|
Ir::Func(x) => x.compile(ctx, buf),
|
||||||
|
Ir::AttrSet(x) => x.compile(ctx, buf),
|
||||||
|
Ir::List(x) => x.compile(ctx, buf),
|
||||||
|
Ir::Call(x) => x.compile(ctx, buf),
|
||||||
|
Ir::Arg(x) => {
|
||||||
|
code!(buf, "arg{}", x.inner.0);
|
||||||
}
|
}
|
||||||
&Ir::BinOp { lhs, rhs, kind } => compile_binop(lhs, rhs, kind, ctx, buf),
|
Ir::TopLevel(x) => x.compile(ctx, buf),
|
||||||
&Ir::UnOp { rhs, kind } => compile_unop(rhs, kind, ctx, buf),
|
Ir::Select(x) => x.compile(ctx, buf),
|
||||||
&Ir::Func {
|
&Ir::Thunk(Thunk { inner: expr_id, .. }) => {
|
||||||
body,
|
code!(buf, "expr{}", expr_id.0);
|
||||||
ref param,
|
}
|
||||||
arg,
|
Ir::Builtins(_) => {
|
||||||
ref thunks,
|
code!(buf, ctx; "Nix.builtins");
|
||||||
} => compile_func(arg, thunks, param, body, ctx, buf),
|
}
|
||||||
Ir::AttrSet { stcs, dyns } => compile_attrset(stcs, dyns, ctx, buf),
|
&Ir::Builtin(Builtin { inner: name, .. }) => {
|
||||||
Ir::List { items } => compile_list(items, ctx, buf),
|
|
||||||
Ir::Call { func, arg, span } => {
|
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$c("
|
"Nix.builtins.get("
|
||||||
func
|
ctx.get_sym(name)
|
||||||
","
|
|
||||||
arg
|
|
||||||
","
|
|
||||||
span
|
|
||||||
")"
|
")"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Ir::Arg(x) => {
|
Ir::ConcatStrings(x) => x.compile(ctx, buf),
|
||||||
code!(buf, "a{}", x.0);
|
Ir::HasAttr(x) => x.compile(ctx, buf),
|
||||||
}
|
&Ir::Assert(Assert {
|
||||||
&Ir::TopLevel { body, ref thunks } => compile_toplevel(body, thunks, ctx, buf),
|
|
||||||
&Ir::Select {
|
|
||||||
expr,
|
|
||||||
ref attrpath,
|
|
||||||
default,
|
|
||||||
span,
|
|
||||||
} => compile_select(expr, attrpath, default, span, ctx, buf),
|
|
||||||
Ir::Thunk(ThunkId(id)) => {
|
|
||||||
code!(buf, "e{}", id);
|
|
||||||
}
|
|
||||||
Ir::Builtins => {
|
|
||||||
// Nix.builtins
|
|
||||||
code!(buf, ctx; "$b");
|
|
||||||
}
|
|
||||||
&Ir::Builtin(name) => {
|
|
||||||
// Nix.builtins
|
|
||||||
code!(buf, ctx; "$b.get(" ctx.get_sym(name) ")");
|
|
||||||
}
|
|
||||||
&Ir::ConcatStrings {
|
|
||||||
ref parts,
|
|
||||||
force_string,
|
|
||||||
} => compile_concat_strings(parts, force_string, ctx, buf),
|
|
||||||
&Ir::HasAttr { lhs, ref rhs } => compile_has_attr(lhs, rhs, ctx, buf),
|
|
||||||
Ir::Assert {
|
|
||||||
assertion,
|
assertion,
|
||||||
expr,
|
expr,
|
||||||
assertion_raw,
|
ref assertion_raw,
|
||||||
span: assert_span,
|
span: assert_span,
|
||||||
} => {
|
}) => {
|
||||||
// Nix.assert
|
let assertion_ir = ctx.get_ir(assertion);
|
||||||
|
let assertion_span = assertion_ir.span();
|
||||||
|
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$a("
|
"Nix.assert(Nix.withContext(\"while evaluating the condition of the assert statement\","
|
||||||
assertion
|
assertion_span
|
||||||
","
|
",()=>("
|
||||||
expr
|
assertion_ir
|
||||||
|
")),"
|
||||||
|
ctx.get_ir(expr)
|
||||||
","
|
","
|
||||||
quoted(assertion_raw)
|
quoted(assertion_raw)
|
||||||
","
|
","
|
||||||
@@ -280,127 +293,162 @@ impl<Ctx: CodegenContext> Compile<Ctx> for RawIrRef<'_> {
|
|||||||
")"
|
")"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Ir::CurPos(span) => {
|
Ir::CurPos(cur_pos) => {
|
||||||
// Nix.mkPos
|
code!(buf, ctx;
|
||||||
code!(buf, ctx; "$mp(" span ")");
|
"Nix.mkPos("
|
||||||
|
cur_pos.span
|
||||||
|
")"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
&Ir::ReplBinding(name) => {
|
&Ir::ReplBinding(ReplBinding { inner: name, .. }) => {
|
||||||
// Nix.getReplBinding
|
code!(buf, ctx;
|
||||||
code!(buf, ctx; "$gb(" ctx.get_sym(name) ")");
|
"Nix.getReplBinding("
|
||||||
|
ctx.get_sym(name)
|
||||||
|
")"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
&Ir::ScopedImportBinding(name) => {
|
&Ir::ScopedImportBinding(ScopedImportBinding { inner: name, .. }) => {
|
||||||
code!(buf, ctx; "_s.get(" ctx.get_sym(name) ")");
|
code!(buf, ctx;
|
||||||
|
"__scope.get("
|
||||||
|
ctx.get_sym(name)
|
||||||
|
")"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
&Ir::With {
|
Ir::WithExpr(x) => x.compile(ctx, buf),
|
||||||
namespace,
|
&Ir::WithLookup(WithLookup { inner: name, .. }) => {
|
||||||
body,
|
code!(buf, ctx;
|
||||||
ref thunks,
|
"Nix.lookupWith("
|
||||||
} => compile_with(namespace, body, thunks, ctx, buf),
|
ctx.get_sym(name)
|
||||||
&Ir::WithLookup(name) => {
|
",__with)"
|
||||||
// Nix.lookupWith
|
);
|
||||||
code!(buf, ctx; "$l(" ctx.get_sym(name) ",_w)");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_binop<'ir>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for If {
|
||||||
lhs: RawIrRef<'ir>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
rhs: RawIrRef<'ir>,
|
let &If {
|
||||||
kind: BinOpKind,
|
cond,
|
||||||
ctx: &impl CodegenContext,
|
consq,
|
||||||
buf: &mut CodeBuffer,
|
alter,
|
||||||
) {
|
span: _,
|
||||||
|
} = self;
|
||||||
|
let cond_ir = ctx.get_ir(cond);
|
||||||
|
let cond_span = cond_ir.span();
|
||||||
|
|
||||||
|
code!(buf, ctx;
|
||||||
|
"(Nix.withContext(\"while evaluating a branch condition\"," cond_span ",()=>Nix.forceBool(" cond_ir ")))"
|
||||||
|
"?(" consq "):(" alter ")"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
|
||||||
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
use BinOpKind::*;
|
use BinOpKind::*;
|
||||||
match kind {
|
|
||||||
|
let lhs = ctx.get_ir(self.lhs);
|
||||||
|
let rhs = ctx.get_ir(self.rhs);
|
||||||
|
|
||||||
|
match self.kind {
|
||||||
Add | Sub | Mul | Div | Eq | Neq | Lt | Gt | Leq | Geq | Con | Upd => {
|
Add | Sub | Mul | Div | Eq | Neq | Lt | Gt | Leq | Geq | Con | Upd => {
|
||||||
let op_func = match kind {
|
let op_name = match self.kind {
|
||||||
Add => "$oa",
|
Add => "+",
|
||||||
Sub => "$os",
|
Sub => "-",
|
||||||
Mul => "$om",
|
Mul => "*",
|
||||||
Div => "$od",
|
Div => "/",
|
||||||
Eq => "$oe",
|
Eq => "==",
|
||||||
Neq => "!$oe",
|
Neq => "!=",
|
||||||
Lt => "$ol",
|
Lt => "<",
|
||||||
Gt => "$og",
|
Gt => ">",
|
||||||
Leq => "!$og",
|
Leq => "<=",
|
||||||
Geq => "!$ol",
|
Geq => ">=",
|
||||||
Con => "$oc",
|
Con => "++",
|
||||||
Upd => "$ou",
|
Upd => "//",
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
let op_func = match self.kind {
|
||||||
|
Add => "Nix.op.add",
|
||||||
|
Sub => "Nix.op.sub",
|
||||||
|
Mul => "Nix.op.mul",
|
||||||
|
Div => "Nix.op.div",
|
||||||
|
Eq => "Nix.op.eq",
|
||||||
|
Neq => "Nix.op.neq",
|
||||||
|
Lt => "Nix.op.lt",
|
||||||
|
Gt => "Nix.op.gt",
|
||||||
|
Leq => "Nix.op.lte",
|
||||||
|
Geq => "Nix.op.gte",
|
||||||
|
Con => "Nix.op.concat",
|
||||||
|
Upd => "Nix.op.update",
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
};
|
};
|
||||||
|
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
op_func "(" lhs "," rhs ")"
|
"Nix.withContext(\"while evaluating the " op_name " operator\"," self.span ",()=>(" op_func "(" lhs "," rhs ")))"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
And => {
|
And => {
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"$fb(" lhs ")" "&&" "$fb(" rhs ")"
|
"Nix.withContext(\"while evaluating the && operator\"," self.span ",()=>(Nix.forceBool(" lhs ")&&Nix.forceBool(" rhs ")))"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Or => {
|
Or => {
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"$fb(" lhs ")" "||" "$fb(" rhs ")"
|
"Nix.withContext(\"while evaluating the || operator\"," self.span ",()=>(Nix.forceBool(" lhs ")||Nix.forceBool(" rhs ")))"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Impl => {
|
Impl => {
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"!$fb(" lhs ")" "||" "$fb(" rhs ")"
|
"Nix.withContext(\"while evaluating the -> operator\"," self.span ",()=>(!Nix.forceBool(" lhs ")||Nix.forceBool(" rhs ")))"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
PipeL => {
|
PipeL => {
|
||||||
code!(buf, ctx; "$c(" rhs "," lhs ")");
|
code!(buf, ctx; "Nix.call(" rhs "," lhs ")");
|
||||||
}
|
}
|
||||||
PipeR => {
|
PipeR => {
|
||||||
code!(buf, ctx; "$c(" lhs "," rhs ")");
|
code!(buf, ctx; "Nix.call(" lhs "," rhs ")");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_unop(
|
impl<Ctx: CodegenContext> Compile<Ctx> for UnOp {
|
||||||
rhs: RawIrRef<'_>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
kind: UnOpKind,
|
|
||||||
ctx: &impl CodegenContext,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
use UnOpKind::*;
|
use UnOpKind::*;
|
||||||
match kind {
|
let rhs = ctx.get_ir(self.rhs);
|
||||||
|
match self.kind {
|
||||||
Neg => {
|
Neg => {
|
||||||
// 0 - rhs
|
code!(buf, ctx; "Nix.op.sub(0n," rhs ")");
|
||||||
code!(buf, ctx; "$os(0n," rhs ")");
|
|
||||||
}
|
}
|
||||||
Not => {
|
Not => {
|
||||||
code!(buf, ctx; "!$fb(" rhs ")");
|
code!(buf, ctx; "Nix.op.bnot(" ctx.get_ir(self.rhs) ")");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_func<'ir, Ctx: CodegenContext>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for Func {
|
||||||
ArgId(id): ArgId,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
thunks: &[(ThunkId, RawIrRef<'ir>)],
|
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().inner.0;
|
||||||
param: &Option<Param<'ir>>,
|
|
||||||
body: RawIrRef<'ir>,
|
let has_thunks = !self.thunks.is_empty();
|
||||||
ctx: &Ctx,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
let has_thunks = !thunks.is_empty();
|
|
||||||
|
|
||||||
if let Some(Param {
|
if let Some(Param {
|
||||||
required,
|
required,
|
||||||
optional,
|
optional,
|
||||||
ellipsis,
|
ellipsis,
|
||||||
}) = ¶m
|
}) = &self.param
|
||||||
{
|
{
|
||||||
code!(buf, "$mf(a{}=>", id);
|
code!(buf, "Nix.mkFunction(arg{}=>", id);
|
||||||
if has_thunks {
|
if has_thunks {
|
||||||
code!(buf, ctx; "{" thunks "return " body "}");
|
code!(buf, ctx; "{" self.thunks "return " self.body "}");
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx; "(" body ")");
|
code!(buf, ctx; "(" self.body ")");
|
||||||
}
|
}
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
",["
|
",["
|
||||||
@@ -411,208 +459,229 @@ fn compile_func<'ir, Ctx: CodegenContext>(
|
|||||||
joined(optional.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
|
joined(optional.iter(), ",", |ctx: &Ctx, buf, &(sym, _)| {
|
||||||
code!(buf, ctx; ctx.get_sym(sym));
|
code!(buf, ctx; ctx.get_sym(sym));
|
||||||
})
|
})
|
||||||
"],new Map(["
|
"],{"
|
||||||
joined(required.iter().chain(optional.iter()), ",", |ctx: &Ctx, buf, &(sym, span)| {
|
joined(required.iter().chain(optional.iter()), ",", |ctx: &Ctx, buf, &(sym, span)| {
|
||||||
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
|
code!(buf, ctx; ctx.get_sym(sym) ":" span);
|
||||||
})
|
})
|
||||||
"]),"
|
"},"
|
||||||
ellipsis
|
ellipsis
|
||||||
")"
|
")"
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
code!(buf, "a{}=>", id);
|
code!(buf, "arg{}=>", id);
|
||||||
if has_thunks {
|
if has_thunks {
|
||||||
code!(buf, ctx; "{" thunks "return " body "}");
|
code!(buf, ctx; "{" self.thunks "return " self.body "}");
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx; "(" body ")");
|
code!(buf, ctx; "(" self.body ")");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'ir, Ctx: CodegenContext> Compile<Ctx> for [(ThunkId, RawIrRef<'ir>)] {
|
impl<Ctx: CodegenContext> Compile<Ctx> for Call {
|
||||||
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
|
code!(buf, ctx;
|
||||||
|
"Nix.call("
|
||||||
|
ctx.get_ir(self.func)
|
||||||
|
","
|
||||||
|
ctx.get_ir(self.arg)
|
||||||
|
","
|
||||||
|
self.span
|
||||||
|
")"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<Ctx: CodegenContext> Compile<Ctx> for [(ExprId, ExprId)] {
|
||||||
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
if self.is_empty() {
|
if self.is_empty() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for &(slot, inner) in self {
|
||||||
|
let inner_ir = ctx.get_ir(inner);
|
||||||
|
let inner_span = inner_ir.span();
|
||||||
|
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"const "
|
"let expr" slot.0 "=Nix.createThunk(()=>(" inner_ir "),"
|
||||||
joined(self.iter(), ",", |ctx: &Ctx, buf, &(slot, inner)| {
|
"\"expr" slot.0 " "
|
||||||
code!(buf, ctx; "e" slot.0 "=$t(()=>(" inner ")," "'e" slot.0 "')");
|
ctx.get_current_source().get_name() ":"
|
||||||
})
|
usize::from(inner_span.start()) ":"
|
||||||
";"
|
usize::from(inner_span.end())
|
||||||
|
"\");"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn compile_toplevel<'ir, Ctx: CodegenContext>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for TopLevel {
|
||||||
body: RawIrRef<'ir>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
thunks: &[(ThunkId, RawIrRef<'ir>)],
|
if self.thunks.is_empty() {
|
||||||
ctx: &Ctx,
|
ctx.get_ir(self.body).compile(ctx, buf);
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
if thunks.is_empty() {
|
|
||||||
body.compile(ctx, buf);
|
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx; "(()=>{" thunks "return " body "})()");
|
let body = ctx.get_ir(self.body);
|
||||||
|
code!(buf, ctx; "(()=>{" self.thunks "return " body "})()");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_with<'ir>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for WithExpr {
|
||||||
namespace: RawIrRef<'ir>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
body: RawIrRef<'ir>,
|
let namespace = ctx.get_ir(self.namespace);
|
||||||
thunks: &[(ThunkId, RawIrRef<'ir>)],
|
let body = ctx.get_ir(self.body);
|
||||||
ctx: &impl CodegenContext,
|
let has_thunks = !self.thunks.is_empty();
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
let has_thunks = !thunks.is_empty();
|
|
||||||
if has_thunks {
|
if has_thunks {
|
||||||
code!(buf, ctx; "((_w)=>{" thunks "return " body "})({env:" namespace ",last:_w})");
|
code!(buf, ctx; "((__with)=>{" self.thunks "return " body "})({env:" namespace ",last:__with})");
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx; "((_w)=>(" body "))({env:" namespace ",last:_w})");
|
code!(buf, ctx; "((__with)=>(" body "))({env:" namespace ",last:__with})");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_select<'ir, Ctx: CodegenContext>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for Select {
|
||||||
expr: RawIrRef<'ir>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
attrpath: &[Attr<RawIrRef<'ir>>],
|
if let Some(default) = self.default {
|
||||||
default: Option<RawIrRef<'ir>>,
|
|
||||||
span: TextRange,
|
|
||||||
ctx: &Ctx,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
if let Some(default) = default {
|
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$sd("
|
"Nix.selectWithDefault("
|
||||||
expr
|
ctx.get_ir(self.expr)
|
||||||
",["
|
",["
|
||||||
joined(attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
|
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
|
||||||
match attr {
|
match attr {
|
||||||
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
||||||
Attr::Dynamic(expr_id, _) => code!(buf, ctx; *expr_id),
|
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
"],"
|
"],"
|
||||||
default
|
ctx.get_ir(default)
|
||||||
","
|
","
|
||||||
span
|
self.span
|
||||||
")"
|
")"
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$s("
|
"Nix.select("
|
||||||
expr
|
ctx.get_ir(self.expr)
|
||||||
",["
|
",["
|
||||||
joined(attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
|
joined(self.attrpath.iter(), ",", |ctx: &Ctx, buf, attr| {
|
||||||
match attr {
|
match attr {
|
||||||
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
||||||
Attr::Dynamic(expr, _) => code!(buf, ctx; expr),
|
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
"],"
|
"],"
|
||||||
span
|
self.span
|
||||||
")"
|
")"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn compile_attrset<'ir, Ctx: CodegenContext>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
|
||||||
stcs: &HashMap<'ir, SymId, (RawIrRef<'ir>, TextRange)>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
dyns: &[(RawIrRef<'ir>, RawIrRef<'ir>, TextRange)],
|
if !self.dyns.is_empty() {
|
||||||
ctx: &Ctx,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
if !dyns.is_empty() {
|
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$ma(new Map(["
|
"Nix.mkAttrsWithPos({"
|
||||||
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(val, _))| {
|
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(expr, _))| {
|
||||||
let key = ctx.get_sym(sym);
|
let key = ctx.get_sym(sym);
|
||||||
|
let val = ctx.get_ir(expr);
|
||||||
|
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"[" key "," val "]"
|
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
"]),new Map(["
|
"},{"
|
||||||
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
|
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
|
||||||
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
|
code!(buf, ctx; ctx.get_sym(sym) ":" span);
|
||||||
})
|
})
|
||||||
"]),{dynKeys:["
|
"},{dynKeys:["
|
||||||
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (key, _, _)| {
|
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (key, _, _)| {
|
||||||
code!(buf, ctx; key);
|
code!(buf, ctx; ctx.get_ir(*key));
|
||||||
})
|
})
|
||||||
"],dynVals:["
|
"],dynVals:["
|
||||||
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (_, val, _)| {
|
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (_, val, _)| {
|
||||||
code!(buf, ctx; val);
|
let val = ctx.get_ir(*val);
|
||||||
|
code!(
|
||||||
|
buf, ctx;
|
||||||
|
"Nix.withContext(\"while evaluating a dynamic attribute\"," val.span() ",()=>(" val "))"
|
||||||
|
);
|
||||||
})
|
})
|
||||||
"],dynSpans:["
|
"],dynSpans:["
|
||||||
joined(dyns.iter(), ",", |ctx: &Ctx, buf, (_, _, attr_span)| {
|
joined(self.dyns.iter(), ",", |ctx: &Ctx, buf, (_, _, attr_span)| {
|
||||||
code!(buf, ctx; attr_span);
|
code!(buf, ctx; attr_span);
|
||||||
})
|
})
|
||||||
"]})"
|
"]})"
|
||||||
);
|
);
|
||||||
} else if !stcs.is_empty() {
|
} else if !self.stcs.is_empty() {
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$ma(new Map(["
|
"Nix.mkAttrsWithPos({"
|
||||||
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(val, _))| {
|
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(expr, _))| {
|
||||||
let key = ctx.get_sym(sym);
|
let key = ctx.get_sym(sym);
|
||||||
|
let val = ctx.get_ir(expr);
|
||||||
|
|
||||||
code!(
|
code!(
|
||||||
buf, ctx;
|
buf, ctx;
|
||||||
"[" key "," val "]"
|
key ":Nix.withContext(\"while evaluating the attribute '" escaped(&key) "'\"," val.span() ",()=>(" val "))"
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
"]),new Map(["
|
"},{"
|
||||||
joined(stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
|
joined(self.stcs.iter(), ",", |ctx: &Ctx, buf, (&sym, &(_, span))| {
|
||||||
code!(buf, ctx; "[" ctx.get_sym(sym) "," span "]");
|
code!(buf, ctx; ctx.get_sym(sym) ":" span);
|
||||||
})
|
})
|
||||||
"]))"
|
"})"
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
code!(buf, ctx; "$e");
|
code!(buf, ctx; "new Map()");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_list<Ctx: CodegenContext>(items: &[RawIrRef<'_>], ctx: &Ctx, buf: &mut CodeBuffer) {
|
impl<Ctx: CodegenContext> Compile<Ctx> for List {
|
||||||
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"["
|
"["
|
||||||
joined(items.iter(), ",", |ctx: &Ctx, buf, item| {
|
joined(self.items.iter().enumerate(), ",", |ctx: &Ctx, buf, (idx, item)| {
|
||||||
code!(buf, ctx; item);
|
let item = ctx.get_ir(*item);
|
||||||
|
code!(
|
||||||
|
buf, ctx;
|
||||||
|
"Nix.withContext(\"while evaluating list element " idx "\"," item.span() ",()=>(" item "))"
|
||||||
|
);
|
||||||
})
|
})
|
||||||
"]"
|
"]"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_concat_strings<Ctx: CodegenContext>(
|
|
||||||
parts: &[RawIrRef<'_>],
|
|
||||||
force_string: bool,
|
|
||||||
ctx: &Ctx,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
code!(buf, ctx;
|
|
||||||
"$cs(["
|
|
||||||
joined(parts.iter(), ",", |ctx: &Ctx, buf, part| {
|
|
||||||
code!(buf, ctx; part);
|
|
||||||
})
|
|
||||||
"]," force_string ")"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_has_attr<'ir, Ctx: CodegenContext>(
|
impl<Ctx: CodegenContext> Compile<Ctx> for ConcatStrings {
|
||||||
lhs: RawIrRef<'ir>,
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
rhs: &[Attr<RawIrRef<'ir>>],
|
|
||||||
ctx: &Ctx,
|
|
||||||
buf: &mut CodeBuffer,
|
|
||||||
) {
|
|
||||||
code!(buf, ctx;
|
code!(buf, ctx;
|
||||||
"$h("
|
"Nix.concatStringsWithContext(["
|
||||||
lhs
|
joined(self.parts.iter(), ",", |ctx: &Ctx, buf, part| {
|
||||||
|
let part = ctx.get_ir(*part);
|
||||||
|
code!(
|
||||||
|
buf, ctx;
|
||||||
|
"Nix.withContext(\"while evaluating a path segment\"," part.span() ",()=>(" part "))"
|
||||||
|
);
|
||||||
|
})
|
||||||
|
"]," self.force_string ")"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr {
|
||||||
|
fn compile(&self, ctx: &Ctx, buf: &mut CodeBuffer) {
|
||||||
|
code!(buf, ctx;
|
||||||
|
"Nix.hasAttr("
|
||||||
|
ctx.get_ir(self.lhs)
|
||||||
",["
|
",["
|
||||||
joined(rhs.iter(), ",", |ctx: &Ctx, buf, attr| {
|
joined(self.rhs.iter(), ",", |ctx: &Ctx, buf, attr| {
|
||||||
match attr {
|
match attr {
|
||||||
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
Attr::Str(sym, _) => code!(buf, ctx; ctx.get_sym(*sym)),
|
||||||
Attr::Dynamic(expr, _) => code!(buf, ctx; expr),
|
Attr::Dynamic(expr_id, _) => code!(buf, ctx; ctx.get_ir(*expr_id)),
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
"])"
|
"])"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,22 +1,20 @@
|
|||||||
use std::cell::UnsafeCell;
|
|
||||||
use std::hash::BuildHasher;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
use std::ptr::NonNull;
|
||||||
|
|
||||||
use bumpalo::Bump;
|
use hashbrown::{HashMap, HashSet};
|
||||||
use ghost_cell::{GhostCell, GhostToken};
|
|
||||||
use hashbrown::{DefaultHashBuilder, HashMap, HashSet, HashTable};
|
|
||||||
use rnix::TextRange;
|
use rnix::TextRange;
|
||||||
use string_interner::DefaultStringInterner;
|
use string_interner::DefaultStringInterner;
|
||||||
|
|
||||||
use crate::bytecode::{self, Bytecode, BytecodeContext, Constant};
|
use crate::codegen::{CodegenContext, compile, compile_scoped};
|
||||||
use crate::codegen::{CodegenContext, compile};
|
|
||||||
use crate::disassembler::{Disassembler, DisassemblerContext};
|
|
||||||
use crate::downgrade::*;
|
use crate::downgrade::*;
|
||||||
use crate::error::{Error, Result, Source};
|
use crate::error::{Error, Result, Source};
|
||||||
use crate::ir::{ArgId, Ir, IrKey, IrRef, RawIrRef, SymId, ThunkId, ir_content_eq};
|
use crate::ir::{
|
||||||
|
Arg, ArgId, Bool, Builtin, ExprId, Ir, Null, ReplBinding, ScopedImportBinding, SymId, Thunk,
|
||||||
|
ToIr as _, WithLookup,
|
||||||
|
};
|
||||||
#[cfg(feature = "inspector")]
|
#[cfg(feature = "inspector")]
|
||||||
use crate::runtime::inspector::InspectorServer;
|
use crate::runtime::inspector::InspectorServer;
|
||||||
use crate::runtime::{ForceMode, Runtime, RuntimeContext};
|
use crate::runtime::{Runtime, RuntimeContext};
|
||||||
use crate::store::{DaemonStore, Store, StoreConfig};
|
use crate::store::{DaemonStore, Store, StoreConfig};
|
||||||
use crate::value::{Symbol, Value};
|
use crate::value::{Symbol, Value};
|
||||||
|
|
||||||
@@ -55,16 +53,16 @@ pub struct Context {
|
|||||||
_inspector_server: Option<InspectorServer>,
|
_inspector_server: Option<InspectorServer>,
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! eval_bc {
|
macro_rules! eval {
|
||||||
($name:ident, $mode:expr) => {
|
($name:ident, $wrapper:literal) => {
|
||||||
pub fn $name(&mut self, source: Source) -> Result<Value> {
|
pub fn $name(&mut self, source: Source) -> Result<Value> {
|
||||||
tracing::info!("Starting evaluation");
|
tracing::info!("Starting evaluation");
|
||||||
|
|
||||||
tracing::debug!("Compiling bytecode");
|
tracing::debug!("Compiling code");
|
||||||
let bytecode = self.ctx.compile_bytecode(source)?;
|
let code = self.compile(source)?;
|
||||||
|
|
||||||
tracing::debug!("Executing bytecode");
|
tracing::debug!("Executing JavaScript");
|
||||||
self.runtime.eval_bytecode(bytecode, &mut self.ctx, $mode)
|
self.runtime.eval(format!($wrapper, code), &mut self.ctx)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -125,23 +123,19 @@ impl Context {
|
|||||||
let code = self.ctx.compile(source, None)?;
|
let code = self.ctx.compile(source, None)?;
|
||||||
self.runtime.eval(
|
self.runtime.eval(
|
||||||
format!(
|
format!(
|
||||||
"Nix.builtins.set('derivation',({}));Nix.builtins.set('storeDir','{}');{}0n",
|
"Nix.builtins.set('derivation',({}));Nix.builtins.set('storeDir','{}')",
|
||||||
code,
|
code,
|
||||||
self.get_store_dir(),
|
self.get_store_dir()
|
||||||
if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
|
|
||||||
"Nix.DEBUG_THUNKS.enabled=true;"
|
|
||||||
} else {
|
|
||||||
""
|
|
||||||
}
|
|
||||||
),
|
),
|
||||||
&mut self.ctx,
|
&mut self.ctx,
|
||||||
)?;
|
)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
eval_bc!(eval, ForceMode::Force);
|
eval!(eval, "Nix.force({})");
|
||||||
eval_bc!(eval_shallow, ForceMode::ForceShallow);
|
eval!(eval_shallow, "Nix.forceShallow({})");
|
||||||
eval_bc!(eval_deep, ForceMode::ForceDeep);
|
eval!(eval_deep, "Nix.forceDeep({})");
|
||||||
|
|
||||||
pub fn eval_repl<'a>(&'a mut self, source: Source, scope: &'a HashSet<SymId>) -> Result<Value> {
|
pub fn eval_repl<'a>(&'a mut self, source: Source, scope: &'a HashSet<SymId>) -> Result<Value> {
|
||||||
tracing::info!("Starting evaluation");
|
tracing::info!("Starting evaluation");
|
||||||
|
|
||||||
@@ -157,18 +151,6 @@ impl Context {
|
|||||||
self.ctx.compile(source, None)
|
self.ctx.compile(source, None)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
|
|
||||||
self.ctx.compile_bytecode(source)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn disassemble(&self, bytecode: &Bytecode) -> String {
|
|
||||||
Disassembler::new(bytecode, &self.ctx).disassemble()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn disassemble_colored(&self, bytecode: &Bytecode) -> String {
|
|
||||||
Disassembler::new(bytecode, &self.ctx).disassemble_colored()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_store_dir(&self) -> &str {
|
pub fn get_store_dir(&self) -> &str {
|
||||||
self.ctx.get_store_dir()
|
self.ctx.get_store_dir()
|
||||||
}
|
}
|
||||||
@@ -194,44 +176,32 @@ impl Context {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Ctx {
|
pub(crate) struct Ctx {
|
||||||
|
irs: Vec<Ir>,
|
||||||
symbols: DefaultStringInterner,
|
symbols: DefaultStringInterner,
|
||||||
global: HashMap<SymId, Ir<'static, RawIrRef<'static>>>,
|
global: NonNull<HashMap<SymId, ExprId>>,
|
||||||
sources: Vec<Source>,
|
sources: Vec<Source>,
|
||||||
store: DaemonStore,
|
store: DaemonStore,
|
||||||
spans: UnsafeCell<Vec<(usize, TextRange)>>,
|
|
||||||
thunk_count: usize,
|
|
||||||
global_strings: Vec<String>,
|
|
||||||
global_string_map: HashMap<String, u32>,
|
|
||||||
global_constants: Vec<Constant>,
|
|
||||||
global_constant_map: HashMap<Constant, u32>,
|
|
||||||
synced_strings: usize,
|
|
||||||
synced_constants: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Owns the bump allocator and a read-only reference into it.
|
|
||||||
///
|
|
||||||
/// # Safety
|
|
||||||
/// The `ir` field points into `_bump`'s storage. We use `'static` as a sentinel
|
|
||||||
/// lifetime because the struct owns the backing memory. The `as_ref` method
|
|
||||||
/// re-binds the lifetime to `&self`, preventing use-after-free.
|
|
||||||
struct OwnedIr {
|
|
||||||
_bump: Bump,
|
|
||||||
ir: RawIrRef<'static>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl OwnedIr {
|
|
||||||
fn as_ref(&self) -> RawIrRef<'_> {
|
|
||||||
self.ir
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Ctx {
|
impl Ctx {
|
||||||
fn new() -> Result<Self> {
|
fn new() -> Result<Self> {
|
||||||
|
use crate::ir::{Builtins, ToIr as _};
|
||||||
|
|
||||||
let mut symbols = DefaultStringInterner::new();
|
let mut symbols = DefaultStringInterner::new();
|
||||||
|
let mut irs = Vec::new();
|
||||||
let mut global = HashMap::new();
|
let mut global = HashMap::new();
|
||||||
|
|
||||||
|
irs.push(
|
||||||
|
Builtins {
|
||||||
|
span: rnix::TextRange::default(),
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
);
|
||||||
|
let builtins_expr = ExprId(0);
|
||||||
|
|
||||||
let builtins_sym = symbols.get_or_intern("builtins");
|
let builtins_sym = symbols.get_or_intern("builtins");
|
||||||
global.insert(builtins_sym, Ir::Builtins);
|
global.insert(builtins_sym, builtins_expr);
|
||||||
|
|
||||||
let free_globals = [
|
let free_globals = [
|
||||||
"abort",
|
"abort",
|
||||||
@@ -255,19 +225,48 @@ impl Ctx {
|
|||||||
"toString",
|
"toString",
|
||||||
];
|
];
|
||||||
let consts = [
|
let consts = [
|
||||||
("true", Ir::Bool(true)),
|
(
|
||||||
("false", Ir::Bool(false)),
|
"true",
|
||||||
("null", Ir::Null),
|
Bool {
|
||||||
|
inner: true,
|
||||||
|
span: rnix::TextRange::default(),
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"false",
|
||||||
|
Bool {
|
||||||
|
inner: false,
|
||||||
|
span: rnix::TextRange::default(),
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"null",
|
||||||
|
Null {
|
||||||
|
span: rnix::TextRange::default(),
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
),
|
||||||
];
|
];
|
||||||
|
|
||||||
for name in free_globals {
|
for name in free_globals {
|
||||||
let name = symbols.get_or_intern(name);
|
let name_sym = symbols.get_or_intern(name);
|
||||||
let value = Ir::Builtin(name);
|
let id = ExprId(irs.len());
|
||||||
global.insert(name, value);
|
irs.push(
|
||||||
|
Builtin {
|
||||||
|
inner: name_sym,
|
||||||
|
span: rnix::TextRange::default(),
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
);
|
||||||
|
global.insert(name_sym, id);
|
||||||
}
|
}
|
||||||
for (name, value) in consts {
|
for (name, value) in consts {
|
||||||
let name = symbols.get_or_intern(name);
|
let name_sym = symbols.get_or_intern(name);
|
||||||
global.insert(name, value);
|
let id = ExprId(irs.len());
|
||||||
|
irs.push(value);
|
||||||
|
global.insert(name_sym, id);
|
||||||
}
|
}
|
||||||
|
|
||||||
let config = StoreConfig::from_env();
|
let config = StoreConfig::from_env();
|
||||||
@@ -275,39 +274,19 @@ impl Ctx {
|
|||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
symbols,
|
symbols,
|
||||||
global,
|
irs,
|
||||||
|
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(global))) },
|
||||||
sources: Vec::new(),
|
sources: Vec::new(),
|
||||||
store,
|
store,
|
||||||
spans: UnsafeCell::new(Vec::new()),
|
|
||||||
thunk_count: 0,
|
|
||||||
global_strings: Vec::new(),
|
|
||||||
global_string_map: HashMap::new(),
|
|
||||||
global_constants: Vec::new(),
|
|
||||||
global_constant_map: HashMap::new(),
|
|
||||||
synced_strings: 0,
|
|
||||||
synced_constants: 0,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn downgrade_ctx<'ctx, 'id, 'ir>(
|
fn downgrade_ctx<'a>(&'a mut self, extra_scope: Option<Scope<'a>>) -> DowngradeCtx<'a> {
|
||||||
&'ctx mut self,
|
let global_ref = unsafe { self.global.as_ref() };
|
||||||
bump: &'ir Bump,
|
DowngradeCtx::new(self, global_ref, extra_scope)
|
||||||
token: GhostToken<'id>,
|
|
||||||
extra_scope: Option<Scope<'ctx>>,
|
|
||||||
) -> DowngradeCtx<'ctx, 'id, 'ir> {
|
|
||||||
let source = self.get_current_source();
|
|
||||||
DowngradeCtx::new(
|
|
||||||
bump,
|
|
||||||
token,
|
|
||||||
&mut self.symbols,
|
|
||||||
&self.global,
|
|
||||||
extra_scope,
|
|
||||||
&mut self.thunk_count,
|
|
||||||
source,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_dir(&self) -> &Path {
|
pub(crate) fn get_current_dir(&self) -> &Path {
|
||||||
self.sources
|
self.sources
|
||||||
.last()
|
.last()
|
||||||
.as_ref()
|
.as_ref()
|
||||||
@@ -315,18 +294,18 @@ impl Ctx {
|
|||||||
.get_dir()
|
.get_dir()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_current_source(&self) -> Source {
|
pub(crate) fn get_current_source(&self) -> Source {
|
||||||
self.sources
|
self.sources
|
||||||
.last()
|
.last()
|
||||||
.expect("current_source is not set")
|
.expect("current_source is not set")
|
||||||
.clone()
|
.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn downgrade<'ctx>(
|
pub(crate) fn get_source(&self, id: usize) -> Source {
|
||||||
&'ctx mut self,
|
self.sources.get(id).expect("source not found").clone()
|
||||||
source: Source,
|
}
|
||||||
extra_scope: Option<Scope<'ctx>>,
|
|
||||||
) -> Result<OwnedIr> {
|
fn downgrade<'a>(&mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<ExprId> {
|
||||||
tracing::debug!("Parsing Nix expression");
|
tracing::debug!("Parsing Nix expression");
|
||||||
|
|
||||||
self.sources.push(source.clone());
|
self.sources.push(source.clone());
|
||||||
@@ -339,29 +318,18 @@ impl Ctx {
|
|||||||
.tree()
|
.tree()
|
||||||
.expr()
|
.expr()
|
||||||
.ok_or_else(|| Error::parse_error("unexpected EOF".into()))?;
|
.ok_or_else(|| Error::parse_error("unexpected EOF".into()))?;
|
||||||
let bump = Bump::new();
|
self.downgrade_ctx(extra_scope).downgrade(expr)
|
||||||
GhostToken::new(|token| {
|
|
||||||
let ir = self
|
|
||||||
.downgrade_ctx(&bump, token, extra_scope)
|
|
||||||
.downgrade_toplevel(expr)?;
|
|
||||||
let ir = unsafe { std::mem::transmute::<RawIrRef<'_>, RawIrRef<'static>>(ir) };
|
|
||||||
Ok(OwnedIr { _bump: bump, ir })
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile<'ctx>(
|
fn compile<'a>(&'a mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<String> {
|
||||||
&'ctx mut self,
|
|
||||||
source: Source,
|
|
||||||
extra_scope: Option<Scope<'ctx>>,
|
|
||||||
) -> Result<String> {
|
|
||||||
let root = self.downgrade(source, extra_scope)?;
|
let root = self.downgrade(source, extra_scope)?;
|
||||||
tracing::debug!("Generating JavaScript code");
|
tracing::debug!("Generating JavaScript code");
|
||||||
let code = compile::<false>(root.as_ref(), self);
|
let code = compile(self.get_ir(root), self);
|
||||||
tracing::debug!("Generated code: {}", &code);
|
tracing::debug!("Generated code: {}", &code);
|
||||||
Ok(code)
|
Ok(code)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
|
pub(crate) fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
|
||||||
let scope = Scope::ScopedImport(
|
let scope = Scope::ScopedImport(
|
||||||
scope
|
scope
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@@ -370,33 +338,16 @@ impl Ctx {
|
|||||||
);
|
);
|
||||||
let root = self.downgrade(source, Some(scope))?;
|
let root = self.downgrade(source, Some(scope))?;
|
||||||
tracing::debug!("Generating JavaScript code for scoped import");
|
tracing::debug!("Generating JavaScript code for scoped import");
|
||||||
let code = compile::<true>(root.as_ref(), self);
|
let code = compile_scoped(self.get_ir(root), self);
|
||||||
tracing::debug!("Generated scoped code: {}", &code);
|
tracing::debug!("Generated scoped code: {}", &code);
|
||||||
Ok(code)
|
Ok(code)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
|
|
||||||
let root = self.downgrade(source, None)?;
|
|
||||||
tracing::debug!("Generating bytecode");
|
|
||||||
let bytecode = bytecode::compile_bytecode(root.as_ref(), self);
|
|
||||||
tracing::debug!("Compiled bytecode: {:#04X?}", bytecode.code);
|
|
||||||
Ok(bytecode)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode> {
|
|
||||||
let scope = Scope::ScopedImport(
|
|
||||||
scope
|
|
||||||
.into_iter()
|
|
||||||
.map(|k| self.symbols.get_or_intern(k))
|
|
||||||
.collect(),
|
|
||||||
);
|
|
||||||
let root = self.downgrade(source, Some(scope))?;
|
|
||||||
tracing::debug!("Generating bytecode for scoped import");
|
|
||||||
Ok(bytecode::compile_bytecode_scoped(root.as_ref(), self))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CodegenContext for Ctx {
|
impl CodegenContext for Ctx {
|
||||||
|
fn get_ir(&self, id: ExprId) -> &Ir {
|
||||||
|
self.irs.get(id.0).expect("ExprId out of bounds")
|
||||||
|
}
|
||||||
fn get_sym(&self, id: SymId) -> Symbol<'_> {
|
fn get_sym(&self, id: SymId) -> Symbol<'_> {
|
||||||
self.symbols
|
self.symbols
|
||||||
.resolve(id)
|
.resolve(id)
|
||||||
@@ -412,49 +363,12 @@ impl CodegenContext for Ctx {
|
|||||||
.checked_sub(1)
|
.checked_sub(1)
|
||||||
.expect("current_source not set")
|
.expect("current_source not set")
|
||||||
}
|
}
|
||||||
|
fn get_current_source(&self) -> crate::error::Source {
|
||||||
|
self.sources.last().expect("current_source not set").clone()
|
||||||
|
}
|
||||||
fn get_store_dir(&self) -> &str {
|
fn get_store_dir(&self) -> &str {
|
||||||
self.store.get_store_dir()
|
self.store.get_store_dir()
|
||||||
}
|
}
|
||||||
fn register_span(&self, range: rnix::TextRange) -> usize {
|
|
||||||
let spans = unsafe { &mut *self.spans.get() };
|
|
||||||
let id = spans.len();
|
|
||||||
spans.push((self.get_current_source_id(), range));
|
|
||||||
id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BytecodeContext for Ctx {
|
|
||||||
fn intern_string(&mut self, s: &str) -> u32 {
|
|
||||||
if let Some(&idx) = self.global_string_map.get(s) {
|
|
||||||
return idx;
|
|
||||||
}
|
|
||||||
let idx = self.global_strings.len() as u32;
|
|
||||||
self.global_strings.push(s.to_string());
|
|
||||||
self.global_string_map.insert(s.to_string(), idx);
|
|
||||||
idx
|
|
||||||
}
|
|
||||||
|
|
||||||
fn intern_constant(&mut self, c: Constant) -> u32 {
|
|
||||||
if let Some(&idx) = self.global_constant_map.get(&c) {
|
|
||||||
return idx;
|
|
||||||
}
|
|
||||||
let idx = self.global_constants.len() as u32;
|
|
||||||
self.global_constants.push(c.clone());
|
|
||||||
self.global_constant_map.insert(c, idx);
|
|
||||||
idx
|
|
||||||
}
|
|
||||||
|
|
||||||
fn register_span(&self, range: TextRange) -> u32 {
|
|
||||||
CodegenContext::register_span(self, range) as u32
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_sym(&self, id: SymId) -> &str {
|
|
||||||
self.symbols.resolve(id).expect("SymId out of bounds")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_current_dir(&self) -> &Path {
|
|
||||||
Ctx::get_current_dir(self)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RuntimeContext for Ctx {
|
impl RuntimeContext for Ctx {
|
||||||
@@ -470,239 +384,156 @@ impl RuntimeContext for Ctx {
|
|||||||
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
|
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
|
||||||
self.compile_scoped(source, scope)
|
self.compile_scoped(source, scope)
|
||||||
}
|
}
|
||||||
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode> {
|
|
||||||
self.compile_bytecode(source)
|
|
||||||
}
|
|
||||||
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode> {
|
|
||||||
self.compile_bytecode_scoped(source, scope)
|
|
||||||
}
|
|
||||||
fn get_source(&self, id: usize) -> Source {
|
fn get_source(&self, id: usize) -> Source {
|
||||||
self.sources.get(id).expect("source not found").clone()
|
self.get_source(id)
|
||||||
}
|
}
|
||||||
fn get_store(&self) -> &DaemonStore {
|
fn get_store(&self) -> &DaemonStore {
|
||||||
&self.store
|
&self.store
|
||||||
}
|
}
|
||||||
fn get_span(&self, id: usize) -> (usize, TextRange) {
|
|
||||||
let spans = unsafe { &*self.spans.get() };
|
|
||||||
spans[id]
|
|
||||||
}
|
|
||||||
fn get_unsynced(&mut self) -> (&[String], &[Constant], usize, usize) {
|
|
||||||
let strings_base = self.synced_strings;
|
|
||||||
let constants_base = self.synced_constants;
|
|
||||||
let new_strings = &self.global_strings[strings_base..];
|
|
||||||
let new_constants = &self.global_constants[constants_base..];
|
|
||||||
self.synced_strings = self.global_strings.len();
|
|
||||||
self.synced_constants = self.global_constants.len();
|
|
||||||
(new_strings, new_constants, strings_base, constants_base)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DisassemblerContext for Ctx {
|
|
||||||
fn lookup_string(&self, id: u32) -> &str {
|
|
||||||
self.global_strings
|
|
||||||
.get(id as usize)
|
|
||||||
.expect("string not found")
|
|
||||||
}
|
|
||||||
fn lookup_constant(&self, id: u32) -> &Constant {
|
|
||||||
self.global_constants
|
|
||||||
.get(id as usize)
|
|
||||||
.expect("constant not found")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
enum Scope<'ctx> {
|
enum Scope<'ctx> {
|
||||||
Global(&'ctx HashMap<SymId, Ir<'static, RawIrRef<'static>>>),
|
Global(&'ctx HashMap<SymId, ExprId>),
|
||||||
Repl(&'ctx HashSet<SymId>),
|
Repl(&'ctx HashSet<SymId>),
|
||||||
ScopedImport(HashSet<SymId>),
|
ScopedImport(HashSet<SymId>),
|
||||||
Let(HashMap<SymId, ThunkId>),
|
Let(HashMap<SymId, ExprId>),
|
||||||
Param(SymId, ArgId),
|
Param(SymId, ExprId),
|
||||||
}
|
}
|
||||||
|
|
||||||
struct ScopeGuard<'a, 'ctx, 'id, 'ir> {
|
struct ScopeGuard<'a, 'ctx> {
|
||||||
ctx: &'a mut DowngradeCtx<'ctx, 'id, 'ir>,
|
ctx: &'a mut DowngradeCtx<'ctx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Drop for ScopeGuard<'_, '_, '_, '_> {
|
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.ctx.scopes.pop();
|
self.ctx.scopes.pop();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'id, 'ir, 'ctx> ScopeGuard<'_, 'ctx, 'id, 'ir> {
|
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
|
||||||
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx, 'id, 'ir> {
|
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
|
||||||
self.ctx
|
self.ctx
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct ThunkScope<'id, 'ir> {
|
pub struct DowngradeCtx<'ctx> {
|
||||||
bindings: bumpalo::collections::Vec<'ir, (ThunkId, IrRef<'id, 'ir>)>,
|
ctx: &'ctx mut Ctx,
|
||||||
cache: HashTable<(IrRef<'id, 'ir>, ThunkId)>,
|
irs: Vec<Ir>,
|
||||||
hasher: DefaultHashBuilder,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'id, 'ir> ThunkScope<'id, 'ir> {
|
|
||||||
fn new_in(bump: &'ir Bump) -> Self {
|
|
||||||
Self {
|
|
||||||
bindings: bumpalo::collections::Vec::new_in(bump),
|
|
||||||
cache: HashTable::new(),
|
|
||||||
hasher: DefaultHashBuilder::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lookup_cache(&self, key: IrRef<'id, 'ir>, token: &GhostToken<'id>) -> Option<ThunkId> {
|
|
||||||
let hash = self.hasher.hash_one(IrKey(key, token));
|
|
||||||
self.cache
|
|
||||||
.find(hash, |&(ir, _)| ir_content_eq(key, ir, token))
|
|
||||||
.map(|&(_, id)| id)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_binding(&mut self, id: ThunkId, ir: IrRef<'id, 'ir>, token: &GhostToken<'id>) {
|
|
||||||
self.bindings.push((id, ir));
|
|
||||||
let hash = self.hasher.hash_one(IrKey(ir, token));
|
|
||||||
self.cache.insert_unique(hash, (ir, id), |&(ir, _)| {
|
|
||||||
self.hasher.hash_one(IrKey(ir, token))
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extend_bindings(&mut self, iter: impl IntoIterator<Item = (ThunkId, IrRef<'id, 'ir>)>) {
|
|
||||||
self.bindings.extend(iter);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct DowngradeCtx<'ctx, 'id, 'ir> {
|
|
||||||
bump: &'ir Bump,
|
|
||||||
token: GhostToken<'id>,
|
|
||||||
symbols: &'ctx mut DefaultStringInterner,
|
|
||||||
source: Source,
|
|
||||||
scopes: Vec<Scope<'ctx>>,
|
scopes: Vec<Scope<'ctx>>,
|
||||||
with_scope_count: usize,
|
with_scope_count: usize,
|
||||||
arg_count: usize,
|
arg_id: usize,
|
||||||
thunk_count: &'ctx mut usize,
|
thunk_scopes: Vec<Vec<(ExprId, ExprId)>>,
|
||||||
thunk_scopes: Vec<ThunkScope<'id, 'ir>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn should_thunk<'id>(ir: IrRef<'id, '_>, token: &GhostToken<'id>) -> bool {
|
impl<'ctx> DowngradeCtx<'ctx> {
|
||||||
!matches!(
|
|
||||||
ir.borrow(token),
|
|
||||||
Ir::Builtin(_)
|
|
||||||
| Ir::Builtins
|
|
||||||
| Ir::Int(_)
|
|
||||||
| Ir::Float(_)
|
|
||||||
| Ir::Bool(_)
|
|
||||||
| Ir::Null
|
|
||||||
| Ir::Str(_)
|
|
||||||
| Ir::Thunk(_)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'ctx, 'id, 'ir> DowngradeCtx<'ctx, 'id, 'ir> {
|
|
||||||
fn new(
|
fn new(
|
||||||
bump: &'ir Bump,
|
ctx: &'ctx mut Ctx,
|
||||||
token: GhostToken<'id>,
|
global: &'ctx HashMap<SymId, ExprId>,
|
||||||
symbols: &'ctx mut DefaultStringInterner,
|
|
||||||
global: &'ctx HashMap<SymId, Ir<'static, RawIrRef<'static>>>,
|
|
||||||
extra_scope: Option<Scope<'ctx>>,
|
extra_scope: Option<Scope<'ctx>>,
|
||||||
thunk_count: &'ctx mut usize,
|
|
||||||
source: Source,
|
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
bump,
|
|
||||||
token,
|
|
||||||
symbols,
|
|
||||||
source,
|
|
||||||
scopes: std::iter::once(Scope::Global(global))
|
scopes: std::iter::once(Scope::Global(global))
|
||||||
.chain(extra_scope)
|
.chain(extra_scope)
|
||||||
.collect(),
|
.collect(),
|
||||||
thunk_count,
|
irs: vec![],
|
||||||
arg_count: 0,
|
arg_id: 0,
|
||||||
with_scope_count: 0,
|
with_scope_count: 0,
|
||||||
thunk_scopes: vec![ThunkScope::new_in(bump)],
|
thunk_scopes: vec![Vec::new()],
|
||||||
|
ctx,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'ctx: 'ir, 'id, 'ir> DowngradeContext<'id, 'ir> for DowngradeCtx<'ctx, 'id, 'ir> {
|
impl DowngradeContext for DowngradeCtx<'_> {
|
||||||
fn new_expr(&self, expr: Ir<'ir, IrRef<'id, 'ir>>) -> IrRef<'id, 'ir> {
|
fn new_expr(&mut self, expr: Ir) -> ExprId {
|
||||||
IrRef::new(self.bump.alloc(GhostCell::new(expr)))
|
self.irs.push(expr);
|
||||||
|
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new_arg(&mut self) -> ArgId {
|
fn new_arg(&mut self, span: TextRange) -> ExprId {
|
||||||
self.arg_count += 1;
|
self.irs.push(
|
||||||
ArgId(self.arg_count - 1)
|
Arg {
|
||||||
|
inner: ArgId(self.arg_id),
|
||||||
|
span,
|
||||||
|
}
|
||||||
|
.to_ir(),
|
||||||
|
);
|
||||||
|
self.arg_id += 1;
|
||||||
|
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn maybe_thunk(&mut self, ir: IrRef<'id, 'ir>) -> IrRef<'id, 'ir> {
|
fn get_ir(&self, id: ExprId) -> &Ir {
|
||||||
if !should_thunk(ir, &self.token) {
|
if id.0 < self.ctx.irs.len() {
|
||||||
return ir;
|
self.ctx.irs.get(id.0).expect("unreachable")
|
||||||
|
} else {
|
||||||
|
self.irs
|
||||||
|
.get(id.0 - self.ctx.irs.len())
|
||||||
|
.expect("ExprId out of bounds")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let cached = self
|
fn maybe_thunk(&mut self, id: ExprId) -> ExprId {
|
||||||
.thunk_scopes
|
let ir = self.get_ir(id);
|
||||||
.last()
|
match ir {
|
||||||
.expect("no active cache scope")
|
Ir::Builtin(_)
|
||||||
.lookup_cache(ir, &self.token);
|
| Ir::Builtins(_)
|
||||||
|
| Ir::Int(_)
|
||||||
if let Some(id) = cached {
|
| Ir::Float(_)
|
||||||
return IrRef::alloc(self.bump, Ir::Thunk(id));
|
| Ir::Bool(_)
|
||||||
|
| Ir::Null(_)
|
||||||
|
| Ir::Str(_)
|
||||||
|
| Ir::Thunk(_) => id,
|
||||||
|
_ => {
|
||||||
|
let span = ir.span();
|
||||||
|
let slot = self.reserve_slots(1).next().expect("reserve_slots failed");
|
||||||
|
self.replace_ir(slot, Thunk { inner: slot, span }.to_ir());
|
||||||
|
self.register_thunk(slot, id);
|
||||||
|
slot
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let id = ThunkId(*self.thunk_count);
|
|
||||||
*self.thunk_count = self.thunk_count.checked_add(1).expect("thunk id overflow");
|
|
||||||
self.thunk_scopes
|
|
||||||
.last_mut()
|
|
||||||
.expect("no active cache scope")
|
|
||||||
.add_binding(id, ir, &self.token);
|
|
||||||
IrRef::alloc(self.bump, Ir::Thunk(id))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new_sym(&mut self, sym: String) -> SymId {
|
fn new_sym(&mut self, sym: String) -> SymId {
|
||||||
self.symbols.get_or_intern(sym)
|
self.ctx.symbols.get_or_intern(sym)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_sym(&self, id: SymId) -> Symbol<'_> {
|
fn get_sym(&self, id: SymId) -> Symbol<'_> {
|
||||||
self.symbols.resolve(id).expect("no symbol found").into()
|
self.ctx.get_sym(id)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lookup(&self, sym: SymId, span: TextRange) -> Result<IrRef<'id, 'ir>> {
|
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId> {
|
||||||
for scope in self.scopes.iter().rev() {
|
for scope in self.scopes.iter().rev() {
|
||||||
match scope {
|
match scope {
|
||||||
&Scope::Global(global_scope) => {
|
&Scope::Global(global_scope) => {
|
||||||
if let Some(expr) = global_scope.get(&sym) {
|
if let Some(&expr) = global_scope.get(&sym) {
|
||||||
let ir = match expr {
|
return Ok(expr);
|
||||||
Ir::Builtins => Ir::Builtins,
|
|
||||||
Ir::Builtin(s) => Ir::Builtin(*s),
|
|
||||||
Ir::Bool(b) => Ir::Bool(*b),
|
|
||||||
Ir::Null => Ir::Null,
|
|
||||||
_ => unreachable!("globals should only contain leaf IR nodes"),
|
|
||||||
};
|
|
||||||
return Ok(self.new_expr(ir));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
&Scope::Repl(repl_bindings) => {
|
&Scope::Repl(repl_bindings) => {
|
||||||
if repl_bindings.contains(&sym) {
|
if repl_bindings.contains(&sym) {
|
||||||
return Ok(self.new_expr(Ir::ReplBinding(sym)));
|
return Ok(self.new_expr(ReplBinding { inner: sym, span }.to_ir()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Scope::ScopedImport(scoped_bindings) => {
|
Scope::ScopedImport(scoped_bindings) => {
|
||||||
if scoped_bindings.contains(&sym) {
|
if scoped_bindings.contains(&sym) {
|
||||||
return Ok(self.new_expr(Ir::ScopedImportBinding(sym)));
|
return Ok(self.new_expr(ScopedImportBinding { inner: sym, span }.to_ir()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Scope::Let(let_scope) => {
|
Scope::Let(let_scope) => {
|
||||||
if let Some(&expr) = let_scope.get(&sym) {
|
if let Some(&expr) = let_scope.get(&sym) {
|
||||||
return Ok(self.new_expr(Ir::Thunk(expr)));
|
return Ok(self.new_expr(Thunk { inner: expr, span }.to_ir()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
&Scope::Param(param_sym, id) => {
|
&Scope::Param(param_sym, expr) => {
|
||||||
if param_sym == sym {
|
if param_sym == sym {
|
||||||
return Ok(self.new_expr(Ir::Arg(id)));
|
return Ok(expr);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.with_scope_count > 0 {
|
if self.with_scope_count > 0 {
|
||||||
Ok(self.new_expr(Ir::WithLookup(sym)))
|
Ok(self.new_expr(WithLookup { inner: sym, span }.to_ir()))
|
||||||
} else {
|
} else {
|
||||||
Err(Error::downgrade_error(
|
Err(Error::downgrade_error(
|
||||||
format!("'{}' not found", self.get_sym(sym)),
|
format!("'{}' not found", self.get_sym(sym)),
|
||||||
@@ -712,37 +543,49 @@ impl<'ctx: 'ir, 'id, 'ir> DowngradeContext<'id, 'ir> for DowngradeCtx<'ctx, 'id,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn replace_ir(&mut self, id: ExprId, expr: Ir) {
|
||||||
|
let local_id = id.0 - self.ctx.irs.len();
|
||||||
|
*self.irs.get_mut(local_id).expect("ExprId out of bounds") = expr;
|
||||||
|
}
|
||||||
|
|
||||||
fn get_current_source(&self) -> Source {
|
fn get_current_source(&self) -> Source {
|
||||||
self.source.clone()
|
self.ctx.get_current_source()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_let_scope<F, R>(&mut self, keys: &[SymId], f: F) -> Result<R>
|
#[allow(refining_impl_trait)]
|
||||||
|
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
|
||||||
|
let start = self.ctx.irs.len() + self.irs.len();
|
||||||
|
let range = (start..start + slots).map(ExprId);
|
||||||
|
let span = rnix::TextRange::default();
|
||||||
|
// Fill reserved slots with placeholder value
|
||||||
|
self.irs.extend(
|
||||||
|
range
|
||||||
|
.clone()
|
||||||
|
.map(|slot| Thunk { inner: slot, span }.to_ir()),
|
||||||
|
);
|
||||||
|
range
|
||||||
|
}
|
||||||
|
|
||||||
|
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
|
||||||
|
use crate::ir::TopLevel;
|
||||||
|
let body = root.downgrade(&mut self)?;
|
||||||
|
let thunks = self.thunk_scopes.pop().expect("no thunk scope left???");
|
||||||
|
let span = self.get_ir(body).span();
|
||||||
|
let top_level = self.new_expr(TopLevel { body, thunks, span }.to_ir());
|
||||||
|
self.ctx.irs.extend(self.irs);
|
||||||
|
Ok(top_level)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
|
||||||
where
|
where
|
||||||
F: FnOnce(&mut Self) -> Result<(bumpalo::collections::Vec<'ir, IrRef<'id, 'ir>>, R)>,
|
F: FnOnce(&mut Self) -> R,
|
||||||
{
|
{
|
||||||
let base = *self.thunk_count;
|
self.scopes.push(Scope::Let(bindings));
|
||||||
*self.thunk_count = self
|
|
||||||
.thunk_count
|
|
||||||
.checked_add(keys.len())
|
|
||||||
.expect("thunk id overflow");
|
|
||||||
let iter = keys.iter().enumerate().map(|(offset, &key)| {
|
|
||||||
(
|
|
||||||
key,
|
|
||||||
ThunkId(unsafe { base.checked_add(offset).unwrap_unchecked() }),
|
|
||||||
)
|
|
||||||
});
|
|
||||||
self.scopes.push(Scope::Let(iter.collect()));
|
|
||||||
let (vals, ret) = {
|
|
||||||
let mut guard = ScopeGuard { ctx: self };
|
let mut guard = ScopeGuard { ctx: self };
|
||||||
f(guard.as_ctx())?
|
f(guard.as_ctx())
|
||||||
};
|
|
||||||
assert_eq!(keys.len(), vals.len());
|
|
||||||
let scope = self.thunk_scopes.last_mut().expect("no active thunk scope");
|
|
||||||
scope.extend_bindings((base..base + keys.len()).map(ThunkId).zip(vals));
|
|
||||||
Ok(ret)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ArgId, f: F) -> R
|
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
|
||||||
where
|
where
|
||||||
F: FnOnce(&mut Self) -> R,
|
F: FnOnce(&mut Self) -> R,
|
||||||
{
|
{
|
||||||
@@ -761,41 +604,22 @@ impl<'ctx: 'ir, 'id, 'ir> DowngradeContext<'id, 'ir> for DowngradeCtx<'ctx, 'id,
|
|||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_thunk_scope<F, R>(
|
fn with_thunk_scope<F, R>(&mut self, f: F) -> (R, Vec<(ExprId, ExprId)>)
|
||||||
&mut self,
|
|
||||||
f: F,
|
|
||||||
) -> (
|
|
||||||
R,
|
|
||||||
bumpalo::collections::Vec<'ir, (ThunkId, IrRef<'id, 'ir>)>,
|
|
||||||
)
|
|
||||||
where
|
where
|
||||||
F: FnOnce(&mut Self) -> R,
|
F: FnOnce(&mut Self) -> R,
|
||||||
{
|
{
|
||||||
self.thunk_scopes.push(ThunkScope::new_in(self.bump));
|
self.thunk_scopes.push(Vec::new());
|
||||||
let ret = f(self);
|
let ret = f(self);
|
||||||
(
|
(
|
||||||
ret,
|
ret,
|
||||||
self.thunk_scopes
|
self.thunk_scopes.pop().expect("no thunk scope left???"),
|
||||||
.pop()
|
|
||||||
.expect("no thunk scope left???")
|
|
||||||
.bindings,
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bump(&self) -> &'ir bumpalo::Bump {
|
fn register_thunk(&mut self, slot: ExprId, inner: ExprId) {
|
||||||
self.bump
|
self.thunk_scopes
|
||||||
}
|
.last_mut()
|
||||||
}
|
.expect("register_thunk without active scope")
|
||||||
|
.push((slot, inner));
|
||||||
impl<'id, 'ir, 'ctx: 'ir> DowngradeCtx<'ctx, 'id, 'ir> {
|
|
||||||
fn downgrade_toplevel(mut self, root: rnix::ast::Expr) -> Result<RawIrRef<'ir>> {
|
|
||||||
let body = root.downgrade(&mut self)?;
|
|
||||||
let thunks = self
|
|
||||||
.thunk_scopes
|
|
||||||
.pop()
|
|
||||||
.expect("no thunk scope left???")
|
|
||||||
.bindings;
|
|
||||||
let ir = IrRef::alloc(self.bump, Ir::TopLevel { body, thunks });
|
|
||||||
Ok(ir.freeze(self.token))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,354 +0,0 @@
|
|||||||
use std::fmt::Write;
|
|
||||||
|
|
||||||
use colored::Colorize;
|
|
||||||
use num_enum::TryFromPrimitive;
|
|
||||||
|
|
||||||
use crate::bytecode::{Bytecode, Constant, Op};
|
|
||||||
|
|
||||||
pub(crate) trait DisassemblerContext {
|
|
||||||
fn lookup_string(&self, id: u32) -> &str;
|
|
||||||
fn lookup_constant(&self, id: u32) -> &Constant;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) struct Disassembler<'a, Ctx> {
|
|
||||||
code: &'a [u8],
|
|
||||||
ctx: &'a Ctx,
|
|
||||||
pos: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, Ctx: DisassemblerContext> Disassembler<'a, Ctx> {
|
|
||||||
pub fn new(bytecode: &'a Bytecode, ctx: &'a Ctx) -> Self {
|
|
||||||
Self {
|
|
||||||
code: &bytecode.code,
|
|
||||||
ctx,
|
|
||||||
pos: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_u8(&mut self) -> u8 {
|
|
||||||
let b = self.code[self.pos];
|
|
||||||
self.pos += 1;
|
|
||||||
b
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_u16(&mut self) -> u16 {
|
|
||||||
let bytes = self.code[self.pos..self.pos + 2]
|
|
||||||
.try_into()
|
|
||||||
.expect("no enough bytes");
|
|
||||||
self.pos += 2;
|
|
||||||
u16::from_le_bytes(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_u32(&mut self) -> u32 {
|
|
||||||
let bytes = self.code[self.pos..self.pos + 4]
|
|
||||||
.try_into()
|
|
||||||
.expect("no enough bytes");
|
|
||||||
self.pos += 4;
|
|
||||||
u32::from_le_bytes(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_i32(&mut self) -> i32 {
|
|
||||||
let bytes = self.code[self.pos..self.pos + 4]
|
|
||||||
.try_into()
|
|
||||||
.expect("no enough bytes");
|
|
||||||
self.pos += 4;
|
|
||||||
i32::from_le_bytes(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn disassemble(&mut self) -> String {
|
|
||||||
self.disassemble_impl(false)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn disassemble_colored(&mut self) -> String {
|
|
||||||
self.disassemble_impl(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn disassemble_impl(&mut self, color: bool) -> String {
|
|
||||||
let mut out = String::new();
|
|
||||||
if color {
|
|
||||||
let _ = writeln!(out, "{}", "=== Bytecode Disassembly ===".bold().white());
|
|
||||||
let _ = writeln!(
|
|
||||||
out,
|
|
||||||
"{} {}",
|
|
||||||
"Length:".white(),
|
|
||||||
format!("{} bytes", self.code.len()).cyan()
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
let _ = writeln!(out, "=== Bytecode Disassembly ===");
|
|
||||||
let _ = writeln!(out, "Length: {} bytes", self.code.len());
|
|
||||||
}
|
|
||||||
|
|
||||||
while self.pos < self.code.len() {
|
|
||||||
let start_pos = self.pos;
|
|
||||||
let op_byte = self.read_u8();
|
|
||||||
let (mnemonic, args) = self.decode_instruction(op_byte, start_pos);
|
|
||||||
|
|
||||||
let bytes_slice = &self.code[start_pos + 1..self.pos];
|
|
||||||
|
|
||||||
for (i, chunk) in bytes_slice.chunks(4).enumerate() {
|
|
||||||
let bytes_str = {
|
|
||||||
let mut temp = String::new();
|
|
||||||
if i == 0 {
|
|
||||||
let _ = write!(&mut temp, "{:02x}", self.code[start_pos]);
|
|
||||||
} else {
|
|
||||||
let _ = write!(&mut temp, " ");
|
|
||||||
}
|
|
||||||
for b in chunk.iter() {
|
|
||||||
let _ = write!(&mut temp, " {:02x}", b);
|
|
||||||
}
|
|
||||||
temp
|
|
||||||
};
|
|
||||||
|
|
||||||
if i == 0 {
|
|
||||||
if color {
|
|
||||||
let sep = if args.is_empty() { "" } else { " " };
|
|
||||||
let _ = writeln!(
|
|
||||||
out,
|
|
||||||
"{} {:<14} | {}{}{}",
|
|
||||||
format!("{:04x}", start_pos).dimmed(),
|
|
||||||
bytes_str.green(),
|
|
||||||
mnemonic.yellow().bold(),
|
|
||||||
sep,
|
|
||||||
args.cyan()
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
let op_str = if args.is_empty() {
|
|
||||||
mnemonic.to_string()
|
|
||||||
} else {
|
|
||||||
format!("{} {}", mnemonic, args)
|
|
||||||
};
|
|
||||||
let _ = writeln!(out, "{:04x} {:<14} | {}", start_pos, bytes_str, op_str);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let extra_width = start_pos.ilog2() >> 4;
|
|
||||||
if color {
|
|
||||||
let _ = write!(out, " ");
|
|
||||||
for _ in 0..extra_width {
|
|
||||||
let _ = write!(out, " ");
|
|
||||||
}
|
|
||||||
let _ = writeln!(out, " {:<14} |", bytes_str.green());
|
|
||||||
} else {
|
|
||||||
let _ = write!(out, " ");
|
|
||||||
for _ in 0..extra_width {
|
|
||||||
let _ = write!(out, " ");
|
|
||||||
}
|
|
||||||
let _ = writeln!(out, " {:<14} |", bytes_str);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
out
|
|
||||||
}
|
|
||||||
|
|
||||||
fn decode_instruction(&mut self, op_byte: u8, current_pc: usize) -> (&'static str, String) {
|
|
||||||
let op = Op::try_from_primitive(op_byte).expect("invalid op code");
|
|
||||||
|
|
||||||
match op {
|
|
||||||
Op::PushConst => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let val = self.ctx.lookup_constant(idx);
|
|
||||||
let val_str = match val {
|
|
||||||
Constant::Int(i) => format!("Int({})", i),
|
|
||||||
Constant::Float(f) => format!("Float(bits: {})", f),
|
|
||||||
};
|
|
||||||
("PushConst", format!("@{} ({})", idx, val_str))
|
|
||||||
}
|
|
||||||
Op::PushString => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let s = self.ctx.lookup_string(idx);
|
|
||||||
let len = s.len();
|
|
||||||
let mut s_fmt = format!("{:?}", s);
|
|
||||||
if s_fmt.len() > 60 {
|
|
||||||
s_fmt.truncate(57);
|
|
||||||
#[allow(clippy::unwrap_used)]
|
|
||||||
write!(s_fmt, "...\" (total {len} bytes)").unwrap();
|
|
||||||
}
|
|
||||||
("PushString", format!("@{} {}", idx, s_fmt))
|
|
||||||
}
|
|
||||||
Op::PushNull => ("PushNull", String::new()),
|
|
||||||
Op::PushTrue => ("PushTrue", String::new()),
|
|
||||||
Op::PushFalse => ("PushFalse", String::new()),
|
|
||||||
|
|
||||||
Op::LoadLocal => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
("LoadLocal", format!("[{}]", idx))
|
|
||||||
}
|
|
||||||
Op::LoadOuter => {
|
|
||||||
let depth = self.read_u8();
|
|
||||||
let idx = self.read_u32();
|
|
||||||
("LoadOuter", format!("depth={} [{}]", depth, idx))
|
|
||||||
}
|
|
||||||
Op::StoreLocal => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
("StoreLocal", format!("[{}]", idx))
|
|
||||||
}
|
|
||||||
Op::AllocLocals => {
|
|
||||||
let count = self.read_u32();
|
|
||||||
("AllocLocals", format!("count={}", count))
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::MakeThunk => {
|
|
||||||
let offset = self.read_u32();
|
|
||||||
let label_idx = self.read_u32();
|
|
||||||
let label = self.ctx.lookup_string(label_idx);
|
|
||||||
("MakeThunk", format!("-> {:04x} label={}", offset, label))
|
|
||||||
}
|
|
||||||
Op::MakeClosure => {
|
|
||||||
let offset = self.read_u32();
|
|
||||||
let slots = self.read_u32();
|
|
||||||
("MakeClosure", format!("-> {:04x} slots={}", offset, slots))
|
|
||||||
}
|
|
||||||
Op::MakePatternClosure => {
|
|
||||||
let offset = self.read_u32();
|
|
||||||
let slots = self.read_u32();
|
|
||||||
let req_count = self.read_u16();
|
|
||||||
let opt_count = self.read_u16();
|
|
||||||
let ellipsis = self.read_u8() != 0;
|
|
||||||
|
|
||||||
let mut arg_str = format!(
|
|
||||||
"-> {:04x} slots={} req={} opt={} ...={})",
|
|
||||||
offset, slots, req_count, opt_count, ellipsis
|
|
||||||
);
|
|
||||||
|
|
||||||
arg_str.push_str(" Args=[");
|
|
||||||
for _ in 0..req_count {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
arg_str.push_str(&format!("Req({}) ", self.ctx.lookup_string(idx)));
|
|
||||||
}
|
|
||||||
for _ in 0..opt_count {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
arg_str.push_str(&format!("Opt({}) ", self.ctx.lookup_string(idx)));
|
|
||||||
}
|
|
||||||
|
|
||||||
let total_args = req_count + opt_count;
|
|
||||||
for _ in 0..total_args {
|
|
||||||
let _name_idx = self.read_u32();
|
|
||||||
let _span_id = self.read_u32();
|
|
||||||
}
|
|
||||||
arg_str.push(']');
|
|
||||||
|
|
||||||
("MakePatternClosure", arg_str)
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::Call => {
|
|
||||||
let span_id = self.read_u32();
|
|
||||||
("Call", format!("span={}", span_id))
|
|
||||||
}
|
|
||||||
Op::CallNoSpan => ("CallNoSpan", String::new()),
|
|
||||||
|
|
||||||
Op::MakeAttrs => {
|
|
||||||
let count = self.read_u32();
|
|
||||||
("MakeAttrs", format!("size={}", count))
|
|
||||||
}
|
|
||||||
Op::MakeAttrsDyn => {
|
|
||||||
let static_count = self.read_u32();
|
|
||||||
let dyn_count = self.read_u32();
|
|
||||||
(
|
|
||||||
"MakeAttrsDyn",
|
|
||||||
format!("static={} dyn={}", static_count, dyn_count),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
Op::MakeEmptyAttrs => ("MakeEmptyAttrs", String::new()),
|
|
||||||
|
|
||||||
Op::Select => {
|
|
||||||
let path_len = self.read_u16();
|
|
||||||
let span_id = self.read_u32();
|
|
||||||
("Select", format!("path_len={} span={}", path_len, span_id))
|
|
||||||
}
|
|
||||||
Op::SelectDefault => {
|
|
||||||
let path_len = self.read_u16();
|
|
||||||
let span_id = self.read_u32();
|
|
||||||
(
|
|
||||||
"SelectDefault",
|
|
||||||
format!("path_len={} span={}", path_len, span_id),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
Op::HasAttr => {
|
|
||||||
let path_len = self.read_u16();
|
|
||||||
("HasAttr", format!("path_len={}", path_len))
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::MakeList => {
|
|
||||||
let count = self.read_u32();
|
|
||||||
("MakeList", format!("size={}", count))
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::OpAdd => ("OpAdd", String::new()),
|
|
||||||
Op::OpSub => ("OpSub", String::new()),
|
|
||||||
Op::OpMul => ("OpMul", String::new()),
|
|
||||||
Op::OpDiv => ("OpDiv", String::new()),
|
|
||||||
Op::OpEq => ("OpEq", String::new()),
|
|
||||||
Op::OpNeq => ("OpNeq", String::new()),
|
|
||||||
Op::OpLt => ("OpLt", String::new()),
|
|
||||||
Op::OpGt => ("OpGt", String::new()),
|
|
||||||
Op::OpLeq => ("OpLeq", String::new()),
|
|
||||||
Op::OpGeq => ("OpGeq", String::new()),
|
|
||||||
Op::OpConcat => ("OpConcat", String::new()),
|
|
||||||
Op::OpUpdate => ("OpUpdate", String::new()),
|
|
||||||
Op::OpNeg => ("OpNeg", String::new()),
|
|
||||||
Op::OpNot => ("OpNot", String::new()),
|
|
||||||
|
|
||||||
Op::ForceBool => ("ForceBool", String::new()),
|
|
||||||
|
|
||||||
Op::JumpIfFalse => {
|
|
||||||
let offset = self.read_i32();
|
|
||||||
let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
|
|
||||||
(
|
|
||||||
"JumpIfFalse",
|
|
||||||
format!("-> {:04x} offset={}", target, offset),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
Op::JumpIfTrue => {
|
|
||||||
let offset = self.read_i32();
|
|
||||||
let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
|
|
||||||
("JumpIfTrue", format!("-> {:04x} offset={}", target, offset))
|
|
||||||
}
|
|
||||||
Op::Jump => {
|
|
||||||
let offset = self.read_i32();
|
|
||||||
let target = (current_pc as isize + 1 + 4 + offset as isize) as usize;
|
|
||||||
("Jump", format!("-> {:04x} offset={}", target, offset))
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::ConcatStrings => {
|
|
||||||
let count = self.read_u16();
|
|
||||||
let force = self.read_u8();
|
|
||||||
("ConcatStrings", format!("count={} force={}", count, force))
|
|
||||||
}
|
|
||||||
Op::ResolvePath => ("ResolvePath", String::new()),
|
|
||||||
Op::Assert => {
|
|
||||||
let raw_idx = self.read_u32();
|
|
||||||
let span_id = self.read_u32();
|
|
||||||
("Assert", format!("text_id={} span={}", raw_idx, span_id))
|
|
||||||
}
|
|
||||||
Op::PushWith => ("PushWith", String::new()),
|
|
||||||
Op::PopWith => ("PopWith", String::new()),
|
|
||||||
Op::WithLookup => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let name = self.ctx.lookup_string(idx);
|
|
||||||
("WithLookup", format!("{:?}", name))
|
|
||||||
}
|
|
||||||
|
|
||||||
Op::LoadBuiltins => ("LoadBuiltins", String::new()),
|
|
||||||
Op::LoadBuiltin => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let name = self.ctx.lookup_string(idx);
|
|
||||||
("LoadBuiltin", format!("{:?}", name))
|
|
||||||
}
|
|
||||||
Op::MkPos => {
|
|
||||||
let span_id = self.read_u32();
|
|
||||||
("MkPos", format!("id={}", span_id))
|
|
||||||
}
|
|
||||||
Op::LoadReplBinding => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let name = self.ctx.lookup_string(idx);
|
|
||||||
("LoadReplBinding", format!("{:?}", name))
|
|
||||||
}
|
|
||||||
Op::LoadScopedBinding => {
|
|
||||||
let idx = self.read_u32();
|
|
||||||
let name = self.ctx.lookup_string(idx);
|
|
||||||
("LoadScopedBinding", format!("{:?}", name))
|
|
||||||
}
|
|
||||||
Op::Return => ("Return", String::new()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -292,32 +292,43 @@ fn parse_frames(stack: &str, ctx: &impl RuntimeContext) -> Vec<NixStackFrame> {
|
|||||||
let mut frames = Vec::new();
|
let mut frames = Vec::new();
|
||||||
|
|
||||||
for line in stack.lines() {
|
for line in stack.lines() {
|
||||||
// Format: NIX_STACK_FRAME:span_id:message
|
// Format: NIX_STACK_FRAME:source_id:start:end[:extra_data]
|
||||||
let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else {
|
let Some(rest) = line.strip_prefix("NIX_STACK_FRAME:") else {
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
let parts: Vec<&str> = rest.splitn(2, ':').collect();
|
let parts: Vec<&str> = rest.splitn(4, ':').collect();
|
||||||
|
|
||||||
if parts.is_empty() {
|
if parts.len() < 3 {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let span_id: usize = match parts[0].parse() {
|
let src = match parts[0].parse() {
|
||||||
Ok(id) => id,
|
Ok(id) => ctx.get_source(id),
|
||||||
|
Err(_) => continue,
|
||||||
|
};
|
||||||
|
let start: u32 = match parts[1].parse() {
|
||||||
|
Ok(v) => v,
|
||||||
|
Err(_) => continue,
|
||||||
|
};
|
||||||
|
let end: u32 = match parts[2].parse() {
|
||||||
|
Ok(v) => v,
|
||||||
Err(_) => continue,
|
Err(_) => continue,
|
||||||
};
|
};
|
||||||
let (source_id, span) = ctx.get_span(span_id);
|
|
||||||
let src = ctx.get_source(source_id);
|
|
||||||
|
|
||||||
let message = if parts.len() == 2 {
|
let span = rnix::TextRange::new(rnix::TextSize::from(start), rnix::TextSize::from(end));
|
||||||
parts[1].to_string()
|
|
||||||
|
let message = {
|
||||||
|
if parts.len() == 4 {
|
||||||
|
parts[3].to_string()
|
||||||
} else {
|
} else {
|
||||||
String::new()
|
String::new()
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
frames.push(NixStackFrame { span, message, src });
|
frames.push(NixStackFrame { span, message, src });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Deduplicate consecutive identical frames
|
||||||
frames.dedup_by(|a, b| a.span == b.span && a.message == b.message);
|
frames.dedup_by(|a, b| a.span == b.span && a.message == b.message);
|
||||||
|
|
||||||
frames
|
frames
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
use deno_core::OpState;
|
use deno_core::OpState;
|
||||||
use deno_core::ToV8;
|
|
||||||
use deno_core::op2;
|
use deno_core::op2;
|
||||||
use nix_compat::nixhash::HashAlgo;
|
use nix_compat::nixhash::HashAlgo;
|
||||||
use nix_compat::nixhash::NixHash;
|
use nix_compat::nixhash::NixHash;
|
||||||
|
use serde::Serialize;
|
||||||
use tracing::{debug, info, warn};
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
use crate::runtime::OpStateExt;
|
use crate::runtime::OpStateExt;
|
||||||
@@ -22,19 +22,19 @@ pub use metadata_cache::MetadataCache;
|
|||||||
use crate::nar;
|
use crate::nar;
|
||||||
use crate::runtime::NixRuntimeError;
|
use crate::runtime::NixRuntimeError;
|
||||||
|
|
||||||
#[derive(ToV8)]
|
#[derive(Serialize)]
|
||||||
pub struct FetchUrlResult {
|
pub struct FetchUrlResult {
|
||||||
pub store_path: String,
|
pub store_path: String,
|
||||||
pub hash: String,
|
pub hash: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(ToV8)]
|
#[derive(Serialize)]
|
||||||
pub struct FetchTarballResult {
|
pub struct FetchTarballResult {
|
||||||
pub store_path: String,
|
pub store_path: String,
|
||||||
pub nar_hash: String,
|
pub nar_hash: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(ToV8)]
|
#[derive(Serialize)]
|
||||||
pub struct FetchGitResult {
|
pub struct FetchGitResult {
|
||||||
pub out_path: String,
|
pub out_path: String,
|
||||||
pub rev: String,
|
pub rev: String,
|
||||||
@@ -47,6 +47,7 @@ pub struct FetchGitResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[op2]
|
#[op2]
|
||||||
|
#[serde]
|
||||||
pub fn op_fetch_url<Ctx: RuntimeContext>(
|
pub fn op_fetch_url<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] url: String,
|
#[string] url: String,
|
||||||
@@ -151,6 +152,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[op2]
|
#[op2]
|
||||||
|
#[serde]
|
||||||
pub fn op_fetch_tarball<Ctx: RuntimeContext>(
|
pub fn op_fetch_tarball<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] url: String,
|
#[string] url: String,
|
||||||
@@ -264,6 +266,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[op2]
|
#[op2]
|
||||||
|
#[serde]
|
||||||
pub fn op_fetch_git<Ctx: RuntimeContext>(
|
pub fn op_fetch_git<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] url: String,
|
#[string] url: String,
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use reqwest::blocking::Client;
|
use reqwest::blocking::Client;
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
pub struct Downloader {
|
pub struct Downloader {
|
||||||
client: Client,
|
client: Client,
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
#![allow(dead_code)]
|
#![allow(dead_code)]
|
||||||
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::time::{SystemTime, UNIX_EPOCH};
|
|
||||||
|
|
||||||
use rusqlite::{Connection, OptionalExtension, params};
|
use rusqlite::{Connection, OptionalExtension, params};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::time::{SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum CacheError {
|
pub enum CacheError {
|
||||||
|
|||||||
605
nix-js/src/ir.rs
605
nix-js/src/ir.rs
@@ -1,158 +1,46 @@
|
|||||||
use std::{
|
use derive_more::{IsVariant, TryUnwrap, Unwrap};
|
||||||
hash::{Hash, Hasher},
|
use hashbrown::HashMap;
|
||||||
ops::Deref,
|
|
||||||
};
|
|
||||||
|
|
||||||
use bumpalo::{Bump, boxed::Box, collections::Vec};
|
|
||||||
use ghost_cell::{GhostCell, GhostToken};
|
|
||||||
use rnix::{TextRange, ast};
|
use rnix::{TextRange, ast};
|
||||||
use string_interner::symbol::SymbolU32;
|
use string_interner::symbol::SymbolU32;
|
||||||
|
|
||||||
pub type HashMap<'ir, K, V> = hashbrown::HashMap<K, V, hashbrown::DefaultHashBuilder, &'ir Bump>;
|
use nix_js_macros::ir;
|
||||||
|
|
||||||
#[repr(transparent)]
|
ir! {
|
||||||
#[derive(Clone, Copy)]
|
Ir,
|
||||||
pub struct IrRef<'id, 'ir>(&'ir GhostCell<'id, Ir<'ir, Self>>);
|
|
||||||
|
|
||||||
impl<'id, 'ir> IrRef<'id, 'ir> {
|
|
||||||
pub fn new(ir: &'ir GhostCell<'id, Ir<'ir, Self>>) -> Self {
|
|
||||||
Self(ir)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn alloc(bump: &'ir Bump, ir: Ir<'ir, Self>) -> Self {
|
|
||||||
Self(bump.alloc(GhostCell::new(ir)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Freeze a mutable IR reference into a read-only one, consuming the
|
|
||||||
/// `GhostToken` to prevent any further mutation.
|
|
||||||
///
|
|
||||||
/// # Safety
|
|
||||||
/// The transmute is sound because:
|
|
||||||
/// - `GhostCell<'id, T>` is `#[repr(transparent)]` over `T`
|
|
||||||
/// - `IrRef<'id, 'ir>` is `#[repr(transparent)]` over
|
|
||||||
/// `&'ir GhostCell<'id, Ir<'ir, Self>>`
|
|
||||||
/// - `RawIrRef<'ir>` is `#[repr(transparent)]` over `&'ir Ir<'ir, Self>`
|
|
||||||
/// - `Ir<'ir, Ref>` is `#[repr(C)]` and both ref types are pointer-sized
|
|
||||||
///
|
|
||||||
/// Consuming the `GhostToken` guarantees no `borrow_mut` calls can occur
|
|
||||||
/// afterwards, so the shared `&Ir` references from `RawIrRef::Deref` can
|
|
||||||
/// never alias with mutable references.
|
|
||||||
pub fn freeze(self, _token: GhostToken<'id>) -> RawIrRef<'ir> {
|
|
||||||
unsafe { std::mem::transmute(self) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'id, 'ir> Deref for IrRef<'id, 'ir> {
|
|
||||||
type Target = GhostCell<'id, Ir<'ir, IrRef<'id, 'ir>>>;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[repr(transparent)]
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
pub struct RawIrRef<'ir>(&'ir Ir<'ir, Self>);
|
|
||||||
|
|
||||||
impl<'ir> Deref for RawIrRef<'ir> {
|
|
||||||
type Target = Ir<'ir, RawIrRef<'ir>>;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[repr(C)]
|
|
||||||
pub enum Ir<'ir, Ref> {
|
|
||||||
Int(i64),
|
Int(i64),
|
||||||
Float(f64),
|
Float(f64),
|
||||||
Bool(bool),
|
Bool(bool),
|
||||||
Null,
|
Null,
|
||||||
Str(Box<'ir, String>),
|
Str { pub val: String },
|
||||||
AttrSet {
|
AttrSet { pub stcs: HashMap<SymId, (ExprId, rnix::TextRange)>, pub dyns: Vec<(ExprId, ExprId, rnix::TextRange)> },
|
||||||
stcs: HashMap<'ir, SymId, (Ref, TextRange)>,
|
List { pub items: Vec<ExprId> },
|
||||||
dyns: Vec<'ir, (Ref, Ref, TextRange)>,
|
|
||||||
},
|
|
||||||
List {
|
|
||||||
items: Vec<'ir, Ref>,
|
|
||||||
},
|
|
||||||
Path(Ref),
|
|
||||||
ConcatStrings {
|
|
||||||
parts: Vec<'ir, Ref>,
|
|
||||||
force_string: bool,
|
|
||||||
},
|
|
||||||
|
|
||||||
// OPs
|
HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> },
|
||||||
UnOp {
|
BinOp { pub lhs: ExprId, pub rhs: ExprId, pub kind: BinOpKind },
|
||||||
rhs: Ref,
|
UnOp { pub rhs: ExprId, pub kind: UnOpKind },
|
||||||
kind: UnOpKind,
|
Select { pub expr: ExprId, pub attrpath: Vec<Attr>, pub default: Option<ExprId> },
|
||||||
},
|
If { pub cond: ExprId, pub consq: ExprId, pub alter: ExprId },
|
||||||
BinOp {
|
Call { pub func: ExprId, pub arg: ExprId },
|
||||||
lhs: Ref,
|
Assert { pub assertion: ExprId, pub expr: ExprId, pub assertion_raw: String },
|
||||||
rhs: Ref,
|
ConcatStrings { pub parts: Vec<ExprId>, pub force_string: bool },
|
||||||
kind: BinOpKind,
|
Path { pub expr: ExprId },
|
||||||
},
|
Func { pub body: ExprId, pub param: Option<Param>, pub arg: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
|
||||||
HasAttr {
|
TopLevel { pub body: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
|
||||||
lhs: Ref,
|
|
||||||
rhs: Vec<'ir, Attr<Ref>>,
|
|
||||||
},
|
|
||||||
Select {
|
|
||||||
expr: Ref,
|
|
||||||
attrpath: Vec<'ir, Attr<Ref>>,
|
|
||||||
default: Option<Ref>,
|
|
||||||
span: TextRange,
|
|
||||||
},
|
|
||||||
|
|
||||||
// Conditionals
|
|
||||||
If {
|
|
||||||
cond: Ref,
|
|
||||||
consq: Ref,
|
|
||||||
alter: Ref,
|
|
||||||
},
|
|
||||||
Assert {
|
|
||||||
assertion: Ref,
|
|
||||||
expr: Ref,
|
|
||||||
assertion_raw: String,
|
|
||||||
span: TextRange,
|
|
||||||
},
|
|
||||||
|
|
||||||
With {
|
|
||||||
namespace: Ref,
|
|
||||||
body: Ref,
|
|
||||||
thunks: Vec<'ir, (ThunkId, Ref)>,
|
|
||||||
},
|
|
||||||
WithLookup(SymId),
|
|
||||||
|
|
||||||
// Function related
|
|
||||||
Func {
|
|
||||||
body: Ref,
|
|
||||||
param: Option<Param<'ir>>,
|
|
||||||
arg: ArgId,
|
|
||||||
thunks: Vec<'ir, (ThunkId, Ref)>,
|
|
||||||
},
|
|
||||||
Arg(ArgId),
|
Arg(ArgId),
|
||||||
Call {
|
Thunk(ExprId),
|
||||||
func: Ref,
|
|
||||||
arg: Ref,
|
|
||||||
span: TextRange,
|
|
||||||
},
|
|
||||||
|
|
||||||
// Builtins
|
|
||||||
Builtins,
|
Builtins,
|
||||||
Builtin(SymId),
|
Builtin(SymId),
|
||||||
|
CurPos,
|
||||||
// Misc
|
|
||||||
TopLevel {
|
|
||||||
body: Ref,
|
|
||||||
thunks: Vec<'ir, (ThunkId, Ref)>,
|
|
||||||
},
|
|
||||||
Thunk(ThunkId),
|
|
||||||
CurPos(TextRange),
|
|
||||||
ReplBinding(SymId),
|
ReplBinding(SymId),
|
||||||
ScopedImportBinding(SymId),
|
ScopedImportBinding(SymId),
|
||||||
|
WithExpr { pub namespace: ExprId, pub body: ExprId, pub thunks: Vec<(ExprId, ExprId)> },
|
||||||
|
WithLookup(SymId),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[repr(transparent)]
|
#[repr(transparent)]
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||||
pub struct ThunkId(pub usize);
|
pub struct ExprId(pub usize);
|
||||||
|
|
||||||
pub type SymId = SymbolU32;
|
pub type SymId = SymbolU32;
|
||||||
|
|
||||||
@@ -162,18 +50,18 @@ pub struct ArgId(pub usize);
|
|||||||
|
|
||||||
/// Represents a key in an attribute path.
|
/// Represents a key in an attribute path.
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
#[derive(Debug)]
|
#[derive(Debug, TryUnwrap)]
|
||||||
pub enum Attr<Ref> {
|
pub enum Attr {
|
||||||
/// A dynamic attribute key, which is an expression that must evaluate to a string.
|
/// A dynamic attribute key, which is an expression that must evaluate to a string.
|
||||||
/// Example: `attrs.${key}`
|
/// Example: `attrs.${key}`
|
||||||
Dynamic(Ref, TextRange),
|
Dynamic(ExprId, TextRange),
|
||||||
/// A static attribute key.
|
/// A static attribute key.
|
||||||
/// Example: `attrs.key`
|
/// Example: `attrs.key`
|
||||||
Str(SymId, TextRange),
|
Str(SymId, TextRange),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The kinds of binary operations supported in Nix.
|
/// The kinds of binary operations supported in Nix.
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug)]
|
||||||
pub enum BinOpKind {
|
pub enum BinOpKind {
|
||||||
// Arithmetic
|
// Arithmetic
|
||||||
Add,
|
Add,
|
||||||
@@ -230,7 +118,7 @@ impl From<ast::BinOpKind> for BinOpKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// The kinds of unary operations.
|
/// The kinds of unary operations.
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug)]
|
||||||
pub enum UnOpKind {
|
pub enum UnOpKind {
|
||||||
Neg, // Negation (`-`)
|
Neg, // Negation (`-`)
|
||||||
Not, // Logical not (`!`)
|
Not, // Logical not (`!`)
|
||||||
@@ -247,437 +135,8 @@ impl From<ast::UnaryOpKind> for UnOpKind {
|
|||||||
|
|
||||||
/// Describes the parameters of a function.
|
/// Describes the parameters of a function.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Param<'ir> {
|
pub struct Param {
|
||||||
pub required: Vec<'ir, (SymId, TextRange)>,
|
pub required: Vec<(SymId, TextRange)>,
|
||||||
pub optional: Vec<'ir, (SymId, TextRange)>,
|
pub optional: Vec<(SymId, TextRange)>,
|
||||||
pub ellipsis: bool,
|
pub ellipsis: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
pub(crate) struct IrKey<'id, 'ir, 'a>(pub IrRef<'id, 'ir>, pub &'a GhostToken<'id>);
|
|
||||||
|
|
||||||
impl std::hash::Hash for IrKey<'_, '_, '_> {
|
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
||||||
ir_content_hash(self.0, self.1, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for IrKey<'_, '_, '_> {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
ir_content_eq(self.0, other.0, self.1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for IrKey<'_, '_, '_> {}
|
|
||||||
|
|
||||||
fn attr_content_hash<'id>(
|
|
||||||
attr: &Attr<IrRef<'id, '_>>,
|
|
||||||
token: &GhostToken<'id>,
|
|
||||||
state: &mut impl Hasher,
|
|
||||||
) {
|
|
||||||
core::mem::discriminant(attr).hash(state);
|
|
||||||
match attr {
|
|
||||||
Attr::Dynamic(expr, _) => ir_content_hash(*expr, token, state),
|
|
||||||
Attr::Str(sym, _) => sym.hash(state),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn attr_content_eq<'id, 'ir>(
|
|
||||||
a: &Attr<IrRef<'id, 'ir>>,
|
|
||||||
b: &Attr<IrRef<'id, 'ir>>,
|
|
||||||
token: &GhostToken<'id>,
|
|
||||||
) -> bool {
|
|
||||||
match (a, b) {
|
|
||||||
(Attr::Dynamic(ae, _), Attr::Dynamic(be, _)) => ir_content_eq(*ae, *be, token),
|
|
||||||
(Attr::Str(a, _), Attr::Str(b, _)) => a == b,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn param_content_hash(param: &Param<'_>, state: &mut impl Hasher) {
|
|
||||||
param.required.len().hash(state);
|
|
||||||
for (sym, _) in param.required.iter() {
|
|
||||||
sym.hash(state);
|
|
||||||
}
|
|
||||||
param.optional.len().hash(state);
|
|
||||||
for (sym, _) in param.optional.iter() {
|
|
||||||
sym.hash(state);
|
|
||||||
}
|
|
||||||
param.ellipsis.hash(state);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn param_content_eq(a: &Param<'_>, b: &Param<'_>) -> bool {
|
|
||||||
a.ellipsis == b.ellipsis
|
|
||||||
&& a.required.len() == b.required.len()
|
|
||||||
&& a.optional.len() == b.optional.len()
|
|
||||||
&& a.required
|
|
||||||
.iter()
|
|
||||||
.zip(b.required.iter())
|
|
||||||
.all(|((a, _), (b, _))| a == b)
|
|
||||||
&& a.optional
|
|
||||||
.iter()
|
|
||||||
.zip(b.optional.iter())
|
|
||||||
.all(|((a, _), (b, _))| a == b)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn thunks_content_hash<'id>(
|
|
||||||
thunks: &[(ThunkId, IrRef<'id, '_>)],
|
|
||||||
token: &GhostToken<'id>,
|
|
||||||
state: &mut impl Hasher,
|
|
||||||
) {
|
|
||||||
thunks.len().hash(state);
|
|
||||||
for &(id, ir) in thunks {
|
|
||||||
id.hash(state);
|
|
||||||
ir_content_hash(ir, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn thunks_content_eq<'id, 'ir>(
|
|
||||||
a: &[(ThunkId, IrRef<'id, 'ir>)],
|
|
||||||
b: &[(ThunkId, IrRef<'id, 'ir>)],
|
|
||||||
token: &GhostToken<'id>,
|
|
||||||
) -> bool {
|
|
||||||
a.len() == b.len()
|
|
||||||
&& a.iter()
|
|
||||||
.zip(b.iter())
|
|
||||||
.all(|(&(ai, ae), &(bi, be))| ai == bi && ir_content_eq(ae, be, token))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ir_content_hash<'id>(ir: IrRef<'id, '_>, token: &GhostToken<'id>, state: &mut impl Hasher) {
|
|
||||||
let ir = ir.borrow(token);
|
|
||||||
core::mem::discriminant(ir).hash(state);
|
|
||||||
match ir {
|
|
||||||
Ir::Int(x) => x.hash(state),
|
|
||||||
Ir::Float(x) => x.to_bits().hash(state),
|
|
||||||
Ir::Bool(x) => x.hash(state),
|
|
||||||
Ir::Null => {}
|
|
||||||
Ir::Str(x) => x.hash(state),
|
|
||||||
Ir::AttrSet { stcs, dyns } => {
|
|
||||||
stcs.len().hash(state);
|
|
||||||
let mut combined: u64 = 0;
|
|
||||||
for (&key, &(val, _)) in stcs.iter() {
|
|
||||||
let mut h = std::hash::DefaultHasher::new();
|
|
||||||
key.hash(&mut h);
|
|
||||||
ir_content_hash(val, token, &mut h);
|
|
||||||
combined = combined.wrapping_add(h.finish());
|
|
||||||
}
|
|
||||||
combined.hash(state);
|
|
||||||
dyns.len().hash(state);
|
|
||||||
for &(k, v, _) in dyns.iter() {
|
|
||||||
ir_content_hash(k, token, state);
|
|
||||||
ir_content_hash(v, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ir::List { items } => {
|
|
||||||
items.len().hash(state);
|
|
||||||
for &item in items.iter() {
|
|
||||||
ir_content_hash(item, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ir::HasAttr { lhs, rhs } => {
|
|
||||||
ir_content_hash(*lhs, token, state);
|
|
||||||
rhs.len().hash(state);
|
|
||||||
for attr in rhs.iter() {
|
|
||||||
attr_content_hash(attr, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&Ir::BinOp { lhs, rhs, kind } => {
|
|
||||||
ir_content_hash(lhs, token, state);
|
|
||||||
ir_content_hash(rhs, token, state);
|
|
||||||
kind.hash(state);
|
|
||||||
}
|
|
||||||
&Ir::UnOp { rhs, kind } => {
|
|
||||||
ir_content_hash(rhs, token, state);
|
|
||||||
kind.hash(state);
|
|
||||||
}
|
|
||||||
Ir::Select {
|
|
||||||
expr,
|
|
||||||
attrpath,
|
|
||||||
default,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
ir_content_hash(*expr, token, state);
|
|
||||||
attrpath.len().hash(state);
|
|
||||||
for attr in attrpath.iter() {
|
|
||||||
attr_content_hash(attr, token, state);
|
|
||||||
}
|
|
||||||
default.is_some().hash(state);
|
|
||||||
if let Some(d) = default {
|
|
||||||
ir_content_hash(*d, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&Ir::If { cond, consq, alter } => {
|
|
||||||
ir_content_hash(cond, token, state);
|
|
||||||
ir_content_hash(consq, token, state);
|
|
||||||
ir_content_hash(alter, token, state);
|
|
||||||
}
|
|
||||||
&Ir::Call { func, arg, .. } => {
|
|
||||||
ir_content_hash(func, token, state);
|
|
||||||
ir_content_hash(arg, token, state);
|
|
||||||
}
|
|
||||||
Ir::Assert {
|
|
||||||
assertion,
|
|
||||||
expr,
|
|
||||||
assertion_raw,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
ir_content_hash(*assertion, token, state);
|
|
||||||
ir_content_hash(*expr, token, state);
|
|
||||||
assertion_raw.hash(state);
|
|
||||||
}
|
|
||||||
Ir::ConcatStrings {
|
|
||||||
force_string,
|
|
||||||
parts,
|
|
||||||
} => {
|
|
||||||
force_string.hash(state);
|
|
||||||
parts.len().hash(state);
|
|
||||||
for &part in parts.iter() {
|
|
||||||
ir_content_hash(part, token, state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&Ir::Path(expr) => ir_content_hash(expr, token, state),
|
|
||||||
Ir::Func {
|
|
||||||
body,
|
|
||||||
arg,
|
|
||||||
param,
|
|
||||||
thunks,
|
|
||||||
} => {
|
|
||||||
ir_content_hash(*body, token, state);
|
|
||||||
arg.hash(state);
|
|
||||||
param.is_some().hash(state);
|
|
||||||
if let Some(p) = param {
|
|
||||||
param_content_hash(p, state);
|
|
||||||
}
|
|
||||||
thunks_content_hash(thunks, token, state);
|
|
||||||
}
|
|
||||||
Ir::TopLevel { body, thunks } => {
|
|
||||||
ir_content_hash(*body, token, state);
|
|
||||||
thunks_content_hash(thunks, token, state);
|
|
||||||
}
|
|
||||||
Ir::Arg(x) => x.hash(state),
|
|
||||||
Ir::Thunk(x) => x.hash(state),
|
|
||||||
Ir::Builtins => {}
|
|
||||||
Ir::Builtin(x) => x.hash(state),
|
|
||||||
Ir::CurPos(x) => x.hash(state),
|
|
||||||
Ir::ReplBinding(x) => x.hash(state),
|
|
||||||
Ir::ScopedImportBinding(x) => x.hash(state),
|
|
||||||
&Ir::With {
|
|
||||||
namespace,
|
|
||||||
body,
|
|
||||||
ref thunks,
|
|
||||||
} => {
|
|
||||||
ir_content_hash(namespace, token, state);
|
|
||||||
ir_content_hash(body, token, state);
|
|
||||||
thunks_content_hash(thunks, token, state);
|
|
||||||
}
|
|
||||||
Ir::WithLookup(x) => x.hash(state),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn ir_content_eq<'id, 'ir>(
|
|
||||||
a: IrRef<'id, 'ir>,
|
|
||||||
b: IrRef<'id, 'ir>,
|
|
||||||
token: &GhostToken<'id>,
|
|
||||||
) -> bool {
|
|
||||||
std::ptr::eq(a.0, b.0)
|
|
||||||
|| match (a.borrow(token), b.borrow(token)) {
|
|
||||||
(Ir::Int(a), Ir::Int(b)) => a == b,
|
|
||||||
(Ir::Float(a), Ir::Float(b)) => a.to_bits() == b.to_bits(),
|
|
||||||
(Ir::Bool(a), Ir::Bool(b)) => a == b,
|
|
||||||
(Ir::Null, Ir::Null) => true,
|
|
||||||
(Ir::Str(a), Ir::Str(b)) => **a == **b,
|
|
||||||
(
|
|
||||||
Ir::AttrSet {
|
|
||||||
stcs: a_stcs,
|
|
||||||
dyns: a_dyns,
|
|
||||||
},
|
|
||||||
Ir::AttrSet {
|
|
||||||
stcs: b_stcs,
|
|
||||||
dyns: b_dyns,
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
a_stcs.len() == b_stcs.len()
|
|
||||||
&& a_dyns.len() == b_dyns.len()
|
|
||||||
&& a_stcs.iter().all(|(&k, &(av, _))| {
|
|
||||||
b_stcs
|
|
||||||
.get(&k)
|
|
||||||
.is_some_and(|&(bv, _)| ir_content_eq(av, bv, token))
|
|
||||||
})
|
|
||||||
&& a_dyns
|
|
||||||
.iter()
|
|
||||||
.zip(b_dyns.iter())
|
|
||||||
.all(|(&(ak, av, _), &(bk, bv, _))| {
|
|
||||||
ir_content_eq(ak, bk, token) && ir_content_eq(av, bv, token)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
(Ir::List { items: a }, Ir::List { items: b }) => {
|
|
||||||
a.len() == b.len()
|
|
||||||
&& a.iter()
|
|
||||||
.zip(b.iter())
|
|
||||||
.all(|(&a, &b)| ir_content_eq(a, b, token))
|
|
||||||
}
|
|
||||||
(Ir::HasAttr { lhs: al, rhs: ar }, Ir::HasAttr { lhs: bl, rhs: br }) => {
|
|
||||||
ir_content_eq(*al, *bl, token)
|
|
||||||
&& ar.len() == br.len()
|
|
||||||
&& ar
|
|
||||||
.iter()
|
|
||||||
.zip(br.iter())
|
|
||||||
.all(|(a, b)| attr_content_eq(a, b, token))
|
|
||||||
}
|
|
||||||
(
|
|
||||||
&Ir::BinOp {
|
|
||||||
lhs: al,
|
|
||||||
rhs: ar,
|
|
||||||
kind: ak,
|
|
||||||
},
|
|
||||||
&Ir::BinOp {
|
|
||||||
lhs: bl,
|
|
||||||
rhs: br,
|
|
||||||
kind: bk,
|
|
||||||
},
|
|
||||||
) => ak == bk && ir_content_eq(al, bl, token) && ir_content_eq(ar, br, token),
|
|
||||||
(&Ir::UnOp { rhs: ar, kind: ak }, &Ir::UnOp { rhs: br, kind: bk }) => {
|
|
||||||
ak == bk && ir_content_eq(ar, br, token)
|
|
||||||
}
|
|
||||||
(
|
|
||||||
Ir::Select {
|
|
||||||
expr: ae,
|
|
||||||
attrpath: aa,
|
|
||||||
default: ad,
|
|
||||||
..
|
|
||||||
},
|
|
||||||
Ir::Select {
|
|
||||||
expr: be,
|
|
||||||
attrpath: ba,
|
|
||||||
default: bd,
|
|
||||||
..
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
ir_content_eq(*ae, *be, token)
|
|
||||||
&& aa.len() == ba.len()
|
|
||||||
&& aa
|
|
||||||
.iter()
|
|
||||||
.zip(ba.iter())
|
|
||||||
.all(|(a, b)| attr_content_eq(a, b, token))
|
|
||||||
&& match (ad, bd) {
|
|
||||||
(Some(a), Some(b)) => ir_content_eq(*a, *b, token),
|
|
||||||
(None, None) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(
|
|
||||||
&Ir::If {
|
|
||||||
cond: ac,
|
|
||||||
consq: acs,
|
|
||||||
alter: aa,
|
|
||||||
},
|
|
||||||
&Ir::If {
|
|
||||||
cond: bc,
|
|
||||||
consq: bcs,
|
|
||||||
alter: ba,
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
ir_content_eq(ac, bc, token)
|
|
||||||
&& ir_content_eq(acs, bcs, token)
|
|
||||||
&& ir_content_eq(aa, ba, token)
|
|
||||||
}
|
|
||||||
(
|
|
||||||
&Ir::Call {
|
|
||||||
func: af, arg: aa, ..
|
|
||||||
},
|
|
||||||
&Ir::Call {
|
|
||||||
func: bf, arg: ba, ..
|
|
||||||
},
|
|
||||||
) => ir_content_eq(af, bf, token) && ir_content_eq(aa, ba, token),
|
|
||||||
(
|
|
||||||
Ir::Assert {
|
|
||||||
assertion: aa,
|
|
||||||
expr: ae,
|
|
||||||
assertion_raw: ar,
|
|
||||||
..
|
|
||||||
},
|
|
||||||
Ir::Assert {
|
|
||||||
assertion: ba,
|
|
||||||
expr: be,
|
|
||||||
assertion_raw: br,
|
|
||||||
..
|
|
||||||
},
|
|
||||||
) => ar == br && ir_content_eq(*aa, *ba, token) && ir_content_eq(*ae, *be, token),
|
|
||||||
(
|
|
||||||
Ir::ConcatStrings {
|
|
||||||
force_string: af,
|
|
||||||
parts: ap,
|
|
||||||
},
|
|
||||||
Ir::ConcatStrings {
|
|
||||||
force_string: bf,
|
|
||||||
parts: bp,
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
af == bf
|
|
||||||
&& ap.len() == bp.len()
|
|
||||||
&& ap
|
|
||||||
.iter()
|
|
||||||
.zip(bp.iter())
|
|
||||||
.all(|(&a, &b)| ir_content_eq(a, b, token))
|
|
||||||
}
|
|
||||||
(&Ir::Path(a), &Ir::Path(b)) => ir_content_eq(a, b, token),
|
|
||||||
(
|
|
||||||
Ir::Func {
|
|
||||||
body: ab,
|
|
||||||
arg: aa,
|
|
||||||
param: ap,
|
|
||||||
thunks: at,
|
|
||||||
},
|
|
||||||
Ir::Func {
|
|
||||||
body: bb,
|
|
||||||
arg: ba,
|
|
||||||
param: bp,
|
|
||||||
thunks: bt,
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
ir_content_eq(*ab, *bb, token)
|
|
||||||
&& aa == ba
|
|
||||||
&& match (ap, bp) {
|
|
||||||
(Some(a), Some(b)) => param_content_eq(a, b),
|
|
||||||
(None, None) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
&& thunks_content_eq(at, bt, token)
|
|
||||||
}
|
|
||||||
(
|
|
||||||
Ir::TopLevel {
|
|
||||||
body: ab,
|
|
||||||
thunks: at,
|
|
||||||
},
|
|
||||||
Ir::TopLevel {
|
|
||||||
body: bb,
|
|
||||||
thunks: bt,
|
|
||||||
},
|
|
||||||
) => ir_content_eq(*ab, *bb, token) && thunks_content_eq(at, bt, token),
|
|
||||||
(Ir::Arg(a), Ir::Arg(b)) => a == b,
|
|
||||||
(Ir::Thunk(a), Ir::Thunk(b)) => a == b,
|
|
||||||
(Ir::Builtins, Ir::Builtins) => true,
|
|
||||||
(Ir::Builtin(a), Ir::Builtin(b)) => a == b,
|
|
||||||
(Ir::CurPos(a), Ir::CurPos(b)) => a == b,
|
|
||||||
(Ir::ReplBinding(a), Ir::ReplBinding(b)) => a == b,
|
|
||||||
(Ir::ScopedImportBinding(a), Ir::ScopedImportBinding(b)) => a == b,
|
|
||||||
(
|
|
||||||
Ir::With {
|
|
||||||
namespace: a_ns,
|
|
||||||
body: a_body,
|
|
||||||
thunks: a_thunks,
|
|
||||||
},
|
|
||||||
Ir::With {
|
|
||||||
namespace: b_ns,
|
|
||||||
body: b_body,
|
|
||||||
thunks: b_thunks,
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
ir_content_eq(*a_ns, *b_ns, token)
|
|
||||||
&& ir_content_eq(*a_body, *b_body, token)
|
|
||||||
&& thunks_content_eq(a_thunks, b_thunks, token)
|
|
||||||
}
|
|
||||||
(Ir::WithLookup(a), Ir::WithLookup(b)) => a == b,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -5,10 +5,8 @@ pub mod error;
|
|||||||
pub mod logging;
|
pub mod logging;
|
||||||
pub mod value;
|
pub mod value;
|
||||||
|
|
||||||
mod bytecode;
|
|
||||||
mod codegen;
|
mod codegen;
|
||||||
mod derivation;
|
mod derivation;
|
||||||
mod disassembler;
|
|
||||||
mod downgrade;
|
mod downgrade;
|
||||||
mod fetcher;
|
mod fetcher;
|
||||||
mod ir;
|
mod ir;
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
use std::env;
|
use std::env;
|
||||||
use std::io::IsTerminal;
|
use std::io::IsTerminal;
|
||||||
|
|
||||||
use tracing_subscriber::{EnvFilter, Layer, fmt, layer::SubscriberExt, util::SubscriberInitExt};
|
use tracing_subscriber::{EnvFilter, Layer, fmt, layer::SubscriberExt, util::SubscriberInitExt};
|
||||||
|
|
||||||
pub fn init_logging() {
|
pub fn init_logging() {
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ use std::path::PathBuf;
|
|||||||
use std::process::exit;
|
use std::process::exit;
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use clap::{Args, Parser, Subcommand};
|
use clap::{Parser, Subcommand, Args};
|
||||||
use hashbrown::HashSet;
|
use hashbrown::HashSet;
|
||||||
use nix_js::context::Context;
|
use nix_js::context::Context;
|
||||||
use nix_js::error::Source;
|
use nix_js::error::Source;
|
||||||
@@ -26,15 +26,9 @@ struct Cli {
|
|||||||
|
|
||||||
#[derive(Subcommand)]
|
#[derive(Subcommand)]
|
||||||
enum Command {
|
enum Command {
|
||||||
Compile {
|
|
||||||
#[clap(flatten)]
|
|
||||||
source: ExprSource,
|
|
||||||
#[arg(long)]
|
|
||||||
silent: bool,
|
|
||||||
},
|
|
||||||
Eval {
|
Eval {
|
||||||
#[clap(flatten)]
|
#[clap(flatten)]
|
||||||
source: ExprSource,
|
source: ExprSource
|
||||||
},
|
},
|
||||||
Repl,
|
Repl,
|
||||||
}
|
}
|
||||||
@@ -45,7 +39,7 @@ struct ExprSource {
|
|||||||
#[clap(short, long)]
|
#[clap(short, long)]
|
||||||
expr: Option<String>,
|
expr: Option<String>,
|
||||||
#[clap(short, long)]
|
#[clap(short, long)]
|
||||||
file: Option<PathBuf>,
|
file: Option<PathBuf>
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_context(#[cfg(feature = "inspector")] cli: &Cli) -> Result<Context> {
|
fn create_context(#[cfg(feature = "inspector")] cli: &Cli) -> Result<Context> {
|
||||||
@@ -69,30 +63,6 @@ fn create_context(#[cfg(feature = "inspector")] cli: &Cli) -> Result<Context> {
|
|||||||
Ok(Context::new()?)
|
Ok(Context::new()?)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_compile(context: &mut Context, src: ExprSource, silent: bool) -> Result<()> {
|
|
||||||
let src = if let Some(expr) = src.expr {
|
|
||||||
Source::new_eval(expr)?
|
|
||||||
} else if let Some(file) = src.file {
|
|
||||||
Source::new_file(file)?
|
|
||||||
} else {
|
|
||||||
unreachable!()
|
|
||||||
};
|
|
||||||
match context.compile_bytecode(src) {
|
|
||||||
Ok(compiled) => {
|
|
||||||
if !silent {
|
|
||||||
println!("{}", context.disassemble_colored(&compiled));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("{:?}", miette::Report::new(*err));
|
|
||||||
exit(1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
#[cfg(feature = "inspector")]
|
|
||||||
context.wait_for_inspector_disconnect();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run_eval(context: &mut Context, src: ExprSource) -> Result<()> {
|
fn run_eval(context: &mut Context, src: ExprSource) -> Result<()> {
|
||||||
let src = if let Some(expr) = src.expr {
|
let src = if let Some(expr) = src.expr {
|
||||||
Source::new_eval(expr)?
|
Source::new_eval(expr)?
|
||||||
@@ -101,9 +71,9 @@ fn run_eval(context: &mut Context, src: ExprSource) -> Result<()> {
|
|||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
};
|
};
|
||||||
match context.eval_deep(src) {
|
match context.eval_shallow(src) {
|
||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
println!("{}", value.display_compat());
|
println!("{value}");
|
||||||
}
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
eprintln!("{:?}", miette::Report::new(*err));
|
eprintln!("{:?}", miette::Report::new(*err));
|
||||||
@@ -180,8 +150,9 @@ fn main() -> Result<()> {
|
|||||||
)?;
|
)?;
|
||||||
|
|
||||||
match cli.command {
|
match cli.command {
|
||||||
Command::Compile { source, silent } => run_compile(&mut context, source, silent),
|
Command::Eval { source } => {
|
||||||
Command::Eval { source } => run_eval(&mut context, source),
|
run_eval(&mut context, source)
|
||||||
|
}
|
||||||
Command::Repl => run_repl(&mut context),
|
Command::Repl => run_repl(&mut context),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
use std::io::Read;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use nix_nar::Encoder;
|
use nix_nar::Encoder;
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::io::Read;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
|
|
||||||
@@ -28,13 +27,11 @@ pub fn pack_nar(path: &Path) -> Result<Vec<u8>> {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
#[allow(clippy::unwrap_used)]
|
#[allow(clippy::unwrap_used)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
use super::*;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
|
|
||||||
use tempfile::TempDir;
|
use tempfile::TempDir;
|
||||||
|
|
||||||
use super::*;
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn test_simple_file() {
|
fn test_simple_file() {
|
||||||
let temp = TempDir::new().unwrap();
|
let temp = TempDir::new().unwrap();
|
||||||
let file_path = temp.path().join("test.txt");
|
let file_path = temp.path().join("test.txt");
|
||||||
@@ -49,7 +46,7 @@ mod tests {
|
|||||||
assert_eq!(hash.len(), 64);
|
assert_eq!(hash.len(), 64);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn test_directory() {
|
fn test_directory() {
|
||||||
let temp = TempDir::new().unwrap();
|
let temp = TempDir::new().unwrap();
|
||||||
fs::write(temp.path().join("a.txt"), "aaa").unwrap();
|
fs::write(temp.path().join("a.txt"), "aaa").unwrap();
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ use std::path::Path;
|
|||||||
use deno_core::PollEventLoopOptions;
|
use deno_core::PollEventLoopOptions;
|
||||||
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
|
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
|
||||||
|
|
||||||
use crate::bytecode::{Bytecode, Constant};
|
|
||||||
use crate::error::{Error, Result, Source};
|
use crate::error::{Error, Result, Source};
|
||||||
use crate::store::DaemonStore;
|
use crate::store::DaemonStore;
|
||||||
use crate::value::{AttrSet, List, Symbol, Value};
|
use crate::value::{AttrSet, List, Symbol, Value};
|
||||||
@@ -25,12 +24,8 @@ pub(crate) trait RuntimeContext: 'static {
|
|||||||
fn add_source(&mut self, path: Source);
|
fn add_source(&mut self, path: Source);
|
||||||
fn compile(&mut self, source: Source) -> Result<String>;
|
fn compile(&mut self, source: Source) -> Result<String>;
|
||||||
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>;
|
fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String>;
|
||||||
fn compile_bytecode(&mut self, source: Source) -> Result<Bytecode>;
|
|
||||||
fn compile_bytecode_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<Bytecode>;
|
|
||||||
fn get_source(&self, id: usize) -> Source;
|
fn get_source(&self, id: usize) -> Source;
|
||||||
fn get_store(&self) -> &DaemonStore;
|
fn get_store(&self) -> &DaemonStore;
|
||||||
fn get_span(&self, id: usize) -> (usize, rnix::TextRange);
|
|
||||||
fn get_unsynced(&mut self) -> (&[String], &[Constant], usize, usize);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) trait OpStateExt<Ctx: RuntimeContext> {
|
pub(crate) trait OpStateExt<Ctx: RuntimeContext> {
|
||||||
@@ -95,7 +90,7 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
|
|||||||
mod private {
|
mod private {
|
||||||
use deno_error::js_error_wrapper;
|
use deno_error::js_error_wrapper;
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct SimpleErrorWrapper(String);
|
pub struct SimpleErrorWrapper(pub(crate) String);
|
||||||
impl std::fmt::Display for SimpleErrorWrapper {
|
impl std::fmt::Display for SimpleErrorWrapper {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
std::fmt::Display::fmt(&self.0, f)
|
std::fmt::Display::fmt(&self.0, f)
|
||||||
@@ -120,12 +115,14 @@ pub(crate) use private::NixRuntimeError;
|
|||||||
|
|
||||||
pub(crate) struct Runtime<Ctx: RuntimeContext> {
|
pub(crate) struct Runtime<Ctx: RuntimeContext> {
|
||||||
js_runtime: JsRuntime,
|
js_runtime: JsRuntime,
|
||||||
#[cfg(feature = "inspector")]
|
|
||||||
rt: tokio::runtime::Runtime,
|
rt: tokio::runtime::Runtime,
|
||||||
#[cfg(feature = "inspector")]
|
#[cfg(feature = "inspector")]
|
||||||
wait_for_inspector: bool,
|
wait_for_inspector: bool,
|
||||||
symbols: GlobalSymbols,
|
is_thunk_symbol: v8::Global<v8::Symbol>,
|
||||||
cached_fns: CachedFunctions,
|
primop_metadata_symbol: v8::Global<v8::Symbol>,
|
||||||
|
has_context_symbol: v8::Global<v8::Symbol>,
|
||||||
|
is_path_symbol: v8::Global<v8::Symbol>,
|
||||||
|
is_cycle_symbol: v8::Global<v8::Symbol>,
|
||||||
_marker: PhantomData<Ctx>,
|
_marker: PhantomData<Ctx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -145,12 +142,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
|
|||||||
static INIT: Once = Once::new();
|
static INIT: Once = Once::new();
|
||||||
INIT.call_once(|| {
|
INIT.call_once(|| {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
deno_core::v8_set_flags(vec![
|
deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]),
|
||||||
"".into(),
|
|
||||||
format!("--stack-size={}", 8 * 1024),
|
|
||||||
#[cfg(feature = "prof")]
|
|
||||||
("--prof".into())
|
|
||||||
]),
|
|
||||||
[""]
|
[""]
|
||||||
);
|
);
|
||||||
JsRuntime::init_platform(Some(v8::new_default_platform(0, false).make_shared()));
|
JsRuntime::init_platform(Some(v8::new_default_platform(0, false).make_shared()));
|
||||||
@@ -167,24 +159,30 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
|
|||||||
js_runtime.op_state().borrow_mut().put(RegexCache::new());
|
js_runtime.op_state().borrow_mut().put(RegexCache::new());
|
||||||
js_runtime.op_state().borrow_mut().put(DrvHashCache::new());
|
js_runtime.op_state().borrow_mut().put(DrvHashCache::new());
|
||||||
|
|
||||||
let (symbols, cached_fns) = {
|
let (
|
||||||
|
is_thunk_symbol,
|
||||||
|
primop_metadata_symbol,
|
||||||
|
has_context_symbol,
|
||||||
|
is_path_symbol,
|
||||||
|
is_cycle_symbol,
|
||||||
|
) = {
|
||||||
deno_core::scope!(scope, &mut js_runtime);
|
deno_core::scope!(scope, &mut js_runtime);
|
||||||
let symbols = Self::get_symbols(scope)?;
|
Self::get_symbols(scope)?
|
||||||
let cached_fns = Self::get_cached_functions(scope)?;
|
|
||||||
(symbols, cached_fns)
|
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
js_runtime,
|
js_runtime,
|
||||||
#[cfg(feature = "inspector")]
|
|
||||||
rt: tokio::runtime::Builder::new_current_thread()
|
rt: tokio::runtime::Builder::new_current_thread()
|
||||||
.enable_all()
|
.enable_all()
|
||||||
.build()
|
.build()
|
||||||
.expect("failed to build tokio runtime"),
|
.expect("failed to build tokio runtime"),
|
||||||
#[cfg(feature = "inspector")]
|
#[cfg(feature = "inspector")]
|
||||||
wait_for_inspector: inspector_options.wait,
|
wait_for_inspector: inspector_options.wait,
|
||||||
symbols,
|
is_thunk_symbol,
|
||||||
cached_fns,
|
primop_metadata_symbol,
|
||||||
|
has_context_symbol,
|
||||||
|
is_path_symbol,
|
||||||
|
is_cycle_symbol,
|
||||||
_marker: PhantomData,
|
_marker: PhantomData,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -226,97 +224,54 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
|
|||||||
|
|
||||||
crate::error::parse_js_error(error, ctx)
|
crate::error::parse_js_error(error, ctx)
|
||||||
})?;
|
})?;
|
||||||
|
let global_value = self
|
||||||
|
.rt
|
||||||
|
.block_on(self.js_runtime.resolve(global_value))
|
||||||
|
.map_err(|error| {
|
||||||
|
let op_state = self.js_runtime.op_state();
|
||||||
|
let op_state_borrow = op_state.borrow();
|
||||||
|
let ctx: &Ctx = op_state_borrow.get_ctx();
|
||||||
|
|
||||||
|
crate::error::parse_js_error(error, ctx)
|
||||||
|
})?;
|
||||||
|
#[cfg(feature = "inspector")]
|
||||||
|
{
|
||||||
|
let _ = self
|
||||||
|
.rt
|
||||||
|
.block_on(self.js_runtime.run_event_loop(Default::default()));
|
||||||
|
}
|
||||||
|
|
||||||
// Retrieve scope from JsRuntime
|
// Retrieve scope from JsRuntime
|
||||||
deno_core::scope!(scope, self.js_runtime);
|
deno_core::scope!(scope, self.js_runtime);
|
||||||
let local_value = v8::Local::new(scope, &global_value);
|
let local_value = v8::Local::new(scope, &global_value);
|
||||||
let symbols = &self.symbols.local(scope);
|
let is_thunk_symbol = v8::Local::new(scope, &self.is_thunk_symbol);
|
||||||
|
let primop_metadata_symbol = v8::Local::new(scope, &self.primop_metadata_symbol);
|
||||||
|
let has_context_symbol = v8::Local::new(scope, &self.has_context_symbol);
|
||||||
|
let is_path_symbol = v8::Local::new(scope, &self.is_path_symbol);
|
||||||
|
let is_cycle_symbol = v8::Local::new(scope, &self.is_cycle_symbol);
|
||||||
|
|
||||||
Ok(to_value(local_value, scope, symbols))
|
Ok(to_value(
|
||||||
}
|
local_value,
|
||||||
|
scope,
|
||||||
pub(crate) fn eval_bytecode(
|
is_thunk_symbol,
|
||||||
&mut self,
|
primop_metadata_symbol,
|
||||||
result: Bytecode,
|
has_context_symbol,
|
||||||
ctx: &mut Ctx,
|
is_path_symbol,
|
||||||
force_mode: ForceMode,
|
is_cycle_symbol,
|
||||||
) -> Result<Value> {
|
|
||||||
let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) };
|
|
||||||
{
|
|
||||||
deno_core::scope!(scope, self.js_runtime);
|
|
||||||
sync_global_tables(scope, &self.cached_fns, ctx);
|
|
||||||
}
|
|
||||||
let op_state = self.js_runtime.op_state();
|
|
||||||
op_state.borrow_mut().put(ctx);
|
|
||||||
|
|
||||||
#[cfg(feature = "inspector")]
|
|
||||||
if self.wait_for_inspector {
|
|
||||||
self.js_runtime
|
|
||||||
.inspector()
|
|
||||||
.wait_for_session_and_break_on_next_statement();
|
|
||||||
} else {
|
|
||||||
self.js_runtime.inspector().wait_for_session();
|
|
||||||
}
|
|
||||||
|
|
||||||
deno_core::scope!(scope, self.js_runtime);
|
|
||||||
|
|
||||||
let store = v8::ArrayBuffer::new_backing_store_from_boxed_slice(result.code);
|
|
||||||
let ab = v8::ArrayBuffer::with_backing_store(scope, &store.make_shared());
|
|
||||||
let u8a = v8::Uint8Array::new(scope, ab, 0, ab.byte_length())
|
|
||||||
.ok_or_else(|| Error::internal("failed to create Uint8Array".into()))?;
|
|
||||||
|
|
||||||
let dir = v8::String::new(scope, &result.current_dir)
|
|
||||||
.ok_or_else(|| Error::internal("failed to create dir string".into()))?;
|
|
||||||
|
|
||||||
let undef = v8::undefined(scope);
|
|
||||||
let tc = std::pin::pin!(v8::TryCatch::new(scope));
|
|
||||||
let scope = &mut tc.init();
|
|
||||||
|
|
||||||
let exec_bytecode = v8::Local::new(scope, &self.cached_fns.exec_bytecode);
|
|
||||||
let raw_result = exec_bytecode
|
|
||||||
.call(scope, undef.into(), &[u8a.into(), dir.into()])
|
|
||||||
.ok_or_else(|| {
|
|
||||||
scope
|
|
||||||
.exception()
|
|
||||||
.map(|e| {
|
|
||||||
let op_state_borrow = op_state.borrow();
|
|
||||||
let ctx: &Ctx = op_state_borrow.get_ctx();
|
|
||||||
Box::new(crate::error::parse_js_error(
|
|
||||||
deno_core::error::JsError::from_v8_exception(scope, e),
|
|
||||||
ctx,
|
|
||||||
))
|
))
|
||||||
})
|
|
||||||
.unwrap_or_else(|| Error::internal("bytecode execution failed".into()))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let force_fn = match force_mode {
|
|
||||||
ForceMode::Force => &self.cached_fns.force_fn,
|
|
||||||
ForceMode::ForceShallow => &self.cached_fns.force_shallow_fn,
|
|
||||||
ForceMode::ForceDeep => &self.cached_fns.force_deep_fn,
|
|
||||||
};
|
|
||||||
let force_fn = v8::Local::new(scope, force_fn);
|
|
||||||
|
|
||||||
let forced = force_fn
|
|
||||||
.call(scope, undef.into(), &[raw_result])
|
|
||||||
.ok_or_else(|| {
|
|
||||||
scope
|
|
||||||
.exception()
|
|
||||||
.map(|e| {
|
|
||||||
let op_state_borrow = op_state.borrow();
|
|
||||||
let ctx: &Ctx = op_state_borrow.get_ctx();
|
|
||||||
Box::new(crate::error::parse_js_error(
|
|
||||||
deno_core::error::JsError::from_v8_exception(scope, e),
|
|
||||||
ctx,
|
|
||||||
))
|
|
||||||
})
|
|
||||||
.unwrap_or_else(|| Error::internal("force failed".into()))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let symbols = &self.symbols.local(scope);
|
|
||||||
Ok(to_value(forced, scope, symbols))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_symbols(scope: &ScopeRef) -> Result<GlobalSymbols> {
|
/// get (IS_THUNK, PRIMOP_METADATA, HAS_CONTEXT, IS_PATH, IS_CYCLE)
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn get_symbols(
|
||||||
|
scope: &ScopeRef,
|
||||||
|
) -> Result<(
|
||||||
|
v8::Global<v8::Symbol>,
|
||||||
|
v8::Global<v8::Symbol>,
|
||||||
|
v8::Global<v8::Symbol>,
|
||||||
|
v8::Global<v8::Symbol>,
|
||||||
|
v8::Global<v8::Symbol>,
|
||||||
|
)> {
|
||||||
let global = scope.get_current_context().global(scope);
|
let global = scope.get_current_context().global(scope);
|
||||||
let nix_key = v8::String::new(scope, "Nix")
|
let nix_key = v8::String::new(scope, "Nix")
|
||||||
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
||||||
@@ -348,148 +303,18 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
|
|||||||
let is_path = get_symbol("IS_PATH")?;
|
let is_path = get_symbol("IS_PATH")?;
|
||||||
let is_cycle = get_symbol("IS_CYCLE")?;
|
let is_cycle = get_symbol("IS_CYCLE")?;
|
||||||
|
|
||||||
Ok(GlobalSymbols {
|
Ok((is_thunk, primop_metadata, has_context, is_path, is_cycle))
|
||||||
is_thunk,
|
|
||||||
primop_metadata,
|
|
||||||
has_context,
|
|
||||||
is_path,
|
|
||||||
is_cycle,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_cached_functions(scope: &ScopeRef) -> Result<CachedFunctions> {
|
|
||||||
let global = scope.get_current_context().global(scope);
|
|
||||||
let nix_key = v8::String::new(scope, "Nix")
|
|
||||||
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
|
||||||
let nix_obj = global
|
|
||||||
.get(scope, nix_key.into())
|
|
||||||
.ok_or_else(|| Error::internal("failed to get global Nix object".into()))?
|
|
||||||
.to_object(scope)
|
|
||||||
.ok_or_else(|| {
|
|
||||||
Error::internal("failed to convert global Nix Value to object".into())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let get_fn = |name: &str| -> Result<v8::Global<v8::Function>> {
|
|
||||||
let key = v8::String::new(scope, name)
|
|
||||||
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
|
||||||
let val = nix_obj
|
|
||||||
.get(scope, key.into())
|
|
||||||
.ok_or_else(|| Error::internal(format!("failed to get Nix.{name}")))?;
|
|
||||||
let func = val
|
|
||||||
.try_cast::<v8::Function>()
|
|
||||||
.map_err(|err| Error::internal(format!("Nix.{name} is not a function ({err})")))?;
|
|
||||||
Ok(v8::Global::new(scope, func))
|
|
||||||
};
|
|
||||||
|
|
||||||
let exec_bytecode = get_fn("execBytecode")?;
|
|
||||||
let force_fn = get_fn("force")?;
|
|
||||||
let force_shallow_fn = get_fn("forceShallow")?;
|
|
||||||
let force_deep_fn = get_fn("forceDeep")?;
|
|
||||||
|
|
||||||
let strings_key = v8::String::new(scope, "strings")
|
|
||||||
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
|
||||||
let strings_array = nix_obj
|
|
||||||
.get(scope, strings_key.into())
|
|
||||||
.ok_or_else(|| Error::internal("failed to get Nix.strings".into()))?
|
|
||||||
.try_cast::<v8::Array>()
|
|
||||||
.map_err(|err| Error::internal(format!("Nix.strings is not an array ({err})")))?;
|
|
||||||
|
|
||||||
let constants_key = v8::String::new(scope, "constants")
|
|
||||||
.ok_or_else(|| Error::internal("failed to create V8 String".into()))?;
|
|
||||||
let constants_array = nix_obj
|
|
||||||
.get(scope, constants_key.into())
|
|
||||||
.ok_or_else(|| Error::internal("failed to get Nix.constants".into()))?
|
|
||||||
.try_cast::<v8::Array>()
|
|
||||||
.map_err(|err| Error::internal(format!("Nix.constants is not an array ({err})")))?;
|
|
||||||
|
|
||||||
Ok(CachedFunctions {
|
|
||||||
exec_bytecode,
|
|
||||||
force_fn,
|
|
||||||
force_shallow_fn,
|
|
||||||
force_deep_fn,
|
|
||||||
strings_array: v8::Global::new(scope, strings_array),
|
|
||||||
constants_array: v8::Global::new(scope, constants_array),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct GlobalSymbols {
|
|
||||||
is_thunk: v8::Global<v8::Symbol>,
|
|
||||||
primop_metadata: v8::Global<v8::Symbol>,
|
|
||||||
has_context: v8::Global<v8::Symbol>,
|
|
||||||
is_path: v8::Global<v8::Symbol>,
|
|
||||||
is_cycle: v8::Global<v8::Symbol>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl GlobalSymbols {
|
|
||||||
fn local<'a>(&self, scope: &ScopeRef<'a, '_>) -> LocalSymbols<'a> {
|
|
||||||
LocalSymbols {
|
|
||||||
is_thunk: v8::Local::new(scope, &self.is_thunk),
|
|
||||||
primop_metadata: v8::Local::new(scope, &self.primop_metadata),
|
|
||||||
has_context: v8::Local::new(scope, &self.has_context),
|
|
||||||
is_path: v8::Local::new(scope, &self.is_path),
|
|
||||||
is_cycle: v8::Local::new(scope, &self.is_cycle),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct LocalSymbols<'a> {
|
|
||||||
is_thunk: v8::Local<'a, v8::Symbol>,
|
|
||||||
primop_metadata: v8::Local<'a, v8::Symbol>,
|
|
||||||
has_context: v8::Local<'a, v8::Symbol>,
|
|
||||||
is_path: v8::Local<'a, v8::Symbol>,
|
|
||||||
is_cycle: v8::Local<'a, v8::Symbol>,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct CachedFunctions {
|
|
||||||
exec_bytecode: v8::Global<v8::Function>,
|
|
||||||
force_fn: v8::Global<v8::Function>,
|
|
||||||
force_shallow_fn: v8::Global<v8::Function>,
|
|
||||||
force_deep_fn: v8::Global<v8::Function>,
|
|
||||||
strings_array: v8::Global<v8::Array>,
|
|
||||||
constants_array: v8::Global<v8::Array>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) enum ForceMode {
|
|
||||||
Force,
|
|
||||||
ForceShallow,
|
|
||||||
ForceDeep,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sync_global_tables<Ctx: RuntimeContext>(
|
|
||||||
scope: &ScopeRef,
|
|
||||||
cached: &CachedFunctions,
|
|
||||||
ctx: &mut Ctx,
|
|
||||||
) {
|
|
||||||
let (new_strings, new_constants, strings_base, constants_base) = ctx.get_unsynced();
|
|
||||||
|
|
||||||
if !new_strings.is_empty() {
|
|
||||||
let s_array = v8::Local::new(scope, &cached.strings_array);
|
|
||||||
for (i, s) in new_strings.iter().enumerate() {
|
|
||||||
let idx = (strings_base + i) as u32;
|
|
||||||
#[allow(clippy::unwrap_used)]
|
|
||||||
let val = v8::String::new(scope, s).unwrap();
|
|
||||||
s_array.set_index(scope, idx, val.into());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !new_constants.is_empty() {
|
|
||||||
let k_array = v8::Local::new(scope, &cached.constants_array);
|
|
||||||
for (i, c) in new_constants.iter().enumerate() {
|
|
||||||
let idx = (constants_base + i) as u32;
|
|
||||||
let val: v8::Local<v8::Value> = match c {
|
|
||||||
Constant::Int(n) => v8::BigInt::new_from_i64(scope, *n).into(),
|
|
||||||
Constant::Float(bits) => v8::Number::new(scope, f64::from_bits(*bits)).into(),
|
|
||||||
};
|
|
||||||
k_array.set_index(scope, idx, val);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn to_value<'a>(
|
fn to_value<'a>(
|
||||||
val: LocalValue<'a>,
|
val: LocalValue<'a>,
|
||||||
scope: &ScopeRef<'a, '_>,
|
scope: &ScopeRef<'a, '_>,
|
||||||
symbols: &LocalSymbols<'a>,
|
is_thunk_symbol: LocalSymbol<'a>,
|
||||||
|
primop_metadata_symbol: LocalSymbol<'a>,
|
||||||
|
has_context_symbol: LocalSymbol<'a>,
|
||||||
|
is_path_symbol: LocalSymbol<'a>,
|
||||||
|
is_cycle_symbol: LocalSymbol<'a>,
|
||||||
) -> Value {
|
) -> Value {
|
||||||
match () {
|
match () {
|
||||||
_ if val.is_big_int() => {
|
_ if val.is_big_int() => {
|
||||||
@@ -519,13 +344,21 @@ fn to_value<'a>(
|
|||||||
let list = (0..len)
|
let list = (0..len)
|
||||||
.map(|i| {
|
.map(|i| {
|
||||||
let val = val.get_index(scope, i).expect("infallible index operation");
|
let val = val.get_index(scope, i).expect("infallible index operation");
|
||||||
to_value(val, scope, symbols)
|
to_value(
|
||||||
|
val,
|
||||||
|
scope,
|
||||||
|
is_thunk_symbol,
|
||||||
|
primop_metadata_symbol,
|
||||||
|
has_context_symbol,
|
||||||
|
is_path_symbol,
|
||||||
|
is_cycle_symbol,
|
||||||
|
)
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
Value::List(List::new(list))
|
Value::List(List::new(list))
|
||||||
}
|
}
|
||||||
_ if val.is_function() => {
|
_ if val.is_function() => {
|
||||||
if let Some(primop) = to_primop(val, scope, symbols.primop_metadata) {
|
if let Some(primop) = to_primop(val, scope, primop_metadata_symbol) {
|
||||||
primop
|
primop
|
||||||
} else {
|
} else {
|
||||||
Value::Func
|
Value::Func
|
||||||
@@ -537,33 +370,36 @@ fn to_value<'a>(
|
|||||||
let array = val.as_array(scope);
|
let array = val.as_array(scope);
|
||||||
let attrs = (0..size)
|
let attrs = (0..size)
|
||||||
.map(|i| {
|
.map(|i| {
|
||||||
let key = array
|
let key = array.get_index(scope, i * 2).expect("infallible index operation");
|
||||||
.get_index(scope, i * 2)
|
|
||||||
.expect("infallible index operation");
|
|
||||||
let key = key.to_rust_string_lossy(scope);
|
let key = key.to_rust_string_lossy(scope);
|
||||||
let val = array
|
let val = array.get_index(scope, i * 2 + 1).expect("infallible index operation");
|
||||||
.get_index(scope, i * 2 + 1)
|
let val = to_value(
|
||||||
.expect("infallible index operation");
|
val,
|
||||||
let val = to_value(val, scope, symbols);
|
scope,
|
||||||
|
is_thunk_symbol,
|
||||||
|
primop_metadata_symbol,
|
||||||
|
has_context_symbol,
|
||||||
|
is_path_symbol,
|
||||||
|
is_cycle_symbol,
|
||||||
|
);
|
||||||
(Symbol::new(Cow::Owned(key)), val)
|
(Symbol::new(Cow::Owned(key)), val)
|
||||||
})
|
}).collect();
|
||||||
.collect();
|
|
||||||
Value::AttrSet(AttrSet::new(attrs))
|
Value::AttrSet(AttrSet::new(attrs))
|
||||||
}
|
}
|
||||||
_ if val.is_object() => {
|
_ if val.is_object() => {
|
||||||
if is_thunk(val, scope, symbols.is_thunk) {
|
if is_thunk(val, scope, is_thunk_symbol) {
|
||||||
return Value::Thunk;
|
return Value::Thunk;
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_cycle(val, scope, symbols.is_cycle) {
|
if is_cycle(val, scope, is_cycle_symbol) {
|
||||||
return Value::Repeated;
|
return Value::Repeated;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(path_val) = extract_path(val, scope, symbols.is_path) {
|
if let Some(path_val) = extract_path(val, scope, is_path_symbol) {
|
||||||
return Value::Path(path_val);
|
return Value::Path(path_val);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(string_val) = extract_string_with_context(val, scope, symbols.has_context) {
|
if let Some(string_val) = extract_string_with_context(val, scope, has_context_symbol) {
|
||||||
return Value::String(string_val);
|
return Value::String(string_val);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -579,7 +415,18 @@ fn to_value<'a>(
|
|||||||
.expect("infallible index operation");
|
.expect("infallible index operation");
|
||||||
let val = val.get(scope, key).expect("infallible operation");
|
let val = val.get(scope, key).expect("infallible operation");
|
||||||
let key = key.to_rust_string_lossy(scope);
|
let key = key.to_rust_string_lossy(scope);
|
||||||
(Symbol::from(key), to_value(val, scope, symbols))
|
(
|
||||||
|
Symbol::from(key),
|
||||||
|
to_value(
|
||||||
|
val,
|
||||||
|
scope,
|
||||||
|
is_thunk_symbol,
|
||||||
|
primop_metadata_symbol,
|
||||||
|
has_context_symbol,
|
||||||
|
is_path_symbol,
|
||||||
|
is_cycle_symbol,
|
||||||
|
),
|
||||||
|
)
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
Value::AttrSet(AttrSet::new(attrs))
|
Value::AttrSet(AttrSet::new(attrs))
|
||||||
|
|||||||
@@ -2,14 +2,6 @@
|
|||||||
|
|
||||||
// Alias for the future `!` type.
|
// Alias for the future `!` type.
|
||||||
use core::convert::Infallible as Never;
|
use core::convert::Infallible as Never;
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::net::SocketAddr;
|
|
||||||
use std::pin::pin;
|
|
||||||
use std::process;
|
|
||||||
use std::rc::Rc;
|
|
||||||
use std::task::Poll;
|
|
||||||
use std::thread;
|
|
||||||
|
|
||||||
use deno_core::InspectorMsg;
|
use deno_core::InspectorMsg;
|
||||||
use deno_core::InspectorSessionChannels;
|
use deno_core::InspectorSessionChannels;
|
||||||
use deno_core::InspectorSessionKind;
|
use deno_core::InspectorSessionKind;
|
||||||
@@ -29,9 +21,16 @@ use deno_core::url::Url;
|
|||||||
use fastwebsockets::Frame;
|
use fastwebsockets::Frame;
|
||||||
use fastwebsockets::OpCode;
|
use fastwebsockets::OpCode;
|
||||||
use fastwebsockets::WebSocket;
|
use fastwebsockets::WebSocket;
|
||||||
use hashbrown::HashMap;
|
|
||||||
use hyper::body::Bytes;
|
use hyper::body::Bytes;
|
||||||
use hyper_util::rt::TokioIo;
|
use hyper_util::rt::TokioIo;
|
||||||
|
use std::cell::RefCell;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::net::SocketAddr;
|
||||||
|
use std::pin::pin;
|
||||||
|
use std::process;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::task::Poll;
|
||||||
|
use std::thread;
|
||||||
use tokio::net::TcpListener;
|
use tokio::net::TcpListener;
|
||||||
use tokio::sync::broadcast;
|
use tokio::sync::broadcast;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|||||||
@@ -1,16 +1,13 @@
|
|||||||
use std::collections::BTreeMap;
|
|
||||||
use std::convert::Infallible;
|
|
||||||
use std::path::{Component, Path, PathBuf};
|
use std::path::{Component, Path, PathBuf};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use deno_core::error::JsError;
|
use hashbrown::hash_map::{Entry, HashMap};
|
||||||
use deno_core::{FromV8, OpState, ToV8, v8};
|
|
||||||
use hashbrown::{HashMap, HashSet, hash_map::Entry};
|
use deno_core::{FromV8, OpState, v8};
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use rust_embed::Embed;
|
use rust_embed::Embed;
|
||||||
|
|
||||||
use super::{NixRuntimeError, OpStateExt, RuntimeContext};
|
use super::{NixRuntimeError, OpStateExt, RuntimeContext};
|
||||||
use crate::bytecode::{Bytecode, Constant};
|
|
||||||
use crate::error::Source;
|
use crate::error::Source;
|
||||||
use crate::store::Store as _;
|
use crate::store::Store as _;
|
||||||
|
|
||||||
@@ -39,116 +36,16 @@ impl RegexCache {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) struct Map<K, V>(HashMap<K, V>);
|
|
||||||
impl<'a, K, V> ToV8<'a> for Map<K, V>
|
|
||||||
where
|
|
||||||
K: ToV8<'a>,
|
|
||||||
K::Error: ToString,
|
|
||||||
V: ToV8<'a>,
|
|
||||||
V::Error: ToString,
|
|
||||||
{
|
|
||||||
type Error = NixRuntimeError;
|
|
||||||
fn to_v8<'i>(self, scope: &mut v8::PinScope<'a, 'i>) -> Result<v8::Local<'a, v8::Value>> {
|
|
||||||
let map = v8::Map::new(scope);
|
|
||||||
for (k, v) in self.0 {
|
|
||||||
let k = k.to_v8(scope).map_err(|err| err.to_string())?;
|
|
||||||
let v = v.to_v8(scope).map_err(|err| err.to_string())?;
|
|
||||||
map.set(scope, k, v).ok_or("Failed to set V8 Map KV")?;
|
|
||||||
}
|
|
||||||
Ok(map.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Embed)]
|
#[derive(Embed)]
|
||||||
#[folder = "src/runtime/corepkgs"]
|
#[folder = "src/runtime/corepkgs"]
|
||||||
struct CorePkgs;
|
pub(crate) struct CorePkgs;
|
||||||
|
|
||||||
fn new_simple_jserror(msg: String) -> Box<JsError> {
|
#[deno_core::op2]
|
||||||
JsError {
|
#[string]
|
||||||
message: Some(msg.clone()),
|
|
||||||
|
|
||||||
name: None,
|
|
||||||
stack: None,
|
|
||||||
cause: None,
|
|
||||||
exception_message: msg,
|
|
||||||
frames: Vec::new(),
|
|
||||||
source_line: None,
|
|
||||||
source_line_frame_index: None,
|
|
||||||
aggregated: None,
|
|
||||||
additional_properties: Vec::new(),
|
|
||||||
}
|
|
||||||
.into()
|
|
||||||
}
|
|
||||||
|
|
||||||
struct BytecodeRet {
|
|
||||||
bytecode: Bytecode,
|
|
||||||
new_strings: *const [String],
|
|
||||||
new_constants: *const [Constant],
|
|
||||||
strings_base: usize,
|
|
||||||
constants_base: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ToV8<'a> for BytecodeRet {
|
|
||||||
type Error = Box<JsError>;
|
|
||||||
#[allow(clippy::unwrap_used)]
|
|
||||||
fn to_v8<'i>(
|
|
||||||
self,
|
|
||||||
scope: &mut v8::PinScope<'a, 'i>,
|
|
||||||
) -> std::result::Result<v8::Local<'a, v8::Value>, Self::Error> {
|
|
||||||
let global = scope.get_current_context().global(scope);
|
|
||||||
let nix_key = v8::String::new(scope, "Nix")
|
|
||||||
.ok_or_else(|| new_simple_jserror("failed to create v8 string".into()))?;
|
|
||||||
let nix_obj = global
|
|
||||||
.get(scope, nix_key.into())
|
|
||||||
.ok_or_else(|| new_simple_jserror("failed to get Nix global".into()))?
|
|
||||||
.to_object(scope)
|
|
||||||
.ok_or_else(|| new_simple_jserror("Nix is not an object".into()))?;
|
|
||||||
|
|
||||||
let s_key = v8::String::new(scope, "strings").unwrap();
|
|
||||||
let s_array: v8::Local<v8::Array> = nix_obj
|
|
||||||
.get(scope, s_key.into())
|
|
||||||
.unwrap()
|
|
||||||
.try_into()
|
|
||||||
.unwrap();
|
|
||||||
for (i, s) in unsafe { &*self.new_strings }.iter().enumerate() {
|
|
||||||
let idx = (self.strings_base + i) as u32;
|
|
||||||
let val = v8::String::new(scope, s).unwrap();
|
|
||||||
s_array.set_index(scope, idx, val.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
let k_key = v8::String::new(scope, "constants").unwrap();
|
|
||||||
let k_array: v8::Local<v8::Array> = nix_obj
|
|
||||||
.get(scope, k_key.into())
|
|
||||||
.unwrap()
|
|
||||||
.try_into()
|
|
||||||
.unwrap();
|
|
||||||
for (i, c) in unsafe { &*self.new_constants }.iter().enumerate() {
|
|
||||||
let idx = (self.constants_base + i) as u32;
|
|
||||||
let val: v8::Local<v8::Value> = match c {
|
|
||||||
Constant::Int(n) => v8::BigInt::new_from_i64(scope, *n).into(),
|
|
||||||
Constant::Float(bits) => v8::Number::new(scope, f64::from_bits(*bits)).into(),
|
|
||||||
};
|
|
||||||
k_array.set_index(scope, idx, val);
|
|
||||||
}
|
|
||||||
|
|
||||||
let store = v8::ArrayBuffer::new_backing_store_from_boxed_slice(self.bytecode.code);
|
|
||||||
let ab = v8::ArrayBuffer::with_backing_store(scope, &store.make_shared());
|
|
||||||
let u8a = v8::Uint8Array::new(scope, ab, 0, ab.byte_length())
|
|
||||||
.ok_or_else(|| new_simple_jserror("failed to create Uint8Array".into()))?;
|
|
||||||
|
|
||||||
let dir = v8::String::new(scope, &self.bytecode.current_dir)
|
|
||||||
.ok_or_else(|| new_simple_jserror("failed to create dir string".into()))?;
|
|
||||||
|
|
||||||
let arr = v8::Array::new_with_elements(scope, &[u8a.into(), dir.into()]);
|
|
||||||
Ok(arr.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
|
||||||
pub(super) fn op_import<Ctx: RuntimeContext>(
|
pub(super) fn op_import<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] path: String,
|
#[string] path: String,
|
||||||
) -> Result<BytecodeRet> {
|
) -> Result<String> {
|
||||||
let _span = tracing::info_span!("op_import", path = %path).entered();
|
let _span = tracing::info_span!("op_import", path = %path).entered();
|
||||||
let ctx: &mut Ctx = state.get_ctx_mut();
|
let ctx: &mut Ctx = state.get_ctx_mut();
|
||||||
|
|
||||||
@@ -164,17 +61,7 @@ pub(super) fn op_import<Ctx: RuntimeContext>(
|
|||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
ctx.add_source(source.clone());
|
ctx.add_source(source.clone());
|
||||||
let bytecode = ctx
|
return Ok(ctx.compile(source).map_err(|err| err.to_string())?);
|
||||||
.compile_bytecode(source)
|
|
||||||
.map_err(|err| err.to_string())?;
|
|
||||||
let (new_strings, new_constants, strings_base, constants_base) = ctx.get_unsynced();
|
|
||||||
return Ok(BytecodeRet {
|
|
||||||
bytecode,
|
|
||||||
new_strings,
|
|
||||||
new_constants,
|
|
||||||
strings_base,
|
|
||||||
constants_base,
|
|
||||||
});
|
|
||||||
} else {
|
} else {
|
||||||
return Err(format!("Corepkg not found: {}", corepkg_name).into());
|
return Err(format!("Corepkg not found: {}", corepkg_name).into());
|
||||||
}
|
}
|
||||||
@@ -198,25 +85,16 @@ pub(super) fn op_import<Ctx: RuntimeContext>(
|
|||||||
tracing::debug!("Compiling file");
|
tracing::debug!("Compiling file");
|
||||||
ctx.add_source(source.clone());
|
ctx.add_source(source.clone());
|
||||||
|
|
||||||
let bytecode = ctx
|
Ok(ctx.compile(source).map_err(|err| err.to_string())?)
|
||||||
.compile_bytecode(source)
|
|
||||||
.map_err(|err| err.to_string())?;
|
|
||||||
let (new_strings, new_constants, strings_base, constants_base) = ctx.get_unsynced();
|
|
||||||
Ok(BytecodeRet {
|
|
||||||
bytecode,
|
|
||||||
new_strings,
|
|
||||||
new_constants,
|
|
||||||
strings_base,
|
|
||||||
constants_base,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[string]
|
||||||
pub(super) fn op_scoped_import<Ctx: RuntimeContext>(
|
pub(super) fn op_scoped_import<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] path: String,
|
#[string] path: String,
|
||||||
#[serde] scope: Vec<String>,
|
#[serde] scope: Vec<String>,
|
||||||
) -> Result<BytecodeRet> {
|
) -> Result<String> {
|
||||||
let _span = tracing::info_span!("op_scoped_import", path = %path).entered();
|
let _span = tracing::info_span!("op_scoped_import", path = %path).entered();
|
||||||
let ctx: &mut Ctx = state.get_ctx_mut();
|
let ctx: &mut Ctx = state.get_ctx_mut();
|
||||||
|
|
||||||
@@ -235,26 +113,18 @@ pub(super) fn op_scoped_import<Ctx: RuntimeContext>(
|
|||||||
tracing::debug!("Compiling file for scoped import");
|
tracing::debug!("Compiling file for scoped import");
|
||||||
ctx.add_source(source.clone());
|
ctx.add_source(source.clone());
|
||||||
|
|
||||||
let bytecode = ctx
|
Ok(ctx
|
||||||
.compile_bytecode_scoped(source, scope)
|
.compile_scoped(source, scope)
|
||||||
.map_err(|err| err.to_string())?;
|
.map_err(|err| err.to_string())?)
|
||||||
let (new_strings, new_constants, strings_base, constants_base) = ctx.get_unsynced();
|
|
||||||
Ok(BytecodeRet {
|
|
||||||
bytecode,
|
|
||||||
new_strings,
|
|
||||||
new_constants,
|
|
||||||
strings_base,
|
|
||||||
constants_base,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_read_file(#[string] path: String) -> Result<String> {
|
pub(super) fn op_read_file(#[string] path: String) -> Result<String> {
|
||||||
Ok(std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {}", path, e))?)
|
Ok(std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {}", path, e))?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(fast, reentrant)]
|
#[deno_core::op2(fast)]
|
||||||
pub(super) fn op_path_exists(#[string] path: String) -> bool {
|
pub(super) fn op_path_exists(#[string] path: String) -> bool {
|
||||||
let must_be_dir = path.ends_with('/') || path.ends_with("/.");
|
let must_be_dir = path.ends_with('/') || path.ends_with("/.");
|
||||||
let p = Path::new(&path);
|
let p = Path::new(&path);
|
||||||
@@ -269,7 +139,7 @@ pub(super) fn op_path_exists(#[string] path: String) -> bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_read_file_type(#[string] path: String) -> Result<String> {
|
pub(super) fn op_read_file_type(#[string] path: String) -> Result<String> {
|
||||||
let path = Path::new(&path);
|
let path = Path::new(&path);
|
||||||
@@ -290,8 +160,11 @@ pub(super) fn op_read_file_type(#[string] path: String) -> Result<String> {
|
|||||||
Ok(type_str.to_string())
|
Ok(type_str.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
pub(super) fn op_read_dir(#[string] path: String) -> Result<Map<String, &'static str>> {
|
#[serde]
|
||||||
|
pub(super) fn op_read_dir(
|
||||||
|
#[string] path: String,
|
||||||
|
) -> Result<std::collections::HashMap<String, String>> {
|
||||||
let path = Path::new(&path);
|
let path = Path::new(&path);
|
||||||
|
|
||||||
if !path.is_dir() {
|
if !path.is_dir() {
|
||||||
@@ -301,7 +174,7 @@ pub(super) fn op_read_dir(#[string] path: String) -> Result<Map<String, &'static
|
|||||||
let entries = std::fs::read_dir(path)
|
let entries = std::fs::read_dir(path)
|
||||||
.map_err(|e| format!("Failed to read directory {}: {}", path.display(), e))?;
|
.map_err(|e| format!("Failed to read directory {}: {}", path.display(), e))?;
|
||||||
|
|
||||||
let mut result = HashMap::new();
|
let mut result = std::collections::HashMap::new();
|
||||||
|
|
||||||
for entry in entries {
|
for entry in entries {
|
||||||
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||||
@@ -325,13 +198,13 @@ pub(super) fn op_read_dir(#[string] path: String) -> Result<Map<String, &'static
|
|||||||
"unknown"
|
"unknown"
|
||||||
};
|
};
|
||||||
|
|
||||||
result.insert(file_name, type_str);
|
result.insert(file_name, type_str.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Map(result))
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_resolve_path(
|
pub(super) fn op_resolve_path(
|
||||||
#[string] current_dir: String,
|
#[string] current_dir: String,
|
||||||
@@ -370,7 +243,7 @@ pub(super) fn op_resolve_path(
|
|||||||
Ok(normalized.to_string_lossy().to_string())
|
Ok(normalized.to_string_lossy().to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_make_placeholder(#[string] output: String) -> String {
|
pub(super) fn op_make_placeholder(#[string] output: String) -> String {
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
@@ -382,40 +255,35 @@ pub(super) fn op_make_placeholder(#[string] output: String) -> String {
|
|||||||
format!("/{}", encoded)
|
format!("/{}", encoded)
|
||||||
}
|
}
|
||||||
|
|
||||||
enum StringOrU32 {
|
#[deno_core::op2]
|
||||||
String(String),
|
#[serde]
|
||||||
U32(u32),
|
|
||||||
}
|
|
||||||
impl<'a> ToV8<'a> for StringOrU32 {
|
|
||||||
type Error = Infallible;
|
|
||||||
fn to_v8<'i>(
|
|
||||||
self,
|
|
||||||
scope: &mut v8::PinScope<'a, 'i>,
|
|
||||||
) -> std::result::Result<v8::Local<'a, v8::Value>, Self::Error> {
|
|
||||||
match self {
|
|
||||||
Self::String(x) => x.to_v8(scope),
|
|
||||||
Self::U32(x) => x.to_v8(scope),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
|
||||||
pub(super) fn op_decode_span<Ctx: RuntimeContext>(
|
pub(super) fn op_decode_span<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[smi] span_id: u32,
|
#[string] span_str: String,
|
||||||
) -> Map<&'static str, StringOrU32> {
|
) -> Result<serde_json::Value> {
|
||||||
|
let parts: Vec<&str> = span_str.split(':').collect();
|
||||||
|
if parts.len() != 3 {
|
||||||
|
return Ok(serde_json::json!({
|
||||||
|
"file": serde_json::Value::Null,
|
||||||
|
"line": serde_json::Value::Null,
|
||||||
|
"column": serde_json::Value::Null
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
let source_id: usize = parts[0].parse().map_err(|_| "Invalid source ID")?;
|
||||||
|
let start: u32 = parts[1].parse().map_err(|_| "Invalid start offset")?;
|
||||||
|
|
||||||
let ctx: &Ctx = state.get_ctx();
|
let ctx: &Ctx = state.get_ctx();
|
||||||
let (source_id, range) = ctx.get_span(span_id as usize);
|
|
||||||
let source = ctx.get_source(source_id);
|
let source = ctx.get_source(source_id);
|
||||||
let start = u32::from(range.start());
|
let content = &source.src;
|
||||||
|
|
||||||
let (line, column) = byte_offset_to_line_col(&source.src, start as usize);
|
let (line, column) = byte_offset_to_line_col(content, start as usize);
|
||||||
|
|
||||||
Map(HashMap::from([
|
Ok(serde_json::json!({
|
||||||
("file", StringOrU32::String(source.get_name())),
|
"file": source.get_name(),
|
||||||
("line", StringOrU32::U32(line)),
|
"line": line,
|
||||||
("column", StringOrU32::U32(column)),
|
"column": column
|
||||||
]))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
|
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
|
||||||
@@ -437,18 +305,14 @@ fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
|
|||||||
(line, col)
|
(line, col)
|
||||||
}
|
}
|
||||||
|
|
||||||
mod private {
|
#[derive(serde::Serialize)]
|
||||||
use deno_core::ToV8;
|
|
||||||
|
|
||||||
#[derive(ToV8)]
|
|
||||||
pub(super) struct ParsedHash {
|
pub(super) struct ParsedHash {
|
||||||
pub(super) hex: String,
|
hex: String,
|
||||||
pub(super) algo: String,
|
algo: String,
|
||||||
}
|
}
|
||||||
}
|
|
||||||
use private::*;
|
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_parse_hash(
|
pub(super) fn op_parse_hash(
|
||||||
#[string] hash_str: String,
|
#[string] hash_str: String,
|
||||||
#[string] algo: Option<String>,
|
#[string] algo: Option<String>,
|
||||||
@@ -474,7 +338,7 @@ pub(super) fn op_parse_hash(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_add_path<Ctx: RuntimeContext>(
|
pub(super) fn op_add_path<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
@@ -483,11 +347,10 @@ pub(super) fn op_add_path<Ctx: RuntimeContext>(
|
|||||||
recursive: bool,
|
recursive: bool,
|
||||||
#[string] sha256: Option<String>,
|
#[string] sha256: Option<String>,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
use std::fs;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use nix_compat::nixhash::{HashAlgo, NixHash};
|
use nix_compat::nixhash::{HashAlgo, NixHash};
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::fs;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
let path_obj = Path::new(&path);
|
let path_obj = Path::new(&path);
|
||||||
|
|
||||||
@@ -554,7 +417,7 @@ pub(super) fn op_add_path<Ctx: RuntimeContext>(
|
|||||||
Ok(store_path)
|
Ok(store_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_store_path<Ctx: RuntimeContext>(
|
pub(super) fn op_store_path<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
@@ -575,13 +438,13 @@ pub(super) fn op_store_path<Ctx: RuntimeContext>(
|
|||||||
Ok(path)
|
Ok(path)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_to_file<Ctx: RuntimeContext>(
|
pub(super) fn op_to_file<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] name: String,
|
#[string] name: String,
|
||||||
#[string] contents: String,
|
#[string] contents: String,
|
||||||
#[scoped] references: Vec<String>,
|
#[serde] references: Vec<String>,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
let ctx: &Ctx = state.get_ctx();
|
let ctx: &Ctx = state.get_ctx();
|
||||||
let store = ctx.get_store();
|
let store = ctx.get_store();
|
||||||
@@ -592,7 +455,7 @@ pub(super) fn op_to_file<Ctx: RuntimeContext>(
|
|||||||
Ok(store_path)
|
Ok(store_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
|
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
@@ -624,7 +487,7 @@ pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
|
|||||||
Ok(store_path)
|
Ok(store_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_get_env(#[string] key: String) -> Result<String> {
|
pub(super) fn op_get_env(#[string] key: String) -> Result<String> {
|
||||||
match std::env::var(key) {
|
match std::env::var(key) {
|
||||||
@@ -634,7 +497,8 @@ pub(super) fn op_get_env(#[string] key: String) -> Result<String> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_walk_dir(#[string] path: String) -> Result<Vec<(String, String)>> {
|
pub(super) fn op_walk_dir(#[string] path: String) -> Result<Vec<(String, String)>> {
|
||||||
fn walk_recursive(
|
fn walk_recursive(
|
||||||
base: &Path,
|
base: &Path,
|
||||||
@@ -688,7 +552,7 @@ pub(super) fn op_walk_dir(#[string] path: String) -> Result<Vec<(String, String)
|
|||||||
Ok(results)
|
Ok(results)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
|
pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
@@ -696,12 +560,11 @@ pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
|
|||||||
#[string] name: Option<String>,
|
#[string] name: Option<String>,
|
||||||
recursive: bool,
|
recursive: bool,
|
||||||
#[string] sha256: Option<String>,
|
#[string] sha256: Option<String>,
|
||||||
#[scoped] include_paths: Vec<String>,
|
#[serde] include_paths: Vec<String>,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
use std::fs;
|
|
||||||
|
|
||||||
use nix_compat::nixhash::{HashAlgo, NixHash};
|
use nix_compat::nixhash::{HashAlgo, NixHash};
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::fs;
|
||||||
|
|
||||||
let src = Path::new(&src_path);
|
let src = Path::new(&src_path);
|
||||||
if !src.exists() {
|
if !src.exists() {
|
||||||
@@ -795,7 +658,8 @@ pub(super) fn op_add_filtered_path<Ctx: RuntimeContext>(
|
|||||||
Ok(store_path)
|
Ok(store_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_match(
|
pub(super) fn op_match(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] regex: String,
|
#[string] regex: String,
|
||||||
@@ -819,7 +683,8 @@ pub(super) fn op_match(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_split(
|
pub(super) fn op_split(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] regex: String,
|
#[string] regex: String,
|
||||||
@@ -855,30 +720,13 @@ pub(super) fn op_split(
|
|||||||
Ok(ret)
|
Ok(ret)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Serialize)]
|
||||||
|
#[serde(untagged)]
|
||||||
pub(super) enum SplitResult {
|
pub(super) enum SplitResult {
|
||||||
Text(String),
|
Text(String),
|
||||||
Captures(Vec<Option<String>>),
|
Captures(Vec<Option<String>>),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> ToV8<'a> for SplitResult {
|
|
||||||
type Error = Infallible;
|
|
||||||
fn to_v8<'i>(
|
|
||||||
self,
|
|
||||||
scope: &mut v8::PinScope<'a, 'i>,
|
|
||||||
) -> std::result::Result<v8::Local<'a, v8::Value>, Self::Error> {
|
|
||||||
Ok(match self {
|
|
||||||
Self::Text(text) => {
|
|
||||||
let Ok(value) = text.to_v8(scope);
|
|
||||||
value
|
|
||||||
}
|
|
||||||
Self::Captures(captures) => {
|
|
||||||
let Ok(value) = captures.to_v8(scope);
|
|
||||||
value
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) enum NixJsonValue {
|
pub(super) enum NixJsonValue {
|
||||||
Null,
|
Null,
|
||||||
Bool(bool),
|
Bool(bool),
|
||||||
@@ -985,38 +833,48 @@ fn toml_to_nix(value: toml::Value) -> Result<NixJsonValue> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
pub(super) fn op_from_json(#[string] json_str: String) -> Result<NixJsonValue> {
|
pub(super) fn op_from_json(#[string] json_str: String) -> Result<NixJsonValue> {
|
||||||
let parsed: serde_json::Value = serde_json::from_str(&json_str)
|
let parsed: serde_json::Value = serde_json::from_str(&json_str)
|
||||||
.map_err(|e| NixRuntimeError::from(format!("builtins.fromJSON: {e}")))?;
|
.map_err(|e| NixRuntimeError::from(format!("builtins.fromJSON: {e}")))?;
|
||||||
Ok(json_to_nix(parsed))
|
Ok(json_to_nix(parsed))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
pub(super) fn op_from_toml(#[string] toml_str: String) -> Result<NixJsonValue> {
|
pub(super) fn op_from_toml(#[string] toml_str: String) -> Result<NixJsonValue> {
|
||||||
let parsed: toml::Value = toml::from_str(&toml_str)
|
let parsed: toml::Value = toml::from_str(&toml_str)
|
||||||
.map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?;
|
.map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?;
|
||||||
toml_to_nix(parsed)
|
toml_to_nix(parsed)
|
||||||
}
|
}
|
||||||
|
|
||||||
mod scope {
|
#[derive(serde::Deserialize)]
|
||||||
use deno_core::{FromV8, ToV8};
|
|
||||||
|
|
||||||
#[derive(FromV8)]
|
|
||||||
pub(super) struct FixedOutputInput {
|
pub(super) struct FixedOutputInput {
|
||||||
pub(super) hash_algo: String,
|
#[serde(rename = "hashAlgo")]
|
||||||
pub(super) hash: String,
|
hash_algo: String,
|
||||||
pub(super) hash_mode: String,
|
hash: String,
|
||||||
|
#[serde(rename = "hashMode")]
|
||||||
|
hash_mode: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(ToV8)]
|
#[derive(serde::Deserialize)]
|
||||||
|
pub(super) struct FinalizeDerivationInput {
|
||||||
|
name: String,
|
||||||
|
builder: String,
|
||||||
|
platform: String,
|
||||||
|
outputs: Vec<String>,
|
||||||
|
args: Vec<String>,
|
||||||
|
env: Vec<(String, String)>,
|
||||||
|
context: Vec<String>,
|
||||||
|
#[serde(rename = "fixedOutput")]
|
||||||
|
fixed_output: Option<FixedOutputInput>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Serialize)]
|
||||||
pub(super) struct FinalizeDerivationOutput {
|
pub(super) struct FinalizeDerivationOutput {
|
||||||
// renamed to `drvPath` automatically
|
#[serde(rename = "drvPath")]
|
||||||
pub(super) drv_path: String,
|
drv_path: String,
|
||||||
pub(super) outputs: Vec<(String, String)>,
|
outputs: Vec<(String, String)>,
|
||||||
}
|
}
|
||||||
}
|
|
||||||
use scope::*;
|
|
||||||
|
|
||||||
fn output_path_name(drv_name: &str, output: &str) -> String {
|
fn output_path_name(drv_name: &str, output: &str) -> String {
|
||||||
if output == "out" {
|
if output == "out" {
|
||||||
@@ -1026,17 +884,11 @@ fn output_path_name(drv_name: &str, output: &str) -> String {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
||||||
state: &mut OpState,
|
state: &mut OpState,
|
||||||
#[string] name: String,
|
#[serde] input: FinalizeDerivationInput,
|
||||||
#[string] builder: String,
|
|
||||||
#[string] platform: String,
|
|
||||||
#[scoped] outputs: Vec<String>,
|
|
||||||
#[scoped] args: Vec<String>,
|
|
||||||
#[scoped] env: Vec<(String, String)>,
|
|
||||||
#[scoped] context: Vec<String>,
|
|
||||||
#[scoped] fixed_output: Option<FixedOutputInput>,
|
|
||||||
) -> Result<FinalizeDerivationOutput> {
|
) -> Result<FinalizeDerivationOutput> {
|
||||||
use crate::derivation::{DerivationData, OutputInfo};
|
use crate::derivation::{DerivationData, OutputInfo};
|
||||||
use crate::string_context::extract_input_drvs_and_srcs;
|
use crate::string_context::extract_input_drvs_and_srcs;
|
||||||
@@ -1046,15 +898,15 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
let store_dir = store.get_store_dir().to_string();
|
let store_dir = store.get_store_dir().to_string();
|
||||||
|
|
||||||
let (input_drvs, input_srcs) =
|
let (input_drvs, input_srcs) =
|
||||||
extract_input_drvs_and_srcs(&context).map_err(NixRuntimeError::from)?;
|
extract_input_drvs_and_srcs(&input.context).map_err(NixRuntimeError::from)?;
|
||||||
|
|
||||||
let env: BTreeMap<String, String> = env.into_iter().collect();
|
let env: std::collections::BTreeMap<String, String> = input.env.into_iter().collect();
|
||||||
|
|
||||||
let drv_path;
|
let drv_path;
|
||||||
let output_paths: Vec<(String, String)>;
|
let output_paths: Vec<(String, String)>;
|
||||||
|
|
||||||
if let Some(fixed) = &fixed_output {
|
if let Some(fixed) = &input.fixed_output {
|
||||||
let path_name = output_path_name(&name, "out");
|
let path_name = output_path_name(&input.name, "out");
|
||||||
let out_path = crate::runtime::ops::op_make_fixed_output_path_impl(
|
let out_path = crate::runtime::ops::op_make_fixed_output_path_impl(
|
||||||
&store_dir,
|
&store_dir,
|
||||||
&fixed.hash_algo,
|
&fixed.hash_algo,
|
||||||
@@ -1069,7 +921,7 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
""
|
""
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut final_outputs = BTreeMap::new();
|
let mut final_outputs = std::collections::BTreeMap::new();
|
||||||
final_outputs.insert(
|
final_outputs.insert(
|
||||||
"out".to_string(),
|
"out".to_string(),
|
||||||
OutputInfo {
|
OutputInfo {
|
||||||
@@ -1083,13 +935,13 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
final_env.insert("out".to_string(), out_path.clone());
|
final_env.insert("out".to_string(), out_path.clone());
|
||||||
|
|
||||||
let drv = DerivationData {
|
let drv = DerivationData {
|
||||||
name: name.clone(),
|
name: input.name.clone(),
|
||||||
outputs: final_outputs,
|
outputs: final_outputs,
|
||||||
input_drvs: input_drvs.clone(),
|
input_drvs: input_drvs.clone(),
|
||||||
input_srcs: input_srcs.clone(),
|
input_srcs: input_srcs.clone(),
|
||||||
platform,
|
platform: input.platform,
|
||||||
builder,
|
builder: input.builder,
|
||||||
args,
|
args: input.args,
|
||||||
env: final_env,
|
env: final_env,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1097,7 +949,7 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
let references = drv.collect_references();
|
let references = drv.collect_references();
|
||||||
|
|
||||||
drv_path = store
|
drv_path = store
|
||||||
.add_text_to_store(&format!("{}.drv", name), &final_aterm, references)
|
.add_text_to_store(&format!("{}.drv", input.name), &final_aterm, references)
|
||||||
.map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
|
.map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
|
||||||
|
|
||||||
let fixed_hash_fingerprint = format!(
|
let fixed_hash_fingerprint = format!(
|
||||||
@@ -1111,7 +963,8 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
|
|
||||||
output_paths = vec![("out".to_string(), out_path)];
|
output_paths = vec![("out".to_string(), out_path)];
|
||||||
} else {
|
} else {
|
||||||
let masked_outputs: std::collections::BTreeMap<String, OutputInfo> = outputs
|
let masked_outputs: std::collections::BTreeMap<String, OutputInfo> = input
|
||||||
|
.outputs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|o| {
|
.map(|o| {
|
||||||
(
|
(
|
||||||
@@ -1126,18 +979,18 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let mut masked_env = env.clone();
|
let mut masked_env = env.clone();
|
||||||
for output in &outputs {
|
for output in &input.outputs {
|
||||||
masked_env.insert(output.clone(), String::new());
|
masked_env.insert(output.clone(), String::new());
|
||||||
}
|
}
|
||||||
|
|
||||||
let masked_drv = DerivationData {
|
let masked_drv = DerivationData {
|
||||||
name: name.clone(),
|
name: input.name.clone(),
|
||||||
outputs: masked_outputs,
|
outputs: masked_outputs,
|
||||||
input_drvs: input_drvs.clone(),
|
input_drvs: input_drvs.clone(),
|
||||||
input_srcs: input_srcs.clone(),
|
input_srcs: input_srcs.clone(),
|
||||||
platform: platform.clone(),
|
platform: input.platform.clone(),
|
||||||
builder: builder.clone(),
|
builder: input.builder.clone(),
|
||||||
args: args.clone(),
|
args: input.args.clone(),
|
||||||
env: masked_env,
|
env: masked_env,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1165,8 +1018,8 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
let mut final_env = env;
|
let mut final_env = env;
|
||||||
let mut result_output_paths = Vec::new();
|
let mut result_output_paths = Vec::new();
|
||||||
|
|
||||||
for output_name in &outputs {
|
for output_name in &input.outputs {
|
||||||
let path_name = output_path_name(&name, output_name);
|
let path_name = output_path_name(&input.name, output_name);
|
||||||
let out_path = crate::nix_utils::make_store_path(
|
let out_path = crate::nix_utils::make_store_path(
|
||||||
&store_dir,
|
&store_dir,
|
||||||
&format!("output:{}", output_name),
|
&format!("output:{}", output_name),
|
||||||
@@ -1186,13 +1039,13 @@ pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let final_drv = DerivationData {
|
let final_drv = DerivationData {
|
||||||
name,
|
name: input.name,
|
||||||
outputs: final_outputs,
|
outputs: final_outputs,
|
||||||
input_drvs,
|
input_drvs,
|
||||||
input_srcs,
|
input_srcs,
|
||||||
platform,
|
platform: input.platform,
|
||||||
builder,
|
builder: input.builder,
|
||||||
args,
|
args: input.args,
|
||||||
env: final_env,
|
env: final_env,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1240,7 +1093,7 @@ fn op_make_fixed_output_path_impl(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_hash_string(#[string] algo: String, #[string] data: String) -> Result<String> {
|
pub(super) fn op_hash_string(#[string] algo: String, #[string] data: String) -> Result<String> {
|
||||||
use sha2::{Digest, Sha256, Sha512};
|
use sha2::{Digest, Sha256, Sha512};
|
||||||
@@ -1277,7 +1130,7 @@ pub(super) fn op_hash_string(#[string] algo: String, #[string] data: String) ->
|
|||||||
Ok(hex::encode(hash_bytes))
|
Ok(hex::encode(hash_bytes))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_hash_file(#[string] algo: String, #[string] path: String) -> Result<String> {
|
pub(super) fn op_hash_file(#[string] algo: String, #[string] path: String) -> Result<String> {
|
||||||
let data = std::fs::read(&path)
|
let data = std::fs::read(&path)
|
||||||
@@ -1317,23 +1170,23 @@ pub(super) fn op_hash_file(#[string] algo: String, #[string] path: String) -> Re
|
|||||||
Ok(hex::encode(hash_bytes))
|
Ok(hex::encode(hash_bytes))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
#[string]
|
#[string]
|
||||||
pub(super) fn op_convert_hash(
|
pub(super) fn op_convert_hash(#[serde] input: ConvertHashInput) -> Result<String> {
|
||||||
#[string] hash: &str,
|
|
||||||
#[string] algo: Option<String>,
|
|
||||||
#[string] format: &str,
|
|
||||||
) -> Result<String> {
|
|
||||||
use nix_compat::nixhash::{HashAlgo, NixHash};
|
use nix_compat::nixhash::{HashAlgo, NixHash};
|
||||||
|
|
||||||
let hash_algo = algo.as_deref().and_then(|a| HashAlgo::from_str(a).ok());
|
let hash_algo = input
|
||||||
|
.hash_algo
|
||||||
|
.as_deref()
|
||||||
|
.and_then(|a| HashAlgo::from_str(a).ok());
|
||||||
|
|
||||||
let hash = NixHash::from_str(hash, hash_algo)
|
let hash = NixHash::from_str(&input.hash, hash_algo).map_err(|e| {
|
||||||
.map_err(|e| NixRuntimeError::from(format!("cannot convert hash '{}': {}", hash, e)))?;
|
NixRuntimeError::from(format!("cannot convert hash '{}': {}", input.hash, e))
|
||||||
|
})?;
|
||||||
|
|
||||||
let bytes = hash.digest_as_bytes();
|
let bytes = hash.digest_as_bytes();
|
||||||
|
|
||||||
match format {
|
match input.to_format.as_str() {
|
||||||
"base16" => Ok(hex::encode(bytes)),
|
"base16" => Ok(hex::encode(bytes)),
|
||||||
"nix32" | "base32" => Ok(nix_compat::nixbase32::encode(bytes)),
|
"nix32" | "base32" => Ok(nix_compat::nixbase32::encode(bytes)),
|
||||||
"base64" => {
|
"base64" => {
|
||||||
@@ -1346,11 +1199,20 @@ pub(super) fn op_convert_hash(
|
|||||||
})),
|
})),
|
||||||
_ => Err(NixRuntimeError::from(format!(
|
_ => Err(NixRuntimeError::from(format!(
|
||||||
"unknown hash format '{}'",
|
"unknown hash format '{}'",
|
||||||
format
|
input.to_format
|
||||||
))),
|
))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
pub(super) struct ConvertHashInput {
|
||||||
|
hash: String,
|
||||||
|
#[serde(rename = "hashAlgo")]
|
||||||
|
hash_algo: Option<String>,
|
||||||
|
#[serde(rename = "toHashFormat")]
|
||||||
|
to_format: String,
|
||||||
|
}
|
||||||
|
|
||||||
struct XmlCtx<'s> {
|
struct XmlCtx<'s> {
|
||||||
force_fn: v8::Local<'s, v8::Function>,
|
force_fn: v8::Local<'s, v8::Function>,
|
||||||
is_thunk: v8::Local<'s, v8::Symbol>,
|
is_thunk: v8::Local<'s, v8::Symbol>,
|
||||||
@@ -1392,7 +1254,7 @@ impl<'s> XmlCtx<'s> {
|
|||||||
struct XmlWriter {
|
struct XmlWriter {
|
||||||
buf: String,
|
buf: String,
|
||||||
context: Vec<String>,
|
context: Vec<String>,
|
||||||
drvs_seen: HashSet<String>,
|
drvs_seen: hashbrown::HashSet<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl XmlWriter {
|
impl XmlWriter {
|
||||||
@@ -1400,7 +1262,7 @@ impl XmlWriter {
|
|||||||
Self {
|
Self {
|
||||||
buf: String::with_capacity(4096),
|
buf: String::with_capacity(4096),
|
||||||
context: Vec::new(),
|
context: Vec::new(),
|
||||||
drvs_seen: HashSet::new(),
|
drvs_seen: hashbrown::HashSet::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1890,7 +1752,8 @@ impl<'a> FromV8<'a> for ToXmlResult {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[deno_core::op2(reentrant)]
|
#[deno_core::op2]
|
||||||
|
#[serde]
|
||||||
pub(super) fn op_to_xml(#[scoped] value: ToXmlResult) -> (String, Vec<String>) {
|
pub(super) fn op_to_xml(#[scoped] value: ToXmlResult) -> (String, Vec<String>) {
|
||||||
(value.xml, value.context)
|
(value.xml, value.context)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -87,12 +87,11 @@ impl Store for DaemonStore {
|
|||||||
recursive: bool,
|
recursive: bool,
|
||||||
references: Vec<String>,
|
references: Vec<String>,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
use std::fs;
|
|
||||||
|
|
||||||
use nix_compat::nix_daemon::types::AddToStoreNarRequest;
|
use nix_compat::nix_daemon::types::AddToStoreNarRequest;
|
||||||
use nix_compat::nixhash::{CAHash, NixHash};
|
use nix_compat::nixhash::{CAHash, NixHash};
|
||||||
use nix_compat::store_path::{StorePath, build_ca_path};
|
use nix_compat::store_path::{StorePath, build_ca_path};
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::fs;
|
||||||
use tempfile::NamedTempFile;
|
use tempfile::NamedTempFile;
|
||||||
|
|
||||||
let temp_file = NamedTempFile::new()
|
let temp_file = NamedTempFile::new()
|
||||||
@@ -238,12 +237,11 @@ impl Store for DaemonStore {
|
|||||||
content: &str,
|
content: &str,
|
||||||
references: Vec<String>,
|
references: Vec<String>,
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
use std::fs;
|
|
||||||
|
|
||||||
use nix_compat::nix_daemon::types::AddToStoreNarRequest;
|
use nix_compat::nix_daemon::types::AddToStoreNarRequest;
|
||||||
use nix_compat::nixhash::CAHash;
|
use nix_compat::nixhash::CAHash;
|
||||||
use nix_compat::store_path::{StorePath, build_text_path};
|
use nix_compat::store_path::{StorePath, build_text_path};
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::fs;
|
||||||
use tempfile::NamedTempFile;
|
use tempfile::NamedTempFile;
|
||||||
|
|
||||||
let temp_file = NamedTempFile::new()
|
let temp_file = NamedTempFile::new()
|
||||||
|
|||||||
@@ -81,7 +81,7 @@ pub fn validate_store_path(store_dir: &str, path: &str) -> Result<()> {
|
|||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn test_valid_store_paths() {
|
fn test_valid_store_paths() {
|
||||||
let store_dir = "/nix/store";
|
let store_dir = "/nix/store";
|
||||||
let valid_paths = vec![
|
let valid_paths = vec![
|
||||||
@@ -100,7 +100,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn test_invalid_store_paths() {
|
fn test_invalid_store_paths() {
|
||||||
let store_dir = "/nix/store";
|
let store_dir = "/nix/store";
|
||||||
let invalid_paths = vec![
|
let invalid_paths = vec![
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
use core::fmt::{Debug, Display, Formatter, Result as FmtResult};
|
use core::fmt::{Debug, Display, Formatter, Result as FmtResult};
|
||||||
use core::hash::Hash;
|
use core::hash::Hash;
|
||||||
use core::ops::Deref;
|
use core::ops::Deref;
|
||||||
|
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::ops::DerefMut;
|
use std::ops::DerefMut;
|
||||||
|
|||||||
@@ -1,33 +1,36 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
use utils::eval;
|
||||||
|
|
||||||
use crate::utils::{eval, eval_result};
|
use crate::utils::eval_result;
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn arithmetic() {
|
fn arithmetic() {
|
||||||
assert_eq!(eval("1 + 1"), Value::Int(2));
|
assert_eq!(eval("1 + 1"), Value::Int(2));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn simple_function_application() {
|
fn simple_function_application() {
|
||||||
assert_eq!(eval("(x: x) 1"), Value::Int(1));
|
assert_eq!(eval("(x: x) 1"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn curried_function() {
|
fn curried_function() {
|
||||||
assert_eq!(eval("(x: y: x - y) 2 1"), Value::Int(1));
|
assert_eq!(eval("(x: y: x - y) 2 1"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn rec_attrset() {
|
fn rec_attrset() {
|
||||||
assert_eq!(eval("rec { b = a; a = 1; }.b"), Value::Int(1));
|
assert_eq!(eval("rec { b = a; a = 1; }.b"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn let_binding() {
|
fn let_binding() {
|
||||||
assert_eq!(eval("let b = a; a = 1; in b"), Value::Int(1));
|
assert_eq!(eval("let b = a; a = 1; in b"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fibonacci() {
|
fn fibonacci() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval(
|
eval(
|
||||||
@@ -37,7 +40,7 @@ fn fibonacci() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_point_combinator() {
|
fn fixed_point_combinator() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("((f: let x = f x; in x)(self: { x = 1; y = self.x + 1; })).y"),
|
eval("((f: let x = f x; in x)(self: { x = 1; y = self.x + 1; })).y"),
|
||||||
@@ -45,17 +48,17 @@ fn fixed_point_combinator() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn conditional_true() {
|
fn conditional_true() {
|
||||||
assert_eq!(eval("if true then 1 else 0"), Value::Int(1));
|
assert_eq!(eval("if true then 1 else 0"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn conditional_false() {
|
fn conditional_false() {
|
||||||
assert_eq!(eval("if false then 1 else 0"), Value::Int(0));
|
assert_eq!(eval("if false then 1 else 0"), Value::Int(0));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn nested_let() {
|
fn nested_let() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("let x = 1; in let y = x + 1; z = y + 1; in z"),
|
eval("let x = 1; in let y = x + 1; z = y + 1; in z"),
|
||||||
@@ -63,7 +66,7 @@ fn nested_let() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn rec_inherit_fails() {
|
fn rec_inherit_fails() {
|
||||||
assert!(eval_result("{ inherit x; }").is_err());
|
assert!(eval_result("{ inherit x; }").is_err());
|
||||||
}
|
}
|
||||||
@@ -1,32 +1,33 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
use nix_js::value::{AttrSet, List, Value};
|
use nix_js::value::{AttrSet, List, Value};
|
||||||
|
use utils::eval;
|
||||||
|
|
||||||
use crate::utils::eval;
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn builtins_accessible() {
|
fn builtins_accessible() {
|
||||||
let result = eval("builtins");
|
let result = eval("builtins");
|
||||||
assert!(matches!(result, Value::AttrSet(_)));
|
assert!(matches!(result, Value::AttrSet(_)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_self_reference() {
|
fn builtins_self_reference() {
|
||||||
let result = eval("builtins.builtins");
|
let result = eval("builtins.builtins");
|
||||||
assert!(matches!(result, Value::AttrSet(_)));
|
assert!(matches!(result, Value::AttrSet(_)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_add() {
|
fn builtins_add() {
|
||||||
assert_eq!(eval("builtins.add 1 2"), Value::Int(3));
|
assert_eq!(eval("builtins.add 1 2"), Value::Int(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_length() {
|
fn builtins_length() {
|
||||||
assert_eq!(eval("builtins.length [1 2 3]"), Value::Int(3));
|
assert_eq!(eval("builtins.length [1 2 3]"), Value::Int(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_map() {
|
fn builtins_map() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.map (x: x * 2) [1 2 3]"),
|
eval("builtins.map (x: x * 2) [1 2 3]"),
|
||||||
@@ -34,7 +35,7 @@ fn builtins_map() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_filter() {
|
fn builtins_filter() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.filter (x: x > 1) [1 2 3]"),
|
eval("builtins.filter (x: x > 1) [1 2 3]"),
|
||||||
@@ -42,7 +43,7 @@ fn builtins_filter() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_attrnames() {
|
fn builtins_attrnames() {
|
||||||
let result = eval("builtins.attrNames { a = 1; b = 2; }");
|
let result = eval("builtins.attrNames { a = 1; b = 2; }");
|
||||||
assert!(matches!(result, Value::List(_)));
|
assert!(matches!(result, Value::List(_)));
|
||||||
@@ -51,12 +52,12 @@ fn builtins_attrnames() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_head() {
|
fn builtins_head() {
|
||||||
assert_eq!(eval("builtins.head [1 2 3]"), Value::Int(1));
|
assert_eq!(eval("builtins.head [1 2 3]"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_tail() {
|
fn builtins_tail() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.tail [1 2 3]"),
|
eval("builtins.tail [1 2 3]"),
|
||||||
@@ -64,17 +65,17 @@ fn builtins_tail() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_in_let() {
|
fn builtins_in_let() {
|
||||||
assert_eq!(eval("let b = builtins; in b.add 5 3"), Value::Int(8));
|
assert_eq!(eval("let b = builtins; in b.add 5 3"), Value::Int(8));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_in_with() {
|
fn builtins_in_with() {
|
||||||
assert_eq!(eval("with builtins; add 10 20"), Value::Int(30));
|
assert_eq!(eval("with builtins; add 10 20"), Value::Int(30));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_nested_calls() {
|
fn builtins_nested_calls() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.add (builtins.mul 2 3) (builtins.sub 10 5)"),
|
eval("builtins.add (builtins.mul 2 3) (builtins.sub 10 5)"),
|
||||||
@@ -82,32 +83,32 @@ fn builtins_nested_calls() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_is_list() {
|
fn builtins_is_list() {
|
||||||
assert_eq!(eval("builtins.isList [1 2 3]"), Value::Bool(true));
|
assert_eq!(eval("builtins.isList [1 2 3]"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_is_attrs() {
|
fn builtins_is_attrs() {
|
||||||
assert_eq!(eval("builtins.isAttrs { a = 1; }"), Value::Bool(true));
|
assert_eq!(eval("builtins.isAttrs { a = 1; }"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_is_function() {
|
fn builtins_is_function() {
|
||||||
assert_eq!(eval("builtins.isFunction (x: x)"), Value::Bool(true));
|
assert_eq!(eval("builtins.isFunction (x: x)"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_is_null() {
|
fn builtins_is_null() {
|
||||||
assert_eq!(eval("builtins.isNull null"), Value::Bool(true));
|
assert_eq!(eval("builtins.isNull null"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_is_bool() {
|
fn builtins_is_bool() {
|
||||||
assert_eq!(eval("builtins.isBool true"), Value::Bool(true));
|
assert_eq!(eval("builtins.isBool true"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_shadowing() {
|
fn builtins_shadowing() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("let builtins = { add = x: y: x - y; }; in builtins.add 5 3"),
|
eval("let builtins = { add = x: y: x - y; }; in builtins.add 5 3"),
|
||||||
@@ -115,13 +116,13 @@ fn builtins_shadowing() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_lazy_evaluation() {
|
fn builtins_lazy_evaluation() {
|
||||||
let result = eval("builtins.builtins.builtins.add 1 1");
|
let result = eval("builtins.builtins.builtins.add 1 1");
|
||||||
assert_eq!(result, Value::Int(2));
|
assert_eq!(result, Value::Int(2));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_foldl() {
|
fn builtins_foldl() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.foldl' (acc: x: acc + x) 0 [1 2 3 4 5]"),
|
eval("builtins.foldl' (acc: x: acc + x) 0 [1 2 3 4 5]"),
|
||||||
@@ -129,13 +130,13 @@ fn builtins_foldl() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_elem() {
|
fn builtins_elem() {
|
||||||
assert_eq!(eval("builtins.elem 2 [1 2 3]"), Value::Bool(true));
|
assert_eq!(eval("builtins.elem 2 [1 2 3]"), Value::Bool(true));
|
||||||
assert_eq!(eval("builtins.elem 5 [1 2 3]"), Value::Bool(false));
|
assert_eq!(eval("builtins.elem 5 [1 2 3]"), Value::Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_concat_lists() {
|
fn builtins_concat_lists() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.concatLists [[1 2] [3 4] [5]]"),
|
eval("builtins.concatLists [[1 2] [3 4] [5]]"),
|
||||||
@@ -149,7 +150,7 @@ fn builtins_concat_lists() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_basic() {
|
fn builtins_compare_versions_basic() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.compareVersions \"1.0\" \"2.3\""),
|
eval("builtins.compareVersions \"1.0\" \"2.3\""),
|
||||||
@@ -173,7 +174,7 @@ fn builtins_compare_versions_basic() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_components() {
|
fn builtins_compare_versions_components() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.compareVersions \"2.3.1\" \"2.3\""),
|
eval("builtins.compareVersions \"2.3.1\" \"2.3\""),
|
||||||
@@ -185,7 +186,7 @@ fn builtins_compare_versions_components() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_numeric_vs_alpha() {
|
fn builtins_compare_versions_numeric_vs_alpha() {
|
||||||
// Numeric component comes before alpha component
|
// Numeric component comes before alpha component
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -198,7 +199,7 @@ fn builtins_compare_versions_numeric_vs_alpha() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_pre() {
|
fn builtins_compare_versions_pre() {
|
||||||
// "pre" is special: comes before everything except another "pre"
|
// "pre" is special: comes before everything except another "pre"
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -219,7 +220,7 @@ fn builtins_compare_versions_pre() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_alpha() {
|
fn builtins_compare_versions_alpha() {
|
||||||
// Alphabetic comparison
|
// Alphabetic comparison
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -232,7 +233,7 @@ fn builtins_compare_versions_alpha() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_symmetry() {
|
fn builtins_compare_versions_symmetry() {
|
||||||
// Test symmetry: compareVersions(a, b) == -compareVersions(b, a)
|
// Test symmetry: compareVersions(a, b) == -compareVersions(b, a)
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -245,7 +246,7 @@ fn builtins_compare_versions_symmetry() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_compare_versions_complex() {
|
fn builtins_compare_versions_complex() {
|
||||||
// Complex version strings with multiple components
|
// Complex version strings with multiple components
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -262,7 +263,7 @@ fn builtins_compare_versions_complex() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_generic_closure() {
|
fn builtins_generic_closure() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval(
|
eval(
|
||||||
@@ -278,7 +279,7 @@ fn builtins_generic_closure() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_function_args() {
|
fn builtins_function_args() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("builtins.functionArgs (x: 1)"),
|
eval("builtins.functionArgs (x: 1)"),
|
||||||
@@ -315,7 +316,7 @@ fn builtins_function_args() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn builtins_parse_drv_name() {
|
fn builtins_parse_drv_name() {
|
||||||
let result = eval(r#"builtins.parseDrvName "nix-js-0.1.0pre""#).unwrap_attr_set();
|
let result = eval(r#"builtins.parseDrvName "nix-js-0.1.0pre""#).unwrap_attr_set();
|
||||||
assert_eq!(result.get("name"), Some(&Value::String("nix-js".into())));
|
assert_eq!(result.get("name"), Some(&Value::String("nix-js".into())));
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
use utils::eval_result;
|
||||||
|
|
||||||
use crate::utils::eval_result;
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn to_file_simple() {
|
fn to_file_simple() {
|
||||||
let result =
|
let result =
|
||||||
eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");
|
eval_result(r#"builtins.toFile "hello.txt" "Hello, World!""#).expect("Failed to evaluate");
|
||||||
@@ -19,7 +20,7 @@ fn to_file_simple() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_with_references() {
|
fn to_file_with_references() {
|
||||||
let result = eval_result(
|
let result = eval_result(
|
||||||
r#"
|
r#"
|
||||||
@@ -42,7 +43,7 @@ fn to_file_with_references() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_invalid_name_with_slash() {
|
fn to_file_invalid_name_with_slash() {
|
||||||
let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);
|
let result = eval_result(r#"builtins.toFile "foo/bar.txt" "content""#);
|
||||||
|
|
||||||
@@ -55,7 +56,7 @@ fn to_file_invalid_name_with_slash() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_invalid_name_dot() {
|
fn to_file_invalid_name_dot() {
|
||||||
let result = eval_result(r#"builtins.toFile "." "content""#);
|
let result = eval_result(r#"builtins.toFile "." "content""#);
|
||||||
|
|
||||||
@@ -63,7 +64,7 @@ fn to_file_invalid_name_dot() {
|
|||||||
assert!(result.unwrap_err().to_string().contains("invalid name"));
|
assert!(result.unwrap_err().to_string().contains("invalid name"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_invalid_name_dotdot() {
|
fn to_file_invalid_name_dotdot() {
|
||||||
let result = eval_result(r#"builtins.toFile ".." "content""#);
|
let result = eval_result(r#"builtins.toFile ".." "content""#);
|
||||||
|
|
||||||
@@ -71,7 +72,7 @@ fn to_file_invalid_name_dotdot() {
|
|||||||
assert!(result.unwrap_err().to_string().contains("invalid name"));
|
assert!(result.unwrap_err().to_string().contains("invalid name"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn store_path_validation_not_in_store() {
|
fn store_path_validation_not_in_store() {
|
||||||
let result = eval_result(r#"builtins.storePath "/tmp/foo""#);
|
let result = eval_result(r#"builtins.storePath "/tmp/foo""#);
|
||||||
|
|
||||||
@@ -84,7 +85,7 @@ fn store_path_validation_not_in_store() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn store_path_validation_malformed_hash() {
|
fn store_path_validation_malformed_hash() {
|
||||||
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
|
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
|
||||||
.expect("Failed to create dummy file");
|
.expect("Failed to create dummy file");
|
||||||
@@ -112,7 +113,7 @@ fn store_path_validation_malformed_hash() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn store_path_validation_missing_name() {
|
fn store_path_validation_missing_name() {
|
||||||
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
|
let dummy_file_result = eval_result(r#"builtins.toFile "dummy.txt" "content""#)
|
||||||
.expect("Failed to create dummy file");
|
.expect("Failed to create dummy file");
|
||||||
@@ -140,7 +141,7 @@ fn store_path_validation_missing_name() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_curried_application() {
|
fn to_file_curried_application() {
|
||||||
let result = eval_result(
|
let result = eval_result(
|
||||||
r#"
|
r#"
|
||||||
@@ -162,7 +163,7 @@ fn to_file_curried_application() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_number_conversion() {
|
fn to_file_number_conversion() {
|
||||||
let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
|
let result = eval_result(r#"builtins.toFile "number.txt" (builtins.toString 42)"#)
|
||||||
.expect("Failed to evaluate");
|
.expect("Failed to evaluate");
|
||||||
@@ -176,7 +177,7 @@ fn to_file_number_conversion() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_file_list_conversion() {
|
fn to_file_list_conversion() {
|
||||||
let result = eval_result(
|
let result = eval_result(
|
||||||
r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,
|
r#"builtins.toFile "list.txt" (builtins.concatStringsSep "\n" ["line1" "line2" "line3"])"#,
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
use utils::{eval_deep, eval_deep_result};
|
||||||
|
|
||||||
use crate::utils::{eval_deep, eval_deep_result};
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn add_operator_preserves_derivation_context() {
|
fn add_operator_preserves_derivation_context() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"
|
r#"
|
||||||
@@ -38,7 +39,7 @@ fn add_operator_preserves_derivation_context() {
|
|||||||
assert_eq!(result, nix_result);
|
assert_eq!(result, nix_result);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_minimal() {
|
fn derivation_minimal() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
||||||
@@ -76,7 +77,7 @@ fn derivation_minimal() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_with_args() {
|
fn derivation_with_args() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -98,7 +99,7 @@ fn derivation_with_args() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_to_string() {
|
fn derivation_to_string() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
|
r#"toString (derivation { name = "foo"; builder = "/bin/sh"; system = "x86_64-linux"; })"#,
|
||||||
@@ -110,7 +111,7 @@ fn derivation_to_string() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_missing_name() {
|
fn derivation_missing_name() {
|
||||||
let result =
|
let result =
|
||||||
eval_deep_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
|
eval_deep_result(r#"derivation { builder = "/bin/sh"; system = "x86_64-linux"; }"#);
|
||||||
@@ -120,7 +121,7 @@ fn derivation_missing_name() {
|
|||||||
assert!(err_msg.contains("missing required attribute 'name'"));
|
assert!(err_msg.contains("missing required attribute 'name'"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_invalid_name_with_drv_suffix() {
|
fn derivation_invalid_name_with_drv_suffix() {
|
||||||
let result = eval_deep_result(
|
let result = eval_deep_result(
|
||||||
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
r#"derivation { name = "foo.drv"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
||||||
@@ -131,7 +132,7 @@ fn derivation_invalid_name_with_drv_suffix() {
|
|||||||
assert!(err_msg.contains("cannot end with .drv"));
|
assert!(err_msg.contains("cannot end with .drv"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_missing_builder() {
|
fn derivation_missing_builder() {
|
||||||
let result = eval_deep_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
|
let result = eval_deep_result(r#"derivation { name = "test"; system = "x86_64-linux"; }"#);
|
||||||
|
|
||||||
@@ -140,7 +141,7 @@ fn derivation_missing_builder() {
|
|||||||
assert!(err_msg.contains("missing required attribute 'builder'"));
|
assert!(err_msg.contains("missing required attribute 'builder'"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_missing_system() {
|
fn derivation_missing_system() {
|
||||||
let result = eval_deep_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
|
let result = eval_deep_result(r#"derivation { name = "test"; builder = "/bin/sh"; }"#);
|
||||||
|
|
||||||
@@ -149,7 +150,7 @@ fn derivation_missing_system() {
|
|||||||
assert!(err_msg.contains("missing required attribute 'system'"));
|
assert!(err_msg.contains("missing required attribute 'system'"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_with_env_vars() {
|
fn derivation_with_env_vars() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -170,7 +171,7 @@ fn derivation_with_env_vars() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_strict() {
|
fn derivation_strict() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
r#"builtins.derivationStrict { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }"#,
|
||||||
@@ -187,7 +188,7 @@ fn derivation_strict() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_deterministic_paths() {
|
fn derivation_deterministic_paths() {
|
||||||
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
|
let expr = r#"derivation { name = "hello"; builder = "/bin/sh"; system = "x86_64-linux"; }"#;
|
||||||
|
|
||||||
@@ -203,7 +204,7 @@ fn derivation_deterministic_paths() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn derivation_escaping_in_aterm() {
|
fn derivation_escaping_in_aterm() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -223,7 +224,7 @@ fn derivation_escaping_in_aterm() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn multi_output_two_outputs() {
|
fn multi_output_two_outputs() {
|
||||||
let drv = eval_deep(
|
let drv = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -266,7 +267,7 @@ fn multi_output_two_outputs() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn multi_output_three_outputs() {
|
fn multi_output_three_outputs() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -314,7 +315,7 @@ fn multi_output_three_outputs() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn multi_output_backward_compat() {
|
fn multi_output_backward_compat() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -340,7 +341,7 @@ fn multi_output_backward_compat() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn multi_output_deterministic() {
|
fn multi_output_deterministic() {
|
||||||
let result1 = eval_deep(
|
let result1 = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -363,7 +364,7 @@ fn multi_output_deterministic() {
|
|||||||
assert_eq!(result1, result2);
|
assert_eq!(result1, result2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_sha256_flat() {
|
fn fixed_output_sha256_flat() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -400,7 +401,7 @@ fn fixed_output_sha256_flat() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_missing_hashalgo() {
|
fn fixed_output_missing_hashalgo() {
|
||||||
assert!(
|
assert!(
|
||||||
eval_deep_result(
|
eval_deep_result(
|
||||||
@@ -415,7 +416,7 @@ fn fixed_output_missing_hashalgo() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_recursive_mode() {
|
fn fixed_output_recursive_mode() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -445,7 +446,7 @@ fn fixed_output_recursive_mode() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_rejects_multi_output() {
|
fn fixed_output_rejects_multi_output() {
|
||||||
let result = eval_deep_result(
|
let result = eval_deep_result(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -463,7 +464,7 @@ fn fixed_output_rejects_multi_output() {
|
|||||||
assert!(err_msg.contains("fixed-output") && err_msg.contains("one"));
|
assert!(err_msg.contains("fixed-output") && err_msg.contains("one"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_invalid_hash_mode() {
|
fn fixed_output_invalid_hash_mode() {
|
||||||
let result = eval_deep_result(
|
let result = eval_deep_result(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -480,7 +481,7 @@ fn fixed_output_invalid_hash_mode() {
|
|||||||
assert!(err_msg.contains("outputHashMode") && err_msg.contains("invalid"));
|
assert!(err_msg.contains("outputHashMode") && err_msg.contains("invalid"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn structured_attrs_basic() {
|
fn structured_attrs_basic() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -505,7 +506,7 @@ fn structured_attrs_basic() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn structured_attrs_nested() {
|
fn structured_attrs_nested() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -526,7 +527,7 @@ fn structured_attrs_nested() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn structured_attrs_rejects_functions() {
|
fn structured_attrs_rejects_functions() {
|
||||||
let result = eval_deep_result(
|
let result = eval_deep_result(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -543,7 +544,7 @@ fn structured_attrs_rejects_functions() {
|
|||||||
assert!(err_msg.contains("cannot convert lambda to JSON"));
|
assert!(err_msg.contains("cannot convert lambda to JSON"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn structured_attrs_false() {
|
fn structured_attrs_false() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -566,7 +567,7 @@ fn structured_attrs_false() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn ignore_nulls_true() {
|
fn ignore_nulls_true() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -588,7 +589,7 @@ fn ignore_nulls_true() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn ignore_nulls_false() {
|
fn ignore_nulls_false() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -611,7 +612,7 @@ fn ignore_nulls_false() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn ignore_nulls_with_structured_attrs() {
|
fn ignore_nulls_with_structured_attrs() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -635,7 +636,7 @@ fn ignore_nulls_with_structured_attrs() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn all_features_combined() {
|
fn all_features_combined() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -662,7 +663,7 @@ fn all_features_combined() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn fixed_output_with_structured_attrs() {
|
fn fixed_output_with_structured_attrs() {
|
||||||
let result = eval_deep(
|
let result = eval_deep(
|
||||||
r#"derivation {
|
r#"derivation {
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
use crate::utils::eval;
|
mod utils;
|
||||||
|
|
||||||
#[test_log::test]
|
use utils::eval;
|
||||||
|
|
||||||
|
#[test]
|
||||||
fn test_find_file_corepkg_fetchurl() {
|
fn test_find_file_corepkg_fetchurl() {
|
||||||
let result = eval(
|
let result = eval(
|
||||||
r#"
|
r#"
|
||||||
@@ -15,13 +17,13 @@ fn test_find_file_corepkg_fetchurl() {
|
|||||||
assert!(result.to_string().contains("fetchurl.nix"));
|
assert!(result.to_string().contains("fetchurl.nix"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn test_lookup_path_syntax() {
|
fn test_lookup_path_syntax() {
|
||||||
let result = eval(r#"<nix/fetchurl.nix>"#);
|
let result = eval(r#"<nix/fetchurl.nix>"#);
|
||||||
assert!(result.to_string().contains("fetchurl.nix"));
|
assert!(result.to_string().contains("fetchurl.nix"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn test_import_corepkg() {
|
fn test_import_corepkg() {
|
||||||
let result = eval(
|
let result = eval(
|
||||||
r#"
|
r#"
|
||||||
@@ -1,23 +1,24 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::value::{List, Value};
|
use nix_js::value::{List, Value};
|
||||||
|
use utils::eval;
|
||||||
|
|
||||||
use crate::utils::{eval, eval_result};
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn true_literal() {
|
fn true_literal() {
|
||||||
assert_eq!(eval("true"), Value::Bool(true));
|
assert_eq!(eval("true"), Value::Bool(true));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn false_literal() {
|
fn false_literal() {
|
||||||
assert_eq!(eval("false"), Value::Bool(false));
|
assert_eq!(eval("false"), Value::Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn null_literal() {
|
fn null_literal() {
|
||||||
assert_eq!(eval("null"), Value::Null);
|
assert_eq!(eval("null"), Value::Null);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn map_function() {
|
fn map_function() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("map (x: x * 2) [1 2 3]"),
|
eval("map (x: x * 2) [1 2 3]"),
|
||||||
@@ -25,23 +26,23 @@ fn map_function() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn is_null_function() {
|
fn is_null_function() {
|
||||||
assert_eq!(eval("isNull null"), Value::Bool(true));
|
assert_eq!(eval("isNull null"), Value::Bool(true));
|
||||||
assert_eq!(eval("isNull 5"), Value::Bool(false));
|
assert_eq!(eval("isNull 5"), Value::Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn shadow_true() {
|
fn shadow_true() {
|
||||||
assert_eq!(eval("let true = false; in true"), Value::Bool(false));
|
assert_eq!(eval("let true = false; in true"), Value::Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn shadow_map() {
|
fn shadow_map() {
|
||||||
assert_eq!(eval("let map = x: y: x; in map 1 2"), Value::Int(1));
|
assert_eq!(eval("let map = x: y: x; in map 1 2"), Value::Int(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn mixed_usage() {
|
fn mixed_usage() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("if true then map (x: x + 1) [1 2] else []"),
|
eval("if true then map (x: x + 1) [1 2] else []"),
|
||||||
@@ -49,7 +50,7 @@ fn mixed_usage() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn in_let_bindings() {
|
fn in_let_bindings() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("let x = true; y = false; in x && y"),
|
eval("let x = true; y = false; in x && y"),
|
||||||
@@ -57,18 +58,18 @@ fn in_let_bindings() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn shadow_in_function() {
|
fn shadow_in_function() {
|
||||||
assert_eq!(eval("(true: true) false"), Value::Bool(false));
|
assert_eq!(eval("(true: true) false"), Value::Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn throw_function() {
|
fn throw_function() {
|
||||||
let result = eval_result("throw \"error message\"");
|
let result = utils::eval_result("throw \"error message\"");
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn to_string_function() {
|
fn to_string_function() {
|
||||||
assert_eq!(eval("toString 42"), Value::String("42".to_string()));
|
assert_eq!(eval("toString 42"), Value::String("42".to_string()));
|
||||||
}
|
}
|
||||||
@@ -1,19 +1,20 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
use utils::{eval, eval_result};
|
||||||
|
|
||||||
use crate::utils::{eval, eval_result};
|
#[test]
|
||||||
|
|
||||||
#[test_log::test]
|
|
||||||
fn required_parameters() {
|
fn required_parameters() {
|
||||||
assert_eq!(eval("({ a, b }: a + b) { a = 1; b = 2; }"), Value::Int(3));
|
assert_eq!(eval("({ a, b }: a + b) { a = 1; b = 2; }"), Value::Int(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn missing_required_parameter() {
|
fn missing_required_parameter() {
|
||||||
let result = eval_result("({ a, b }: a + b) { a = 1; }");
|
let result = eval_result("({ a, b }: a + b) { a = 1; }");
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn all_required_parameters_present() {
|
fn all_required_parameters_present() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ x, y, z }: x + y + z) { x = 1; y = 2; z = 3; }"),
|
eval("({ x, y, z }: x + y + z) { x = 1; y = 2; z = 3; }"),
|
||||||
@@ -21,13 +22,13 @@ fn all_required_parameters_present() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn reject_unexpected_arguments() {
|
fn reject_unexpected_arguments() {
|
||||||
let result = eval_result("({ a, b }: a + b) { a = 1; b = 2; c = 3; }");
|
let result = eval_result("({ a, b }: a + b) { a = 1; b = 2; c = 3; }");
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn ellipsis_accepts_extra_arguments() {
|
fn ellipsis_accepts_extra_arguments() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b, ... }: a + b) { a = 1; b = 2; c = 3; }"),
|
eval("({ a, b, ... }: a + b) { a = 1; b = 2; c = 3; }"),
|
||||||
@@ -35,12 +36,12 @@ fn ellipsis_accepts_extra_arguments() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn default_parameters() {
|
fn default_parameters() {
|
||||||
assert_eq!(eval("({ a, b ? 5 }: a + b) { a = 1; }"), Value::Int(6));
|
assert_eq!(eval("({ a, b ? 5 }: a + b) { a = 1; }"), Value::Int(6));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn override_default_parameter() {
|
fn override_default_parameter() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b ? 5 }: a + b) { a = 1; b = 10; }"),
|
eval("({ a, b ? 5 }: a + b) { a = 1; b = 10; }"),
|
||||||
@@ -48,7 +49,7 @@ fn override_default_parameter() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn at_pattern_alias() {
|
fn at_pattern_alias() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("(args@{ a, b }: args.a + args.b) { a = 1; b = 2; }"),
|
eval("(args@{ a, b }: args.a + args.b) { a = 1; b = 2; }"),
|
||||||
@@ -56,17 +57,17 @@ fn at_pattern_alias() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn simple_parameter_no_validation() {
|
fn simple_parameter_no_validation() {
|
||||||
assert_eq!(eval("(x: x.a + x.b) { a = 1; b = 2; }"), Value::Int(3));
|
assert_eq!(eval("(x: x.a + x.b) { a = 1; b = 2; }"), Value::Int(3));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn simple_parameter_accepts_any_argument() {
|
fn simple_parameter_accepts_any_argument() {
|
||||||
assert_eq!(eval("(x: x) 42"), Value::Int(42));
|
assert_eq!(eval("(x: x) 42"), Value::Int(42));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn nested_function_parameters() {
|
fn nested_function_parameters() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a }: { b }: a + b) { a = 5; } { b = 3; }"),
|
eval("({ a }: { b }: a + b) { a = 5; } { b = 3; }"),
|
||||||
@@ -74,12 +75,12 @@ fn nested_function_parameters() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_simple_reference_in_default() {
|
fn pattern_param_simple_reference_in_default() {
|
||||||
assert_eq!(eval("({ a, b ? a }: b) { a = 10; }"), Value::Int(10));
|
assert_eq!(eval("({ a, b ? a }: b) { a = 10; }"), Value::Int(10));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_multiple_references_in_default() {
|
fn pattern_param_multiple_references_in_default() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b ? a + 5, c ? 1 }: b + c) { a = 10; }"),
|
eval("({ a, b ? a + 5, c ? 1 }: b + c) { a = 10; }"),
|
||||||
@@ -87,7 +88,7 @@ fn pattern_param_multiple_references_in_default() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_mutual_reference() {
|
fn pattern_param_mutual_reference() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; }"),
|
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; }"),
|
||||||
@@ -95,7 +96,7 @@ fn pattern_param_mutual_reference() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_override_mutual_reference() {
|
fn pattern_param_override_mutual_reference() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; c = 10; }"),
|
eval("({ a, b ? c + 1, c ? 5 }: b) { a = 1; c = 10; }"),
|
||||||
@@ -103,7 +104,7 @@ fn pattern_param_override_mutual_reference() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_reference_list() {
|
fn pattern_param_reference_list() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("({ a, b ? [ a 2 ] }: builtins.elemAt b 0) { a = 42; }"),
|
eval("({ a, b ? [ a 2 ] }: builtins.elemAt b 0) { a = 42; }"),
|
||||||
@@ -111,7 +112,7 @@ fn pattern_param_reference_list() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn pattern_param_alias_in_default() {
|
fn pattern_param_alias_in_default() {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
eval("(args@{ a, b ? args.a + 10 }: b) { a = 5; }"),
|
eval("(args@{ a, b ? args.a + 10 }: b) { a = 5; }"),
|
||||||
@@ -1,10 +1,12 @@
|
|||||||
|
mod utils;
|
||||||
|
|
||||||
use nix_js::context::Context;
|
use nix_js::context::Context;
|
||||||
use nix_js::error::Source;
|
use nix_js::error::Source;
|
||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
|
||||||
use crate::utils::{eval, eval_result};
|
use crate::utils::{eval, eval_result};
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn import_absolute_path() {
|
fn import_absolute_path() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let lib_path = temp_dir.path().join("nix_test_lib.nix");
|
let lib_path = temp_dir.path().join("nix_test_lib.nix");
|
||||||
@@ -15,7 +17,7 @@ fn import_absolute_path() {
|
|||||||
assert_eq!(eval(&expr), Value::Int(8));
|
assert_eq!(eval(&expr), Value::Int(8));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn import_nested() {
|
fn import_nested() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
|
|
||||||
@@ -33,7 +35,7 @@ fn import_nested() {
|
|||||||
assert_eq!(eval(&expr), Value::Int(30));
|
assert_eq!(eval(&expr), Value::Int(30));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn import_relative_path() {
|
fn import_relative_path() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let subdir = temp_dir.path().join("subdir");
|
let subdir = temp_dir.path().join("subdir");
|
||||||
@@ -64,7 +66,7 @@ fn import_relative_path() {
|
|||||||
assert_eq!(eval(&expr), Value::Int(7));
|
assert_eq!(eval(&expr), Value::Int(7));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn import_returns_function() {
|
fn import_returns_function() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let func_path = temp_dir.path().join("nix_test_func.nix");
|
let func_path = temp_dir.path().join("nix_test_func.nix");
|
||||||
@@ -74,7 +76,7 @@ fn import_returns_function() {
|
|||||||
assert_eq!(eval(&expr), Value::Int(10));
|
assert_eq!(eval(&expr), Value::Int(10));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn import_with_complex_dependency_graph() {
|
fn import_with_complex_dependency_graph() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
|
|
||||||
@@ -95,7 +97,7 @@ fn import_with_complex_dependency_graph() {
|
|||||||
|
|
||||||
// Tests for builtins.path
|
// Tests for builtins.path
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_with_file() {
|
fn path_with_file() {
|
||||||
let mut ctx = Context::new().unwrap();
|
let mut ctx = Context::new().unwrap();
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
@@ -114,7 +116,7 @@ fn path_with_file() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_with_custom_name() {
|
fn path_with_custom_name() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_file = temp_dir.path().join("original.txt");
|
let test_file = temp_dir.path().join("original.txt");
|
||||||
@@ -134,7 +136,7 @@ fn path_with_custom_name() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_with_directory_recursive() {
|
fn path_with_directory_recursive() {
|
||||||
let mut ctx = Context::new().unwrap();
|
let mut ctx = Context::new().unwrap();
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
@@ -157,7 +159,7 @@ fn path_with_directory_recursive() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_flat_with_file() {
|
fn path_flat_with_file() {
|
||||||
let mut ctx = Context::new().unwrap();
|
let mut ctx = Context::new().unwrap();
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
@@ -177,7 +179,7 @@ fn path_flat_with_file() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_flat_with_directory_fails() {
|
fn path_flat_with_directory_fails() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_dir = temp_dir.path().join("mydir");
|
let test_dir = temp_dir.path().join("mydir");
|
||||||
@@ -194,7 +196,7 @@ fn path_flat_with_directory_fails() {
|
|||||||
assert!(err_msg.contains("recursive") || err_msg.contains("regular file"));
|
assert!(err_msg.contains("recursive") || err_msg.contains("regular file"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_nonexistent_fails() {
|
fn path_nonexistent_fails() {
|
||||||
let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#;
|
let expr = r#"builtins.path { path = "/nonexistent/path/that/should/not/exist"; }"#;
|
||||||
let result = eval_result(expr);
|
let result = eval_result(expr);
|
||||||
@@ -204,7 +206,7 @@ fn path_nonexistent_fails() {
|
|||||||
assert!(err_msg.contains("does not exist"));
|
assert!(err_msg.contains("does not exist"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_missing_path_param() {
|
fn path_missing_path_param() {
|
||||||
let expr = r#"builtins.path { name = "test"; }"#;
|
let expr = r#"builtins.path { name = "test"; }"#;
|
||||||
let result = eval_result(expr);
|
let result = eval_result(expr);
|
||||||
@@ -214,7 +216,7 @@ fn path_missing_path_param() {
|
|||||||
assert!(err_msg.contains("path") && err_msg.contains("required"));
|
assert!(err_msg.contains("path") && err_msg.contains("required"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_with_sha256() {
|
fn path_with_sha256() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_file = temp_dir.path().join("hash_test.txt");
|
let test_file = temp_dir.path().join("hash_test.txt");
|
||||||
@@ -241,7 +243,7 @@ fn path_with_sha256() {
|
|||||||
assert_eq!(store_path1, store_path2);
|
assert_eq!(store_path1, store_path2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn path_deterministic() {
|
fn path_deterministic() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_file = temp_dir.path().join("deterministic.txt");
|
let test_file = temp_dir.path().join("deterministic.txt");
|
||||||
@@ -259,7 +261,7 @@ fn path_deterministic() {
|
|||||||
assert_eq!(result1, result2);
|
assert_eq!(result1, result2);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_file_type_regular_file() {
|
fn read_file_type_regular_file() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_file = temp_dir.path().join("test.txt");
|
let test_file = temp_dir.path().join("test.txt");
|
||||||
@@ -269,7 +271,7 @@ fn read_file_type_regular_file() {
|
|||||||
assert_eq!(eval(&expr), Value::String("regular".to_string()));
|
assert_eq!(eval(&expr), Value::String("regular".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_file_type_directory() {
|
fn read_file_type_directory() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_dir = temp_dir.path().join("testdir");
|
let test_dir = temp_dir.path().join("testdir");
|
||||||
@@ -279,7 +281,7 @@ fn read_file_type_directory() {
|
|||||||
assert_eq!(eval(&expr), Value::String("directory".to_string()));
|
assert_eq!(eval(&expr), Value::String("directory".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_file_type_symlink() {
|
fn read_file_type_symlink() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let target = temp_dir.path().join("target.txt");
|
let target = temp_dir.path().join("target.txt");
|
||||||
@@ -297,7 +299,7 @@ fn read_file_type_symlink() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_dir_basic() {
|
fn read_dir_basic() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_dir = temp_dir.path().join("readdir_test");
|
let test_dir = temp_dir.path().join("readdir_test");
|
||||||
@@ -329,7 +331,7 @@ fn read_dir_basic() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_dir_empty() {
|
fn read_dir_empty() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_dir = temp_dir.path().join("empty_dir");
|
let test_dir = temp_dir.path().join("empty_dir");
|
||||||
@@ -345,7 +347,7 @@ fn read_dir_empty() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_dir_nonexistent_fails() {
|
fn read_dir_nonexistent_fails() {
|
||||||
let expr = r#"builtins.readDir "/nonexistent/directory""#;
|
let expr = r#"builtins.readDir "/nonexistent/directory""#;
|
||||||
let result = eval_result(expr);
|
let result = eval_result(expr);
|
||||||
@@ -353,7 +355,7 @@ fn read_dir_nonexistent_fails() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn read_dir_on_file_fails() {
|
fn read_dir_on_file_fails() {
|
||||||
let temp_dir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let test_file = temp_dir.path().join("test.txt");
|
let test_file = temp_dir.path().join("test.txt");
|
||||||
@@ -1,5 +1,7 @@
|
|||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
mod utils;
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use nix_js::context::Context;
|
use nix_js::context::Context;
|
||||||
@@ -7,7 +9,7 @@ use nix_js::error::Source;
|
|||||||
use nix_js::value::Value;
|
use nix_js::value::Value;
|
||||||
|
|
||||||
fn get_lang_dir() -> PathBuf {
|
fn get_lang_dir() -> PathBuf {
|
||||||
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/tests/lang")
|
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/lang")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eval_file(name: &str) -> Result<(Value, Source), String> {
|
fn eval_file(name: &str) -> Result<(Value, Source), String> {
|
||||||
@@ -42,7 +44,7 @@ fn format_value(value: &Value) -> String {
|
|||||||
macro_rules! eval_okay_test {
|
macro_rules! eval_okay_test {
|
||||||
($(#[$attr:meta])* $name:ident$(, $pre:expr)?) => {
|
($(#[$attr:meta])* $name:ident$(, $pre:expr)?) => {
|
||||||
$(#[$attr])*
|
$(#[$attr])*
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn $name() {
|
fn $name() {
|
||||||
$(($pre)();)?
|
$(($pre)();)?
|
||||||
let test_name = concat!("eval-okay-", stringify!($name))
|
let test_name = concat!("eval-okay-", stringify!($name))
|
||||||
@@ -74,7 +76,7 @@ macro_rules! eval_okay_test {
|
|||||||
|
|
||||||
macro_rules! eval_fail_test {
|
macro_rules! eval_fail_test {
|
||||||
($name:ident) => {
|
($name:ident) => {
|
||||||
#[test_log::test]
|
#[test]
|
||||||
fn $name() {
|
fn $name() {
|
||||||
let test_name = concat!("eval-fail-", stringify!($name))
|
let test_name = concat!("eval-fail-", stringify!($name))
|
||||||
.replace("_", "-")
|
.replace("_", "-")
|
||||||
@@ -192,11 +194,15 @@ eval_okay_test!(
|
|||||||
eval_okay_test!(partition);
|
eval_okay_test!(partition);
|
||||||
eval_okay_test!(path);
|
eval_okay_test!(path);
|
||||||
eval_okay_test!(pathexists);
|
eval_okay_test!(pathexists);
|
||||||
eval_okay_test!(path_string_interpolation, || {
|
eval_okay_test!(
|
||||||
|
#[ignore = "rnix 0.13 regression: /${foo}-/*...*/ fails to parse"]
|
||||||
|
path_string_interpolation,
|
||||||
|
|| {
|
||||||
unsafe {
|
unsafe {
|
||||||
std::env::set_var("HOME", "/fake-home");
|
std::env::set_var("HOME", "/fake-home");
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
);
|
||||||
eval_okay_test!(patterns);
|
eval_okay_test!(patterns);
|
||||||
eval_okay_test!(print);
|
eval_okay_test!(print);
|
||||||
eval_okay_test!(readDir);
|
eval_okay_test!(readDir);
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user