feat: use snix nix-compat; implement metadata cache

This commit is contained in:
2026-01-18 11:52:47 +08:00
parent 611255d42c
commit 2441e10607
9 changed files with 1346 additions and 589 deletions

372
Cargo.lock generated
View File

@@ -34,15 +34,6 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "anes" name = "anes"
version = "0.1.6" version = "0.1.6"
@@ -70,28 +61,6 @@ dependencies = [
"derive_arbitrary", "derive_arbitrary",
] ]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "atomic-waker" name = "atomic-waker"
version = "1.1.2" version = "1.1.2"
@@ -126,6 +95,12 @@ dependencies = [
"vsimd", "vsimd",
] ]
[[package]]
name = "base64ct"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
[[package]] [[package]]
name = "bincode" name = "bincode"
version = "1.3.3" version = "1.3.3"
@@ -207,6 +182,17 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "bstr"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
dependencies = [
"memchr",
"regex-automata",
"serde",
]
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.19.0" version = "3.19.0"
@@ -303,7 +289,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [ dependencies = [
"nom", "nom 7.1.3",
] ]
[[package]] [[package]]
@@ -324,19 +310,6 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-link",
]
[[package]] [[package]]
name = "ciborium" name = "ciborium"
version = "0.2.2" version = "0.2.2"
@@ -419,6 +392,12 @@ dependencies = [
"error-code", "error-code",
] ]
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]] [[package]]
name = "constant_time_eq" name = "constant_time_eq"
version = "0.3.1" version = "0.3.1"
@@ -449,12 +428,6 @@ version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f" checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f"
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]] [[package]]
name = "core_maths" name = "core_maths"
version = "0.1.1" version = "0.1.1"
@@ -580,6 +553,33 @@ dependencies = [
"typenum", "typenum",
] ]
[[package]]
name = "curve25519-dalek"
version = "4.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
dependencies = [
"cfg-if",
"cpufeatures",
"curve25519-dalek-derive",
"digest",
"fiat-crypto",
"rustc_version",
"subtle",
"zeroize",
]
[[package]]
name = "curve25519-dalek-derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "data-encoding" name = "data-encoding"
version = "2.9.0" version = "2.9.0"
@@ -713,6 +713,16 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "der"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"zeroize",
]
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.5.5" version = "0.5.5"
@@ -831,6 +841,30 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "ed25519"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
dependencies = [
"pkcs8",
"signature",
]
[[package]]
name = "ed25519-dalek"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9"
dependencies = [
"curve25519-dalek",
"ed25519",
"serde",
"sha2",
"subtle",
"zeroize",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.15.0" version = "1.15.0"
@@ -865,6 +899,18 @@ version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]] [[package]]
name = "fastrand" name = "fastrand"
version = "2.3.0" version = "2.3.0"
@@ -882,6 +928,12 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "fiat-crypto"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]] [[package]]
name = "filetime" name = "filetime"
version = "0.2.26" version = "0.2.26"
@@ -1125,6 +1177,15 @@ dependencies = [
"foldhash 0.2.0", "foldhash 0.2.0",
] ]
[[package]]
name = "hashlink"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
"hashbrown 0.15.5",
]
[[package]] [[package]]
name = "heck" name = "heck"
version = "0.5.0" version = "0.5.0"
@@ -1262,30 +1323,6 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"wasm-bindgen",
"windows-core",
]
[[package]]
name = "iana-time-zone-haiku"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
"cc",
]
[[package]] [[package]]
name = "icu_calendar" name = "icu_calendar"
version = "2.1.1" version = "2.1.1"
@@ -1603,6 +1640,17 @@ dependencies = [
"redox_syscall 0.7.0", "redox_syscall 0.7.0",
] ]
[[package]]
name = "libsqlite3-sys"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad8935b44e7c13394a179a438e0cebba0fe08fe01b54f152e29a93b5cf993fd4"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
version = "0.4.15" version = "0.4.15"
@@ -1745,23 +1793,38 @@ dependencies = [
] ]
[[package]] [[package]]
name = "nix-daemon" name = "nix-compat"
version = "0.1.1" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://git.snix.dev/snix/snix.git#44743974c24f9850a1552d07734f95a54e51dba0"
checksum = "bb28bc02b8ea18d59e15fc8e86ae35850326dc5e4e2dcf17bc659f2fd79f1a08"
dependencies = [ dependencies = [
"async-stream", "bitflags",
"chrono", "bstr",
"bytes",
"data-encoding",
"ed25519",
"ed25519-dalek",
"futures", "futures",
"mimalloc",
"nix-compat-derive",
"nom 8.0.0",
"num_enum", "num_enum",
"tap", "pin-project-lite",
"thiserror 1.0.69", "sha2",
"thiserror 2.0.17",
"tokio", "tokio",
"tokio-stream",
"tokio-test",
"tracing", "tracing",
] ]
[[package]]
name = "nix-compat-derive"
version = "0.1.0"
source = "git+https://git.snix.dev/snix/snix.git#44743974c24f9850a1552d07734f95a54e51dba0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "nix-js" name = "nix-js"
version = "0.1.0" version = "0.1.0"
@@ -1778,13 +1841,14 @@ dependencies = [
"hex", "hex",
"itertools 0.14.0", "itertools 0.14.0",
"mimalloc", "mimalloc",
"nix-daemon", "nix-compat",
"nix-js-macros", "nix-js-macros",
"nix-nar", "nix-nar",
"petgraph", "petgraph",
"regex", "regex",
"reqwest", "reqwest",
"rnix", "rnix",
"rusqlite",
"rustyline", "rustyline",
"serde", "serde",
"serde_json", "serde_json",
@@ -1832,6 +1896,15 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "nom"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "nu-ansi-term" name = "nu-ansi-term"
version = "0.50.3" version = "0.50.3"
@@ -2011,6 +2084,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]] [[package]]
name = "pkg-config" name = "pkg-config"
version = "0.3.32" version = "0.3.32"
@@ -2219,6 +2302,9 @@ name = "rand_core"
version = "0.6.4" version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.16",
]
[[package]] [[package]]
name = "rand_core" name = "rand_core"
@@ -2393,6 +2479,20 @@ dependencies = [
"text-size", "text-size",
] ]
[[package]]
name = "rusqlite"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c6d5e5acb6f6129fe3f7ba0a7fc77bca1942cb568535e18e7bc40262baf3110"
dependencies = [
"bitflags",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
version = "1.1.0" version = "1.1.0"
@@ -2647,6 +2747,15 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"rand_core 0.6.4",
]
[[package]] [[package]]
name = "simd-adler32" name = "simd-adler32"
version = "0.3.8" version = "0.3.8"
@@ -2693,6 +2802,16 @@ dependencies = [
"url", "url",
] ]
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]] [[package]]
name = "stable_deref_trait" name = "stable_deref_trait"
version = "1.2.1" version = "1.2.1"
@@ -3048,28 +3167,6 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-stream"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f6d24790a10a7af737693a3e8f1d03faef7e6ca0cc99aae5066f533766de545"
dependencies = [
"futures-core",
"tokio",
"tokio-stream",
]
[[package]] [[package]]
name = "toml_datetime" name = "toml_datetime"
version = "0.7.5+spec-1.1.0" version = "0.7.5+spec-1.1.0"
@@ -3311,6 +3408,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.5" version = "0.9.5"
@@ -3497,65 +3600,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
dependencies = [
"windows-implement",
"windows-interface",
"windows-link",
"windows-result",
"windows-strings",
]
[[package]]
name = "windows-implement"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.59.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "windows-link" name = "windows-link"
version = "0.2.1" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-result"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
dependencies = [
"windows-link",
]
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.48.0" version = "0.48.0"

View File

@@ -6,13 +6,13 @@ build = "build.rs"
[features] [features]
default = ["daemon"] default = ["daemon"]
daemon = ["dep:tokio", "dep:nix-daemon"] daemon = ["dep:tokio", "dep:nix-compat"]
[dependencies] [dependencies]
mimalloc = "0.1" mimalloc = "0.1"
tokio = { version = "1.41", features = ["rt-multi-thread", "sync"], optional = true } tokio = { version = "1.41", features = ["rt-multi-thread", "sync", "net", "io-util"], optional = true }
nix-daemon = { version = "0.1", optional = true } nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", features = ["wire", "async"], optional = true }
# REPL # REPL
anyhow = "1.0" anyhow = "1.0"
@@ -51,6 +51,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
dirs = "5.0" dirs = "5.0"
tempfile = "3.24" tempfile = "3.24"
rusqlite = { version = "0.33", features = ["bundled"] }
rnix = "0.12" rnix = "0.12"

View File

@@ -1,4 +1,5 @@
use deno_core::op2; use deno_core::op2;
use deno_core::OpState;
use serde::Serialize; use serde::Serialize;
use tracing::debug; use tracing::debug;
@@ -7,9 +8,11 @@ pub(crate) mod cache;
mod download; mod download;
mod git; mod git;
mod hg; mod hg;
mod metadata_cache;
pub use cache::FetcherCache; pub use cache::FetcherCache;
pub use download::Downloader; pub use download::Downloader;
pub use metadata_cache::MetadataCache;
use crate::runtime::NixError; use crate::runtime::NixError;
use crate::nar; use crate::nar;
@@ -51,27 +54,62 @@ pub struct FetchHgResult {
#[op2] #[op2]
#[serde] #[serde]
pub fn op_fetch_url( pub fn op_fetch_url(
state: &mut OpState,
#[string] url: String, #[string] url: String,
#[string] expected_hash: Option<String>, #[string] expected_hash: Option<String>,
#[string] name: Option<String>, #[string] name: Option<String>,
executable: bool, executable: bool,
) -> Result<FetchUrlResult, NixError> { ) -> Result<FetchUrlResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
debug!("fetchurl: {}", url); debug!("fetchurl: {}", url);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new();
let file_name = let file_name =
name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string()); name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string());
if let Some(ref hash) = expected_hash let metadata_cache =
&& let Some(cached) = cache.get_url(&url, hash) MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
let input = serde_json::json!({
"type": "file",
"url": url,
"name": file_name,
"executable": executable,
});
if let Some(cached_entry) = metadata_cache
.lookup(&input)
.map_err(|e| NixError::from(e.to_string()))?
{ {
return Ok(FetchUrlResult { let cached_hash = cached_entry
store_path: cached.to_string_lossy().to_string(), .info
hash: hash.clone(), .get("hash")
}); .and_then(|v| v.as_str())
.unwrap_or("");
if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected);
if cached_hash != normalized_expected {
debug!("Cached hash mismatch, re-fetching");
} else {
debug!("Cache hit for {}", url);
return Ok(FetchUrlResult {
store_path: cached_entry.store_path.clone(),
hash: cached_hash.to_string(),
});
}
} else {
debug!("Cache hit for {} (no hash check)", url);
return Ok(FetchUrlResult {
store_path: cached_entry.store_path.clone(),
hash: cached_hash.to_string(),
});
}
} }
debug!("Cache miss, downloading {}", url);
let downloader = Downloader::new();
let data = downloader let data = downloader
.download(&url) .download(&url)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
@@ -88,12 +126,33 @@ pub fn op_fetch_url(
} }
} }
let store_path = cache let store = state.borrow::<Arc<StoreBackend>>();
.put_url(&url, &hash, &data, &file_name, executable) let store_path = store
.as_store()
.add_to_store(&file_name, &data, false, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
#[cfg(unix)]
if executable {
use std::os::unix::fs::PermissionsExt;
if let Ok(metadata) = std::fs::metadata(&store_path) {
let mut perms = metadata.permissions();
perms.set_mode(0o755);
let _ = std::fs::set_permissions(&store_path, perms);
}
}
let info = serde_json::json!({
"hash": hash,
"url": url,
});
metadata_cache
.add(&input, &info, &store_path, true)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
Ok(FetchUrlResult { Ok(FetchUrlResult {
store_path: store_path.to_string_lossy().to_string(), store_path,
hash, hash,
}) })
} }
@@ -101,63 +160,73 @@ pub fn op_fetch_url(
#[op2] #[op2]
#[serde] #[serde]
pub fn op_fetch_tarball( pub fn op_fetch_tarball(
state: &mut OpState,
#[string] url: String, #[string] url: String,
#[string] expected_hash: Option<String>, #[string] expected_hash: Option<String>,
#[string] expected_nar_hash: Option<String>, #[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> { ) -> Result<FetchTarballResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
debug!( debug!(
"fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}", "fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}",
url, expected_hash, expected_nar_hash url, expected_hash, expected_nar_hash
); );
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new();
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());
let metadata_cache =
MetadataCache::new(3600).map_err(|e| NixError::from(e.to_string()))?;
// Try cache lookup with narHash if provided let input = serde_json::json!({
if let Some(ref nar_hash) = expected_nar_hash { "type": "tarball",
let normalized = normalize_hash(nar_hash); "url": url,
debug!("fetchTarball: normalized nar_hash={}", normalized); "name": dir_name,
if let Some(cached) = cache.get_extracted_tarball(&url, &normalized) { });
debug!("fetchTarball: cache HIT (with expected nar_hash)");
// Need to compute tarball hash if not cached if let Some(cached_entry) = metadata_cache
let tarball_hash = expected_hash .lookup(&input)
.as_ref() .map_err(|e| NixError::from(e.to_string()))?
.map(|h| normalize_hash(h)) {
.unwrap_or_default(); let cached_nar_hash = cached_entry
.info
.get("nar_hash")
.and_then(|v| v.as_str())
.unwrap_or("");
let cached_tarball_hash = cached_entry
.info
.get("tarball_hash")
.and_then(|v| v.as_str())
.unwrap_or("");
if let Some(ref expected_nar) = expected_nar_hash {
let normalized_expected = normalize_hash(expected_nar);
if cached_nar_hash == normalized_expected {
debug!("Cache hit for tarball {}", url);
return Ok(FetchTarballResult {
store_path: cached_entry.store_path.clone(),
hash: cached_tarball_hash.to_string(),
nar_hash: cached_nar_hash.to_string(),
});
}
} else if !cached_entry.is_expired(3600) {
debug!("Cache hit for tarball {} (no hash check)", url);
return Ok(FetchTarballResult { return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(), store_path: cached_entry.store_path.clone(),
hash: tarball_hash, hash: cached_tarball_hash.to_string(),
nar_hash: normalized, nar_hash: cached_nar_hash.to_string(),
}); });
} }
debug!("fetchTarball: cache MISS, downloading...");
} else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) {
debug!(
"fetchTarball: cache HIT (by URL, nar_hash={})",
cached_nar_hash
);
let tarball_hash = expected_hash
.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_default();
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
nar_hash: cached_nar_hash,
});
} }
debug!("fetchTarball: cache MISS, downloading...");
debug!("Cache miss, downloading tarball from {}", url);
let downloader = Downloader::new();
let data = downloader let data = downloader
.download(&url) .download(&url)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
// Compute tarball hash (hash of the archive file itself)
let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data)); let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
// Verify tarball hash if provided
if let Some(ref expected) = expected_hash { if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected); let normalized_expected = normalize_hash(expected);
if tarball_hash != normalized_expected { if tarball_hash != normalized_expected {
@@ -168,11 +237,11 @@ pub fn op_fetch_tarball(
} }
} }
let temp_dir = tempfile::tempdir().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let extracted_path = archive::extract_archive(&data, temp_dir.path()) let extracted_path = cache
.extract_tarball_to_temp(&data)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
// Compute NAR hash (hash of the extracted content)
let nar_hash = let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?; nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
@@ -181,10 +250,8 @@ pub fn op_fetch_tarball(
tarball_hash, nar_hash tarball_hash, nar_hash
); );
// Verify NAR hash if provided
if let Some(ref expected) = expected_nar_hash { if let Some(ref expected) = expected_nar_hash {
let normalized_expected = normalize_hash(expected); let normalized_expected = normalize_hash(expected);
if nar_hash != normalized_expected { if nar_hash != normalized_expected {
return Err(NixError::from(format!( return Err(NixError::from(format!(
"NAR hash mismatch for '{}': expected {}, got {}", "NAR hash mismatch for '{}': expected {}, got {}",
@@ -193,12 +260,25 @@ pub fn op_fetch_tarball(
} }
} }
let store_path = cache let store = state.borrow::<Arc<StoreBackend>>();
.put_tarball_from_extracted(&url, &nar_hash, &extracted_path, &dir_name) let store_path = store
.as_store()
.add_to_store_from_path(&dir_name, &extracted_path, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
let info = serde_json::json!({
"tarball_hash": tarball_hash,
"nar_hash": nar_hash,
"url": url,
});
let immutable = expected_nar_hash.is_some();
metadata_cache
.add(&input, &info, &store_path, immutable)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
Ok(FetchTarballResult { Ok(FetchTarballResult {
store_path: store_path.to_string_lossy().to_string(), store_path,
hash: tarball_hash, hash: tarball_hash,
nar_hash, nar_hash,
}) })
@@ -207,6 +287,7 @@ pub fn op_fetch_tarball(
#[op2] #[op2]
#[serde] #[serde]
pub fn op_fetch_git( pub fn op_fetch_git(
state: &mut OpState,
#[string] url: String, #[string] url: String,
#[string] git_ref: Option<String>, #[string] git_ref: Option<String>,
#[string] rev: Option<String>, #[string] rev: Option<String>,
@@ -215,12 +296,18 @@ pub fn op_fetch_git(
all_refs: bool, all_refs: bool,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> { ) -> Result<FetchGitResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
debug!("fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev); debug!("fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());
let store = state.borrow::<Arc<StoreBackend>>();
git::fetch_git( git::fetch_git(
&cache, &cache,
store.as_store(),
&url, &url,
git_ref.as_deref(), git_ref.as_deref(),
rev.as_deref(), rev.as_deref(),

View File

@@ -1,17 +1,12 @@
use std::fs::{self, File}; use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::debug;
use super::archive::ArchiveError; use super::archive::ArchiveError;
#[derive(Debug)] #[derive(Debug)]
pub enum CacheError { pub enum CacheError {
Io(std::io::Error), Io(std::io::Error),
Archive(ArchiveError), Archive(ArchiveError),
Json(serde_json::Error),
} }
impl std::fmt::Display for CacheError { impl std::fmt::Display for CacheError {
@@ -19,7 +14,6 @@ impl std::fmt::Display for CacheError {
match self { match self {
CacheError::Io(e) => write!(f, "I/O error: {}", e), CacheError::Io(e) => write!(f, "I/O error: {}", e),
CacheError::Archive(e) => write!(f, "Archive error: {}", e), CacheError::Archive(e) => write!(f, "Archive error: {}", e),
CacheError::Json(e) => write!(f, "JSON error: {}", e),
} }
} }
} }
@@ -38,24 +32,11 @@ impl From<ArchiveError> for CacheError {
} }
} }
impl From<serde_json::Error> for CacheError {
fn from(e: serde_json::Error) -> Self {
CacheError::Json(e)
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct FetcherCache { pub struct FetcherCache {
base_dir: PathBuf, base_dir: PathBuf,
} }
#[derive(Serialize, Deserialize)]
struct CacheMetadata {
url: String,
hash: String,
name: String,
}
impl FetcherCache { impl FetcherCache {
pub fn new() -> Result<Self, std::io::Error> { pub fn new() -> Result<Self, std::io::Error> {
let base_dir = dirs::cache_dir() let base_dir = dirs::cache_dir()
@@ -68,16 +49,15 @@ impl FetcherCache {
Ok(Self { base_dir }) Ok(Self { base_dir })
} }
fn url_cache_dir(&self) -> PathBuf { pub fn make_store_path(&self, hash: &str, name: &str) -> PathBuf {
self.base_dir.join("url") let short_hash = &hash[..32.min(hash.len())];
} self.base_dir
.join("store")
fn tarball_cache_dir(&self) -> PathBuf { .join(format!("{}-{}", short_hash, name))
self.base_dir.join("tarball")
} }
fn git_cache_dir(&self) -> PathBuf { fn git_cache_dir(&self) -> PathBuf {
self.base_dir.join("git") self.base_dir.join("gitv3")
} }
fn hg_cache_dir(&self) -> PathBuf { fn hg_cache_dir(&self) -> PathBuf {
@@ -88,277 +68,6 @@ impl FetcherCache {
crate::nix_hash::sha256_hex(url) crate::nix_hash::sha256_hex(url)
} }
pub fn get_url(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
let cache_dir = self.url_cache_dir();
let key = Self::hash_key(url);
let meta_path = cache_dir.join(format!("{}.meta", key));
let data_path = cache_dir.join(format!("{}.data", key));
if !meta_path.exists() || !data_path.exists() {
return None;
}
let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
if meta.hash == expected_hash {
let store_path = self.make_store_path(&meta.hash, &meta.name);
if store_path.exists() {
Some(store_path)
} else {
None
}
} else {
None
}
}
/// Store a URL fetch result in the cache and materialize it in the store.
///
/// Writes `data` to `<cache>/url/<key>.data`, records a JSON
/// `CacheMetadata` sidecar at `<key>.meta`, then copies the payload to the
/// content-addressed location from `make_store_path(hash, name)` if that
/// path does not already exist.
///
/// # Errors
/// Any I/O error from directory creation, file writes, permission changes,
/// or the final copy. (`serde_json::Error` converts into `std::io::Error`
/// through the `?` operator here.)
pub fn put_url(
    &self,
    url: &str,
    hash: &str, // hash recorded in metadata and used for the store path
    data: &[u8],
    name: &str, // name component of the store path
    executable: bool, // when true (unix only), chmod 0o755 on both copies
) -> Result<PathBuf, std::io::Error> {
    let cache_dir = self.url_cache_dir();
    fs::create_dir_all(&cache_dir)?;
    let key = Self::hash_key(url);
    let meta_path = cache_dir.join(format!("{}.meta", key));
    let data_path = cache_dir.join(format!("{}.data", key));
    // Write the raw payload first so the metadata never refers to a
    // missing data file.
    let mut file = File::create(&data_path)?;
    file.write_all(data)?;
    #[cfg(unix)]
    if executable {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = fs::metadata(&data_path)?.permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&data_path, perms)?;
    }
    let meta = CacheMetadata {
        url: url.to_string(),
        hash: hash.to_string(),
        name: name.to_string(),
    };
    fs::write(&meta_path, serde_json::to_string(&meta)?)?;
    let store_path = self.make_store_path(hash, name);
    if !store_path.exists() {
        fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
        fs::copy(&data_path, &store_path)?;
        // Re-apply the executable bit on the store copy as well.
        // NOTE(review): fs::copy typically preserves unix permissions, so
        // this may be redundant — harmless either way.
        #[cfg(unix)]
        if executable {
            use std::os::unix::fs::PermissionsExt;
            let mut perms = fs::metadata(&store_path)?.permissions();
            perms.set_mode(0o755);
            fs::set_permissions(&store_path, perms)?;
        }
    }
    Ok(store_path)
}
/// Look up a cached tarball fetch by URL, validating the expected hash.
///
/// Returns the store path recorded for this URL when the cache entry
/// exists, its stored hash equals `expected_hash`, and the store path is
/// still present on disk. Every miss reason is logged via `debug!`.
pub fn get_tarball(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
    let cache_dir = self.tarball_cache_dir();
    let key = Self::hash_key(url);
    // Metadata lives inside the per-URL entry directory.
    let meta_path = cache_dir.join(&key).join(".meta");
    let data_dir = cache_dir.join(&key);
    debug!("get_tarball: url={}, expected_hash={}", url, expected_hash);
    if !meta_path.exists() || !data_dir.exists() {
        debug!("get_tarball: cache miss - meta or data dir not found");
        return None;
    }
    // Unreadable or corrupt metadata is treated as a miss (ok()?).
    let meta: CacheMetadata =
        serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
    debug!("get_tarball: cached hash={}, name={}", meta.hash, meta.name);
    if meta.hash == expected_hash {
        let store_path = self.make_store_path(&meta.hash, &meta.name);
        debug!(
            "get_tarball: hash match, checking store_path={}",
            store_path.display()
        );
        if store_path.exists() {
            debug!("get_tarball: HIT - returning store path");
            Some(store_path)
        } else {
            debug!("get_tarball: store path doesn't exist");
            None
        }
    } else {
        debug!(
            "get_tarball: hash mismatch (cached={}, expected={})",
            meta.hash, expected_hash
        );
        None
    }
}
/// Extract a tarball into the cache and materialize it in the store.
///
/// The archive bytes are extracted into `<cache>/tarball/<key>/`, a JSON
/// `.meta` sidecar is written there, and the extracted tree is copied to
/// the store path from `make_store_path(hash, name)` if not already
/// present.
///
/// NOTE(review): this writes the extracted tree directly under the entry
/// directory, while `get_extracted_tarball`/`get_extracted_tarball_by_url`
/// check for a `content/` subdirectory (as written by
/// `put_tarball_from_extracted`) — confirm the two layouts are meant to
/// coexist.
///
/// # Errors
/// Propagates I/O, archive-extraction, and JSON serialization failures as
/// `CacheError`.
pub fn put_tarball(
    &self,
    url: &str,
    hash: &str,
    data: &[u8],
    name: &str,
) -> Result<PathBuf, CacheError> {
    let cache_dir = self.tarball_cache_dir();
    let key = Self::hash_key(url);
    let extract_dir = cache_dir.join(&key);
    fs::create_dir_all(&extract_dir)?;
    let extracted_path = super::archive::extract_archive(data, &extract_dir)?;
    let meta = CacheMetadata {
        url: url.to_string(),
        hash: hash.to_string(),
        name: name.to_string(),
    };
    fs::write(extract_dir.join(".meta"), serde_json::to_string(&meta)?)?;
    let store_path = self.make_store_path(hash, name);
    if !store_path.exists() {
        fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
        copy_dir_recursive(&extracted_path, &store_path)?;
    }
    Ok(store_path)
}
/// Look up an already-extracted tarball by URL, validating the NAR hash.
///
/// Expects the entry layout written by `put_tarball_from_extracted`
/// (a `.meta` sidecar plus a `content/` directory). Returns the recorded
/// store path only when the cached hash equals `expected_nar_hash` and
/// the store path still exists; all miss reasons are logged via `debug!`.
pub fn get_extracted_tarball(&self, url: &str, expected_nar_hash: &str) -> Option<PathBuf> {
    let cache_dir = self.tarball_cache_dir();
    let key = Self::hash_key(url);
    let cache_entry_dir = cache_dir.join(&key);
    let meta_path = cache_entry_dir.join(".meta");
    let cached_content = cache_entry_dir.join("content");
    debug!(
        "get_extracted_tarball: url={}, expected_nar_hash={}",
        url, expected_nar_hash
    );
    if !meta_path.exists() || !cached_content.exists() {
        debug!("get_extracted_tarball: cache miss - meta or content dir not found");
        return None;
    }
    // Corrupt/unreadable metadata is treated as a miss (ok()?).
    let meta: CacheMetadata =
        serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
    debug!(
        "get_extracted_tarball: cached hash={}, name={}",
        meta.hash, meta.name
    );
    if meta.hash == expected_nar_hash {
        let store_path = self.make_store_path(&meta.hash, &meta.name);
        debug!(
            "get_extracted_tarball: hash match, checking store_path={}",
            store_path.display()
        );
        if store_path.exists() {
            debug!("get_extracted_tarball: HIT - returning store path");
            Some(store_path)
        } else {
            debug!("get_extracted_tarball: store path doesn't exist");
            None
        }
    } else {
        debug!(
            "get_extracted_tarball: hash mismatch (cached={}, expected={})",
            meta.hash, expected_nar_hash
        );
        None
    }
}
/// Resolve a cached extracted tarball by URL alone (no expected hash).
///
/// On a hit returns the store path together with the hash recorded in the
/// cache metadata, so the caller can learn the hash of a previously
/// fetched tarball.
pub fn get_extracted_tarball_by_url(&self, url: &str) -> Option<(PathBuf, String)> {
    debug!("get_extracted_tarball_by_url: url={}", url);

    let entry_dir = self.tarball_cache_dir().join(Self::hash_key(url));
    let meta_file = entry_dir.join(".meta");
    let content_dir = entry_dir.join("content");

    if !(meta_file.exists() && content_dir.exists()) {
        debug!("get_extracted_tarball_by_url: cache miss - meta or content dir not found");
        return None;
    }

    // Corrupt/unreadable metadata counts as a miss.
    let meta: CacheMetadata =
        serde_json::from_str(&fs::read_to_string(&meta_file).ok()?).ok()?;
    debug!(
        "get_extracted_tarball_by_url: cached hash={}, name={}",
        meta.hash, meta.name
    );

    let store_path = self.make_store_path(&meta.hash, &meta.name);
    if !store_path.exists() {
        debug!("get_extracted_tarball_by_url: store path doesn't exist");
        return None;
    }
    debug!("get_extracted_tarball_by_url: HIT - returning store path and hash");
    Some((store_path, meta.hash))
}
/// Cache an already-extracted tree and materialize it in the store.
///
/// Copies `extracted_path` into `<cache>/tarball/<key>/content/` (unless
/// that directory already exists), writes the `.meta` sidecar, then copies
/// the tree to `make_store_path(hash, name)` if not already present.
/// This is the producer side of `get_extracted_tarball` /
/// `get_extracted_tarball_by_url`.
///
/// # Errors
/// Propagates I/O and JSON serialization failures as `CacheError`.
pub fn put_tarball_from_extracted(
    &self,
    url: &str,
    hash: &str,
    extracted_path: &Path,
    name: &str,
) -> Result<PathBuf, CacheError> {
    let cache_dir = self.tarball_cache_dir();
    let key = Self::hash_key(url);
    let cache_entry_dir = cache_dir.join(&key);
    debug!(
        "put_tarball_from_extracted: url={}, hash={}, name={}",
        url, hash, name
    );
    fs::create_dir_all(&cache_entry_dir)?;
    let cached_content = cache_entry_dir.join("content");
    // Existing content is kept as-is; only a missing entry is populated.
    if !cached_content.exists() {
        copy_dir_recursive(extracted_path, &cached_content)?;
    }
    let meta = CacheMetadata {
        url: url.to_string(),
        hash: hash.to_string(),
        name: name.to_string(),
    };
    fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;
    let store_path = self.make_store_path(hash, name);
    debug!(
        "put_tarball_from_extracted: store_path={}",
        store_path.display()
    );
    if !store_path.exists() {
        fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
        copy_dir_recursive(extracted_path, &store_path)?;
        debug!("put_tarball_from_extracted: copied to store");
    } else {
        debug!("put_tarball_from_extracted: store path already exists");
    }
    Ok(store_path)
}
pub fn get_git_bare(&self, url: &str) -> PathBuf { pub fn get_git_bare(&self, url: &str) -> PathBuf {
let key = Self::hash_key(url); let key = Self::hash_key(url);
self.git_cache_dir().join(key) self.git_cache_dir().join(key)
@@ -369,15 +78,14 @@ impl FetcherCache {
self.hg_cache_dir().join(key) self.hg_cache_dir().join(key)
} }
pub fn make_store_path(&self, hash: &str, name: &str) -> PathBuf { pub fn extract_tarball_to_temp(&self, data: &[u8]) -> Result<PathBuf, CacheError> {
let short_hash = &hash[..32.min(hash.len())]; let temp_dir = tempfile::tempdir()?;
self.base_dir let extracted_path = super::archive::extract_archive(data, temp_dir.path())?;
.join("store") Ok(extracted_path)
.join(format!("{}-{}", short_hash, name))
} }
} }
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> { pub fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
fs::create_dir_all(dst)?; fs::create_dir_all(dst)?;
for entry in fs::read_dir(src)? { for entry in fs::read_dir(src)? {

View File

@@ -4,10 +4,12 @@ use std::process::Command;
use super::FetchGitResult; use super::FetchGitResult;
use super::cache::FetcherCache; use super::cache::FetcherCache;
use crate::store::Store;
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn fetch_git( pub fn fetch_git(
cache: &FetcherCache, cache: &FetcherCache,
store: &dyn Store,
url: &str, url: &str,
git_ref: Option<&str>, git_ref: Option<&str>,
rev: Option<&str>, rev: Option<&str>,
@@ -25,7 +27,15 @@ pub fn fetch_git(
} }
let target_rev = resolve_rev(&bare_repo, git_ref, rev)?; let target_rev = resolve_rev(&bare_repo, git_ref, rev)?;
let checkout_dir = checkout_rev(&bare_repo, &target_rev, submodules, name, cache)?;
let temp_dir = tempfile::tempdir()?;
let checkout_dir = checkout_rev_to_temp(&bare_repo, &target_rev, submodules, temp_dir.path())?;
let nar_hash = crate::nar::compute_nar_hash(&checkout_dir)
.map_err(|e| GitError::NarHashError(e.to_string()))?;
let store_path = store.add_to_store_from_path(name, &checkout_dir, vec![])
.map_err(|e| GitError::StoreError(e.to_string()))?;
let rev_count = get_rev_count(&bare_repo, &target_rev)?; let rev_count = get_rev_count(&bare_repo, &target_rev)?;
let last_modified = get_last_modified(&bare_repo, &target_rev)?; let last_modified = get_last_modified(&bare_repo, &target_rev)?;
@@ -38,14 +48,14 @@ pub fn fetch_git(
}; };
Ok(FetchGitResult { Ok(FetchGitResult {
out_path: checkout_dir.to_string_lossy().to_string(), out_path: store_path,
rev: target_rev, rev: target_rev,
short_rev, short_rev,
rev_count, rev_count,
last_modified, last_modified,
last_modified_date, last_modified_date,
submodules, submodules,
nar_hash: None, nar_hash: Some(nar_hash),
}) })
} }
@@ -127,20 +137,13 @@ fn resolve_rev(
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string()) Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
} }
fn checkout_rev( fn checkout_rev_to_temp(
bare_repo: &PathBuf, bare_repo: &PathBuf,
rev: &str, rev: &str,
submodules: bool, submodules: bool,
name: &str, temp_path: &std::path::Path,
cache: &FetcherCache,
) -> Result<PathBuf, GitError> { ) -> Result<PathBuf, GitError> {
let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev)); let checkout_dir = temp_path.join("checkout");
let checkout_dir = cache.make_store_path(&hash, name);
if checkout_dir.exists() {
return Ok(checkout_dir);
}
fs::create_dir_all(&checkout_dir)?; fs::create_dir_all(&checkout_dir)?;
let output = Command::new("git") let output = Command::new("git")
@@ -283,6 +286,8 @@ impl<T> Pipe for T {}
pub enum GitError { pub enum GitError {
IoError(std::io::Error), IoError(std::io::Error),
CommandFailed { operation: String, message: String }, CommandFailed { operation: String, message: String },
NarHashError(String),
StoreError(String),
} }
impl std::fmt::Display for GitError { impl std::fmt::Display for GitError {
@@ -292,6 +297,8 @@ impl std::fmt::Display for GitError {
GitError::CommandFailed { operation, message } => { GitError::CommandFailed { operation, message } => {
write!(f, "Git {} failed: {}", operation, message) write!(f, "Git {} failed: {}", operation, message)
} }
GitError::NarHashError(e) => write!(f, "NAR hash error: {}", e),
GitError::StoreError(e) => write!(f, "Store error: {}", e),
} }
} }
} }

View File

@@ -0,0 +1,215 @@
use rusqlite::{params, Connection, OptionalExtension};
use serde::{Deserialize, Serialize};
use serde_json;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
/// Errors produced by the metadata cache layer.
#[derive(Debug)]
pub enum CacheError {
    /// Underlying SQLite failure.
    Database(rusqlite::Error),
    /// (De)serialization failure for cached JSON payloads.
    Json(serde_json::Error),
}

impl std::fmt::Display for CacheError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CacheError::Database(e) => write!(f, "Database error: {}", e),
            CacheError::Json(e) => write!(f, "JSON error: {}", e),
        }
    }
}

impl std::error::Error for CacheError {
    /// Expose the wrapped error so callers can walk the source chain;
    /// the previous empty impl discarded it.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            CacheError::Database(e) => Some(e),
            CacheError::Json(e) => Some(e),
        }
    }
}

impl From<rusqlite::Error> for CacheError {
    fn from(e: rusqlite::Error) -> Self {
        CacheError::Database(e)
    }
}

impl From<serde_json::Error> for CacheError {
    fn from(e: serde_json::Error) -> Self {
        CacheError::Json(e)
    }
}
/// A single metadata-cache row, (de)serialized from the SQLite table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheEntry {
    // Canonical JSON of the fetch input; also the table's primary key.
    pub input: serde_json::Value,
    // Fetcher-specific result metadata, stored as JSON.
    pub info: serde_json::Value,
    // Store path the input resolved to.
    pub store_path: String,
    // Immutable entries never expire regardless of TTL (see is_expired).
    pub immutable: bool,
    // Unix seconds when the entry was written or last refreshed.
    pub timestamp: u64,
}
impl CacheEntry {
    /// Returns true when this entry is older than `ttl_seconds`.
    ///
    /// Immutable entries never expire, and a TTL of 0 disables expiry
    /// entirely.
    pub fn is_expired(&self, ttl_seconds: u64) -> bool {
        if self.immutable || ttl_seconds == 0 {
            return false;
        }
        // Treat a pre-epoch system clock as "now == 0" instead of
        // panicking (the previous unwrap() could abort on clock skew).
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0);
        // saturating_add avoids u64 overflow for pathologically large TTLs.
        now > self.timestamp.saturating_add(ttl_seconds)
    }
}
/// SQLite-backed cache mapping fetch inputs to store paths and metadata.
pub struct MetadataCache {
    // Open connection to the fetcher-cache.sqlite database.
    conn: Connection,
    // TTL applied by `lookup`; 0 disables expiry.
    ttl_seconds: u64,
}
impl MetadataCache {
pub fn new(ttl_seconds: u64) -> Result<Self, CacheError> {
let cache_dir = dirs::cache_dir()
.unwrap_or_else(|| PathBuf::from("/tmp"))
.join("nix-js");
std::fs::create_dir_all(&cache_dir)
.map_err(|e| CacheError::Database(rusqlite::Error::ToSqlConversionFailure(Box::new(e))))?;
let db_path = cache_dir.join("fetcher-cache.sqlite");
let conn = Connection::open(db_path)?;
conn.execute(
"CREATE TABLE IF NOT EXISTS cache (
input TEXT NOT NULL PRIMARY KEY,
info TEXT NOT NULL,
store_path TEXT NOT NULL,
immutable INTEGER NOT NULL,
timestamp INTEGER NOT NULL
)",
[],
)?;
Ok(Self { conn, ttl_seconds })
}
pub fn lookup(&self, input: &serde_json::Value) -> Result<Option<CacheEntry>, CacheError> {
let input_str = serde_json::to_string(input)?;
let entry: Option<(String, String, String, i64, i64)> = self
.conn
.query_row(
"SELECT input, info, store_path, immutable, timestamp FROM cache WHERE input = ?1",
params![input_str],
|row| {
Ok((
row.get(0)?,
row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?,
))
},
)
.optional()?;
match entry {
Some((input_json, info_json, store_path, immutable, timestamp)) => {
let entry = CacheEntry {
input: serde_json::from_str(&input_json)?,
info: serde_json::from_str(&info_json)?,
store_path,
immutable: immutable != 0,
timestamp: timestamp as u64,
};
if entry.is_expired(self.ttl_seconds) {
Ok(None)
} else {
Ok(Some(entry))
}
}
None => Ok(None),
}
}
pub fn lookup_expired(
&self,
input: &serde_json::Value,
) -> Result<Option<CacheEntry>, CacheError> {
let input_str = serde_json::to_string(input)?;
let entry: Option<(String, String, String, i64, i64)> = self
.conn
.query_row(
"SELECT input, info, store_path, immutable, timestamp FROM cache WHERE input = ?1",
params![input_str],
|row| {
Ok((
row.get(0)?,
row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?,
))
},
)
.optional()?;
match entry {
Some((input_json, info_json, store_path, immutable, timestamp)) => Ok(Some(
CacheEntry {
input: serde_json::from_str(&input_json)?,
info: serde_json::from_str(&info_json)?,
store_path,
immutable: immutable != 0,
timestamp: timestamp as u64,
},
)),
None => Ok(None),
}
}
pub fn add(
&self,
input: &serde_json::Value,
info: &serde_json::Value,
store_path: &str,
immutable: bool,
) -> Result<(), CacheError> {
let input_str = serde_json::to_string(input)?;
let info_str = serde_json::to_string(info)?;
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
self.conn.execute(
"INSERT OR REPLACE INTO cache (input, info, store_path, immutable, timestamp)
VALUES (?1, ?2, ?3, ?4, ?5)",
params![
input_str,
info_str,
store_path,
if immutable { 1 } else { 0 },
timestamp as i64
],
)?;
Ok(())
}
pub fn update_timestamp(&self, input: &serde_json::Value) -> Result<(), CacheError> {
let input_str = serde_json::to_string(input)?;
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
self.conn.execute(
"UPDATE cache SET timestamp = ?1 WHERE input = ?2",
params![timestamp as i64, input_str],
)?;
Ok(())
}
}

View File

@@ -2,8 +2,6 @@ mod config;
mod error; mod error;
mod validation; mod validation;
use std::path::Path;
pub use config::{StoreConfig, StoreMode}; pub use config::{StoreConfig, StoreMode};
pub use validation::validate_store_path; pub use validation::validate_store_path;
@@ -24,6 +22,13 @@ pub trait Store: Send + Sync {
references: Vec<String>, references: Vec<String>,
) -> Result<String>; ) -> Result<String>;
fn add_to_store_from_path(
&self,
name: &str,
source_path: &std::path::Path,
references: Vec<String>,
) -> Result<String>;
fn add_text_to_store( fn add_text_to_store(
&self, &self,
name: &str, name: &str,

View File

@@ -1,7 +1,13 @@
use std::io::{Error as IoError, ErrorKind as IoErrorKind, Result as IoResult};
use std::path::Path; use std::path::Path;
use std::sync::Arc;
use nix_daemon::{Progress as _, Store as _, nix}; use nix_compat::nix_daemon::types::{AddToStoreNarRequest, UnkeyedValidPathInfo};
use nix_compat::nix_daemon::worker_protocol::{ClientSettings, Operation};
use nix_compat::store_path::StorePath;
use nix_compat::wire::ProtocolVersion;
use nix_compat::wire::de::{NixRead, NixReader};
use nix_compat::wire::ser::{NixSerialize, NixWrite, NixWriter, NixWriterBuilder};
use tokio::io::{AsyncReadExt, AsyncWriteExt, ReadHalf, WriteHalf, split};
use tokio::net::UnixStream; use tokio::net::UnixStream;
use tokio::sync::Mutex; use tokio::sync::Mutex;
@@ -11,7 +17,7 @@ use super::Store;
pub struct DaemonStore { pub struct DaemonStore {
runtime: tokio::runtime::Runtime, runtime: tokio::runtime::Runtime,
store: Arc<Mutex<nix::DaemonStore<UnixStream>>>, connection: NixDaemonConnection,
} }
impl DaemonStore { impl DaemonStore {
@@ -19,25 +25,21 @@ impl DaemonStore {
let runtime = tokio::runtime::Runtime::new() let runtime = tokio::runtime::Runtime::new()
.map_err(|e| Error::internal(format!("Failed to create tokio runtime: {}", e)))?; .map_err(|e| Error::internal(format!("Failed to create tokio runtime: {}", e)))?;
let socket_str = socket_path let connection = runtime.block_on(async {
.to_str() NixDaemonConnection::connect(socket_path)
.ok_or_else(|| Error::internal("Invalid socket path: not UTF-8".to_string()))?;
let store = runtime.block_on(async {
nix_daemon::nix::DaemonStore::builder()
.connect_unix(socket_str)
.await .await
.map_err(|e| { .map_err(|e| {
Error::internal(format!( Error::internal(format!(
"Failed to connect to nix-daemon at {}: {}", "Failed to connect to nix-daemon at {}: {}",
socket_str, e socket_path.display(),
e
)) ))
}) })
})?; })?;
Ok(Self { Ok(Self {
runtime, runtime,
store: Arc::new(Mutex::new(store)), connection,
}) })
} }
@@ -56,10 +58,8 @@ impl Store for DaemonStore {
fn is_valid_path(&self, path: &str) -> Result<bool> { fn is_valid_path(&self, path: &str) -> Result<bool> {
self.block_on(async { self.block_on(async {
let mut store = self.store.lock().await; self.connection
store
.is_valid_path(path) .is_valid_path(path)
.result()
.await .await
.map_err(|e| Error::internal(format!("Daemon error in is_valid_path: {}", e))) .map_err(|e| Error::internal(format!("Daemon error in is_valid_path: {}", e)))
}) })
@@ -67,8 +67,7 @@ impl Store for DaemonStore {
fn ensure_path(&self, path: &str) -> Result<()> { fn ensure_path(&self, path: &str) -> Result<()> {
self.block_on(async { self.block_on(async {
let mut store = self.store.lock().await; self.connection.ensure_path(path).await.map_err(|e| {
store.ensure_path(path).result().await.map_err(|e| {
Error::eval_error( Error::eval_error(
format!( format!(
"builtins.storePath: path '{}' is not valid in nix store: {}", "builtins.storePath: path '{}' is not valid in nix store: {}",
@@ -87,34 +86,156 @@ impl Store for DaemonStore {
recursive: bool, recursive: bool,
references: Vec<String>, references: Vec<String>,
) -> Result<String> { ) -> Result<String> {
let temp_dir = tempfile::tempdir() use nix_compat::nix_daemon::types::AddToStoreNarRequest;
.map_err(|e| Error::internal(format!("Failed to create temp dir: {}", e)))?; use nix_compat::nixhash::{CAHash, NixHash};
let content_path = temp_dir.path().join(name); use nix_compat::store_path::{StorePath, build_ca_path};
std::fs::write(&content_path, content) use sha2::{Digest, Sha256};
.map_err(|e| Error::internal(format!("Failed to write content: {}", e)))?; use std::fs;
use tempfile::NamedTempFile;
let cam_str = if recursive { let temp_file = NamedTempFile::new()
"fixed:r:sha256" .map_err(|e| Error::internal(format!("Failed to create temp file: {}", e)))?;
fs::write(temp_file.path(), content)
.map_err(|e| Error::internal(format!("Failed to write temp file: {}", e)))?;
let nar_data = crate::nar::pack_nar(temp_file.path())?;
let nar_hash_hex = {
let mut hasher = Sha256::new();
hasher.update(&nar_data);
hex::encode(hasher.finalize())
};
let nar_hash_bytes = hex::decode(&nar_hash_hex)
.map_err(|e| Error::internal(format!("Invalid nar hash: {}", e)))?;
let mut nar_hash_arr = [0u8; 32];
nar_hash_arr.copy_from_slice(&nar_hash_bytes);
let ca_hash = if recursive {
CAHash::Nar(NixHash::Sha256(nar_hash_arr))
} else { } else {
"fixed:sha256" let mut content_hasher = Sha256::new();
content_hasher.update(content);
let content_hash = content_hasher.finalize();
let mut content_hash_arr = [0u8; 32];
content_hash_arr.copy_from_slice(&content_hash);
CAHash::Flat(NixHash::Sha256(content_hash_arr))
};
let ref_store_paths: std::result::Result<Vec<StorePath<String>>, _> = references
.iter()
.map(|r| StorePath::<String>::from_absolute_path(r.as_bytes()))
.collect();
let ref_store_paths = ref_store_paths
.map_err(|e| Error::internal(format!("Invalid reference path: {}", e)))?;
let store_path: StorePath<String> =
build_ca_path(name, &ca_hash, references.clone(), false)
.map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
let store_path_str = store_path.to_absolute_path();
if self.is_valid_path(&store_path_str)? {
return Ok(store_path_str);
}
let request = AddToStoreNarRequest {
path: store_path,
deriver: None,
nar_hash: unsafe {
std::mem::transmute::<[u8; 32], nix_compat::nix_daemon::types::NarHash>(
nar_hash_arr,
)
},
references: ref_store_paths,
registration_time: 0,
nar_size: nar_data.len() as u64,
ultimate: false,
signatures: vec![],
ca: Some(ca_hash),
repair: false,
dont_check_sigs: false,
}; };
self.block_on(async { self.block_on(async {
let mut store = self.store.lock().await; self.connection
let (store_path, _path_info) = store .add_to_store_nar(request, &nar_data)
.add_to_store(
name,
cam_str,
references,
false,
content_path.as_os_str().as_encoded_bytes(),
)
.result()
.await .await
.map_err(|e| Error::internal(format!("Daemon error in add_to_store: {}", e)))?; .map_err(|e| Error::internal(format!("Failed to add to store: {}", e)))
})?;
Ok(store_path) Ok(store_path_str)
}) }
fn add_to_store_from_path(
&self,
name: &str,
source_path: &std::path::Path,
references: Vec<String>,
) -> Result<String> {
use nix_compat::nix_daemon::types::AddToStoreNarRequest;
use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::{StorePath, build_ca_path};
use sha2::{Digest, Sha256};
let nar_data = crate::nar::pack_nar(source_path)?;
let nar_hash_hex = {
let mut hasher = Sha256::new();
hasher.update(&nar_data);
hex::encode(hasher.finalize())
};
let nar_hash_bytes = hex::decode(&nar_hash_hex)
.map_err(|e| Error::internal(format!("Invalid nar hash: {}", e)))?;
let mut nar_hash_arr = [0u8; 32];
nar_hash_arr.copy_from_slice(&nar_hash_bytes);
let ca_hash = CAHash::Nar(NixHash::Sha256(nar_hash_arr));
let ref_store_paths: std::result::Result<Vec<StorePath<String>>, _> = references
.iter()
.map(|r| StorePath::<String>::from_absolute_path(r.as_bytes()))
.collect();
let ref_store_paths = ref_store_paths
.map_err(|e| Error::internal(format!("Invalid reference path: {}", e)))?;
let store_path: StorePath<String> =
build_ca_path(name, &ca_hash, references.clone(), false)
.map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
let store_path_str = store_path.to_absolute_path();
if self.is_valid_path(&store_path_str)? {
return Ok(store_path_str);
}
let request = AddToStoreNarRequest {
path: store_path,
deriver: None,
nar_hash: unsafe {
std::mem::transmute::<[u8; 32], nix_compat::nix_daemon::types::NarHash>(
nar_hash_arr,
)
},
references: ref_store_paths,
registration_time: 0,
nar_size: nar_data.len() as u64,
ultimate: false,
signatures: vec![],
ca: Some(ca_hash),
repair: false,
dont_check_sigs: false,
};
self.block_on(async {
self.connection
.add_to_store_nar(request, &nar_data)
.await
.map_err(|e| Error::internal(format!("Failed to add to store: {}", e)))
})?;
Ok(store_path_str)
} }
fn add_text_to_store( fn add_text_to_store(
@@ -123,28 +244,562 @@ impl Store for DaemonStore {
content: &str, content: &str,
references: Vec<String>, references: Vec<String>,
) -> Result<String> { ) -> Result<String> {
self.block_on(async { use nix_compat::nix_daemon::types::AddToStoreNarRequest;
let mut store = self.store.lock().await; use nix_compat::nixhash::CAHash;
let (store_path, _) = store use nix_compat::store_path::{StorePath, build_text_path};
.add_to_store(name, "text:sha256", references, false, content.as_bytes()) use sha2::{Digest, Sha256};
.result() use std::fs;
.await use tempfile::NamedTempFile;
.map_err(|e| {
Error::internal(format!("Daemon error in add_text_to_store: {}", e))
})?;
Ok(store_path) let temp_file = NamedTempFile::new()
}) .map_err(|e| Error::internal(format!("Failed to create temp file: {}", e)))?;
fs::write(temp_file.path(), content.as_bytes())
.map_err(|e| Error::internal(format!("Failed to write temp file: {}", e)))?;
let nar_data = crate::nar::pack_nar(temp_file.path())?;
let nar_hash_hex = {
let mut hasher = Sha256::new();
hasher.update(&nar_data);
hex::encode(hasher.finalize())
};
let nar_hash_bytes = hex::decode(&nar_hash_hex)
.map_err(|e| Error::internal(format!("Invalid nar hash: {}", e)))?;
let mut nar_hash_arr = [0u8; 32];
nar_hash_arr.copy_from_slice(&nar_hash_bytes);
let content_hash = {
let mut hasher = Sha256::new();
hasher.update(content.as_bytes());
hasher.finalize().into()
};
let ref_store_paths: std::result::Result<Vec<StorePath<String>>, _> = references
.iter()
.map(|r| StorePath::<String>::from_absolute_path(r.as_bytes()))
.collect();
let ref_store_paths = ref_store_paths
.map_err(|e| Error::internal(format!("Invalid reference path: {}", e)))?;
let store_path: StorePath<String> = build_text_path(name, content, references.clone())
.map_err(|e| Error::internal(format!("Failed to build text store path: {}", e)))?;
let store_path_str = store_path.to_absolute_path();
if self.is_valid_path(&store_path_str)? {
return Ok(store_path_str);
}
let request = AddToStoreNarRequest {
path: store_path,
deriver: None,
nar_hash: unsafe {
std::mem::transmute::<[u8; 32], nix_compat::nix_daemon::types::NarHash>(
nar_hash_arr,
)
},
references: ref_store_paths,
registration_time: 0,
nar_size: nar_data.len() as u64,
ultimate: false,
signatures: vec![],
ca: Some(CAHash::Text(content_hash)),
repair: false,
dont_check_sigs: false,
};
self.block_on(async {
self.connection
.add_to_store_nar(request, &nar_data)
.await
.map_err(|e| Error::internal(format!("Failed to add text to store: {}", e)))
})?;
Ok(store_path_str)
} }
fn make_fixed_output_path( fn make_fixed_output_path(
&self, &self,
_hash_algo: &str, hash_algo: &str,
hash: &str, hash: &str,
_hash_mode: &str, hash_mode: &str,
name: &str, name: &str,
) -> Result<String> { ) -> Result<String> {
let short_hash = &hash[..32.min(hash.len())]; use nix_compat::nixhash::{CAHash, NixHash};
Ok(format!("/nix/store/{}-{}", short_hash, name)) use nix_compat::store_path::build_ca_path;
let nix_hash = match hash_algo {
"sha256" => {
let hash_bytes = hex::decode(hash)
.map_err(|e| Error::internal(format!("Invalid hash hex: {}", e)))?;
if hash_bytes.len() != 32 {
return Err(Error::internal(format!(
"Invalid sha256 hash length: expected 32, got {}",
hash_bytes.len()
)));
}
let mut arr = [0u8; 32];
arr.copy_from_slice(&hash_bytes);
NixHash::Sha256(arr)
}
_ => {
return Err(Error::internal(format!(
"Unsupported hash algorithm: {}",
hash_algo
)));
}
};
let ca_hash = if hash_mode == "r" {
CAHash::Nar(nix_hash)
} else {
CAHash::Flat(nix_hash)
};
let store_path: nix_compat::store_path::StorePath<String> =
build_ca_path(name, &ca_hash, Vec::<String>::new(), false)
.map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
Ok(store_path.to_absolute_path())
}
}
/// Highest daemon protocol version this client is willing to speak;
/// the handshake negotiates down to min(ours, server's).
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37);

// Protocol magic numbers (from nix-compat worker_protocol.rs)
const WORKER_MAGIC_1: u64 = 0x6e697863; // "nixc" — sent by the client to open the handshake
const WORKER_MAGIC_2: u64 = 0x6478696f; // "dxio" — expected back from the server
const STDERR_LAST: u64 = 0x616c7473; // "alts" — end of the stderr stream; result follows
const STDERR_ERROR: u64 = 0x63787470; // "cxtp" — daemon is reporting an error
/// Performs the client handshake with a nix-daemon server.
///
/// This is the client-side counterpart to `server_handshake_client`.
/// It exchanges magic numbers, negotiates the protocol version (the
/// minimum of ours and the server's), consumes the version-gated obsolete
/// fields, and sends the client settings via a `SetOptions` operation.
/// The exact read/write order below mirrors the wire protocol and must
/// not be reordered.
///
/// # Errors
/// `InvalidData` on a bad magic number, unparseable protocol version, or
/// an unexpected response code; otherwise any underlying I/O error.
async fn client_handshake<RW>(
    conn: &mut RW,
    client_settings: &ClientSettings,
) -> IoResult<ProtocolVersion>
where
    RW: AsyncReadExt + AsyncWriteExt + Unpin,
{
    // 1. Send magic number 1
    conn.write_u64_le(WORKER_MAGIC_1).await?;
    // 2. Receive magic number 2
    let magic2 = conn.read_u64_le().await?;
    if magic2 != WORKER_MAGIC_2 {
        return Err(IoError::new(
            IoErrorKind::InvalidData,
            format!("Invalid magic number from server: {}", magic2),
        ));
    }
    // 3. Receive server protocol version
    let server_version_raw = conn.read_u64_le().await?;
    let server_version: ProtocolVersion = server_version_raw.try_into().map_err(|e| {
        IoError::new(
            IoErrorKind::InvalidData,
            format!("Invalid protocol version: {}", e),
        )
    })?;
    // 4. Send our protocol version
    conn.write_u64_le(PROTOCOL_VERSION.into()).await?;
    // Pick the minimum version — both sides must speak it.
    let protocol_version = std::cmp::min(PROTOCOL_VERSION, server_version);
    // 5. Send obsolete fields based on protocol version
    if protocol_version.minor() >= 14 {
        // CPU affinity (obsolete, send 0)
        conn.write_u64_le(0).await?;
    }
    if protocol_version.minor() >= 11 {
        // Reserve space (obsolete, send 0)
        conn.write_u64_le(0).await?;
    }
    if protocol_version.minor() >= 33 {
        // Read Nix version string (length-prefixed)
        let version_len = conn.read_u64_le().await? as usize;
        let mut version_bytes = vec![0u8; version_len];
        conn.read_exact(&mut version_bytes).await?;
        // Strings are padded to an 8-byte boundary on the wire; consume it.
        let padding = (8 - (version_len % 8)) % 8;
        if padding > 0 {
            let mut pad = vec![0u8; padding];
            conn.read_exact(&mut pad).await?;
        }
    }
    if protocol_version.minor() >= 35 {
        // Read trust level (ignored by this client)
        let _trust = conn.read_u64_le().await?;
    }
    // 6. Read STDERR_LAST terminating the handshake's stderr stream
    let stderr_last = conn.read_u64_le().await?;
    if stderr_last != STDERR_LAST {
        return Err(IoError::new(
            IoErrorKind::InvalidData,
            format!("Expected STDERR_LAST, got: {}", stderr_last),
        ));
    }
    // 7. Send SetOptions operation with client settings
    conn.write_u64_le(Operation::SetOptions.into()).await?;
    conn.flush().await?;
    // Serialize client settings with a version-aware NixWriter into a
    // buffer, then write the buffer out in one go.
    let mut settings_buf = Vec::new();
    {
        let mut writer = NixWriterBuilder::default()
            .set_version(protocol_version)
            .build(&mut settings_buf);
        writer.write_value(client_settings).await?;
        writer.flush().await?;
    }
    conn.write_all(&settings_buf).await?;
    conn.flush().await?;
    // 8. Read response to SetOptions — must again be STDERR_LAST.
    // NOTE(review): assumes the daemon emits no intermediate stderr
    // messages (logging) before STDERR_LAST here — confirm.
    let response = conn.read_u64_le().await?;
    if response != STDERR_LAST {
        return Err(IoError::new(
            IoErrorKind::InvalidData,
            format!("Expected STDERR_LAST after SetOptions, got: {}", response),
        ));
    }
    Ok(protocol_version)
}
/// Low-level Nix Daemon client
///
/// This struct manages communication with a nix-daemon using the wire protocol.
/// It is NOT thread-safe and should be wrapped in a Mutex for concurrent access.
pub struct NixDaemonClient {
    // Version negotiated during the handshake; governs (de)serialization.
    protocol_version: ProtocolVersion,
    // Version-aware reader over the read half of the daemon socket.
    reader: NixReader<ReadHalf<UnixStream>>,
    // Version-aware writer over the write half of the daemon socket.
    writer: NixWriter<WriteHalf<UnixStream>>,
}
impl NixDaemonClient {
/// Connect to a nix-daemon at the given Unix socket path.
///
/// Opens the socket and performs the full client handshake via
/// `from_stream`. Errors from connection or handshake are returned as-is.
pub async fn connect(socket_path: &Path) -> IoResult<Self> {
    let stream = UnixStream::connect(socket_path).await?;
    Self::from_stream(stream).await
}
/// Create a client from an existing Unix stream.
///
/// Performs the protocol handshake (with default `ClientSettings`), then
/// splits the stream into version-aware reader/writer halves so requests
/// and responses can be framed independently.
pub async fn from_stream(mut stream: UnixStream) -> IoResult<Self> {
    let client_settings = ClientSettings::default();
    // Perform handshake; this also negotiates the protocol version.
    let protocol_version = client_handshake(&mut stream, &client_settings).await?;
    // Split stream into reader and writer
    let (read_half, write_half) = split(stream);
    let reader = NixReader::builder()
        .set_version(protocol_version)
        .build(read_half);
    let writer = NixWriterBuilder::default()
        .set_version(protocol_version)
        .build(write_half);
    Ok(Self {
        protocol_version,
        reader,
        writer,
    })
}
/// Execute a parameterless operation that returns a typed result
///
/// This is the main method for implementing protocol operations:
/// 1. Send operation code
/// 2. Send operation parameters (none for this variant)
/// 3. Receive response or error (see `read_response`)
async fn execute<T>(&mut self, operation: Operation) -> IoResult<T>
where
    T: nix_compat::wire::de::NixDeserialize,
{
    // Send operation code and flush so the daemon sees it immediately.
    self.writer.write_value(&operation).await?;
    self.writer.flush().await?;
    self.read_response().await
}
/// Execute an operation with a single serialized parameter.
///
/// Same framing as `execute`, but writes `param` after the opcode and
/// before flushing, then delegates to `read_response`.
async fn execute_with<P, T>(&mut self, operation: Operation, param: &P) -> IoResult<T>
where
    P: NixSerialize + Send,
    T: nix_compat::wire::de::NixDeserialize,
{
    // Send operation
    self.writer.write_value(&operation).await?;
    // Send parameter
    self.writer.write_value(param).await?;
    self.writer.flush().await?;
    self.read_response().await
}
/// Read a response from the daemon
///
/// The daemon sends either:
/// - STDERR_LAST followed by the result
/// - STDERR_ERROR followed by an error message
///
/// Any other STDERR_* code is treated as a log-style message and skipped.
async fn read_response<T>(&mut self) -> IoResult<T>
where
    T: nix_compat::wire::de::NixDeserialize,
{
    loop {
        let msg = self.reader.read_number().await?;
        if msg == STDERR_LAST {
            // Success, read the actual response
            let result: T = self.reader.read_value().await?;
            return Ok(result);
        } else if msg == STDERR_ERROR {
            // Error path: the daemon sends a structured error, but only
            // the message string is read here.
            // NOTE(review): if the daemon's error payload carries more
            // fields than a single string, the stream would be left
            // desynchronized — confirm against the wire format.
            let error_msg: String = self.reader.read_value().await?;
            return Err(IoError::other(error_msg));
        } else {
            // Other STDERR_* codes (logging, etc.) are ignored.
            // NOTE(review): this assumes every such code carries exactly
            // one string payload; codes with richer payloads (e.g.
            // activity/result messages) would desynchronize — confirm.
            let _data: String = self.reader.read_value().await?;
            continue;
        }
    }
}
/// Check if a path is valid in the store
pub async fn is_valid_path(&mut self, path: &str) -> IoResult<bool> {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
self.execute_with(Operation::IsValidPath, &store_path).await
}
/// Query information about a store path
pub async fn query_path_info(&mut self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
// QueryPathInfo returns Option<UnkeyedValidPathInfo> which is serialized
// as a bool followed by the value if true
self.writer.write_value(&Operation::QueryPathInfo).await?;
self.writer.write_value(&store_path).await?;
self.writer.flush().await?;
// Read response - it's serialized as bool + optional value
loop {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
let has_value: bool = self.reader.read_value().await?;
if has_value {
// Manually deserialize UnkeyedValidPathInfo
use nix_compat::narinfo::Signature;
use nix_compat::nixhash::CAHash;
let deriver = self.reader.read_value().await?;
let nar_hash: String = self.reader.read_value().await?;
let references = self.reader.read_value().await?;
let registration_time = self.reader.read_value().await?;
let nar_size = self.reader.read_value().await?;
let ultimate = self.reader.read_value().await?;
let signatures: Vec<Signature<String>> = self.reader.read_value().await?;
let ca: Option<CAHash> = self.reader.read_value().await?;
let value = UnkeyedValidPathInfo {
deriver,
nar_hash,
references,
registration_time,
nar_size,
ultimate,
signatures,
ca,
};
return Ok(Some(value));
} else {
return Ok(None);
}
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
continue;
}
}
}
/// Ensure a path is available in the store
pub async fn ensure_path(&mut self, path: &str) -> IoResult<()> {
let store_path = StorePath::<String>::from_absolute_path(path.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))?;
// EnsurePath returns void (no value)
self.writer.write_value(&Operation::EnsurePath).await?;
self.writer.write_value(&store_path).await?;
self.writer.flush().await?;
// Read response - expect STDERR_LAST with no value
loop {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
return Ok(());
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
continue;
}
}
}
/// Query which paths are valid
pub async fn query_valid_paths(&mut self, paths: Vec<String>) -> IoResult<Vec<String>> {
let store_paths: IoResult<Vec<StorePath<String>>> = paths
.iter()
.map(|p| {
StorePath::<String>::from_absolute_path(p.as_bytes())
.map_err(|e| IoError::new(IoErrorKind::InvalidInput, e.to_string()))
})
.collect();
let store_paths = store_paths?;
// Send operation
self.writer.write_value(&Operation::QueryValidPaths).await?;
// Manually serialize the request since QueryValidPaths doesn't impl NixSerialize
// QueryValidPaths = { paths: Vec<StorePath>, substitute: bool }
self.writer.write_value(&store_paths).await?;
// For protocol >= 1.27, send substitute flag
if self.protocol_version.minor() >= 27 {
self.writer.write_value(&false).await?;
}
self.writer.flush().await?;
let result: Vec<StorePath<String>> = self.read_response().await?;
Ok(result.into_iter().map(|p| p.to_absolute_path()).collect())
}
/// Add a NAR to the store
pub async fn add_to_store_nar(
&mut self,
request: AddToStoreNarRequest,
nar_data: &[u8],
) -> IoResult<()> {
// Send operation
self.writer.write_value(&Operation::AddToStoreNar).await?;
// Manually serialize request fields
self.writer.write_value(&request.path).await?;
self.writer.write_value(&request.deriver).await?;
// Write nar_hash as hex string
let nar_hash_hex = hex::encode(request.nar_hash.as_ref());
self.writer.write_value(&nar_hash_hex).await?;
self.writer.write_value(&request.references).await?;
self.writer.write_value(&request.registration_time).await?;
self.writer.write_value(&request.nar_size).await?;
self.writer.write_value(&request.ultimate).await?;
self.writer.write_value(&request.signatures).await?;
self.writer.write_value(&request.ca).await?;
self.writer.write_value(&request.repair).await?;
self.writer.write_value(&request.dont_check_sigs).await?;
// For protocol >= 1.23, use framed protocol
// For < 1.23, send NAR data directly
if self.protocol_version.minor() >= 23 {
// Write frame header (length)
self.writer.write_number(nar_data.len() as u64).await?;
// Write NAR data
self.writer.write_slice(nar_data).await?;
// Write end-of-frame marker (0)
self.writer.write_number(0u64).await?;
} else {
// Pre-framed protocol: write NAR data directly
self.writer.write_slice(nar_data).await?;
}
self.writer.flush().await?;
// Read response - expect STDERR_LAST with no value
loop {
let msg = self.reader.read_number().await?;
if msg == STDERR_LAST {
return Ok(());
} else if msg == STDERR_ERROR {
let error_msg: String = self.reader.read_value().await?;
return Err(IoError::other(error_msg));
} else {
let _data: String = self.reader.read_value().await?;
continue;
}
}
}
}
/// Thread-safe wrapper around NixDaemonClient
///
/// The daemon protocol is a stateful request/response exchange, so all
/// operations on the shared client must be serialized; each wrapper method
/// acquires the lock for the full duration of one operation.
pub struct NixDaemonConnection {
    // Async mutex (lock() is awaited) so the guard may be held across the
    // await points inside a protocol operation.
    client: Mutex<NixDaemonClient>,
}
impl NixDaemonConnection {
/// Connect to a nix-daemon at the given socket path
pub async fn connect(socket_path: &Path) -> IoResult<Self> {
let client = NixDaemonClient::connect(socket_path).await?;
Ok(Self {
client: Mutex::new(client),
})
}
/// Check if a path is valid in the store
pub async fn is_valid_path(&self, path: &str) -> IoResult<bool> {
let mut client = self.client.lock().await;
client.is_valid_path(path).await
}
/// Query information about a store path
pub async fn query_path_info(&self, path: &str) -> IoResult<Option<UnkeyedValidPathInfo>> {
let mut client = self.client.lock().await;
client.query_path_info(path).await
}
/// Ensure a path is available in the store
pub async fn ensure_path(&self, path: &str) -> IoResult<()> {
let mut client = self.client.lock().await;
client.ensure_path(path).await
}
/// Query which paths are valid
pub async fn query_valid_paths(&self, paths: Vec<String>) -> IoResult<Vec<String>> {
let mut client = self.client.lock().await;
client.query_valid_paths(paths).await
}
/// Add a NAR to the store
pub async fn add_to_store_nar(
&self,
request: AddToStoreNarRequest,
nar_data: &[u8],
) -> IoResult<()> {
let mut client = self.client.lock().await;
client.add_to_store_nar(request, nar_data).await
} }
} }

View File

@@ -74,6 +74,35 @@ impl Store for SimulatedStore {
Ok(store_path.to_string_lossy().to_string()) Ok(store_path.to_string_lossy().to_string())
} }
fn add_to_store_from_path(
    &self,
    name: &str,
    source_path: &Path,
    _references: Vec<String>,
) -> Result<String> {
    use crate::fetcher::cache::copy_dir_recursive;
    // Content-address the input by its NAR hash to derive the store path.
    let nar_hash = crate::nar::compute_nar_hash(source_path)
        .map_err(|e| Error::internal(format!("Failed to compute NAR hash: {}", e)))?;
    let store_path = self.cache.make_store_path(&nar_hash, name);
    // Identical content maps to the same store path, so an existing entry
    // is reused as-is.
    if !store_path.exists() {
        if source_path.is_dir() {
            // Directory sources: the store path is itself a directory that
            // receives a recursive copy of the tree.
            fs::create_dir_all(&store_path)
                .map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
            copy_dir_recursive(source_path, &store_path)
                .map_err(|e| Error::internal(format!("Failed to copy to store: {}", e)))?;
        } else {
            // File sources: the store path is the file itself. The previous
            // code created a directory at `store_path` first, which made the
            // subsequent `fs::copy` fail (the destination must not be a
            // directory). Only make sure the parent directory exists.
            if let Some(parent) = store_path.parent() {
                fs::create_dir_all(parent)
                    .map_err(|e| Error::internal(format!("Failed to create store directory: {}", e)))?;
            }
            fs::copy(source_path, &store_path)
                .map_err(|e| Error::internal(format!("Failed to copy to store: {}", e)))?;
        }
    }
    Ok(store_path.to_string_lossy().to_string())
}
fn add_text_to_store( fn add_text_to_store(
&self, &self,
name: &str, name: &str,