# Compare commits

**11 commits:** c5240385ea ... 3b6804dde6

- 3b6804dde6
- 4c505edef5
- 75cb3bfaf1
- 7d04d8262f
- c8e617fe24
- 158784cbe8
- 5b1750b1ba
- 160b59b8bf
- 0538463bf0
- 621d4ea5c0
- 3f7fd02263
## Cargo.lock (generated, 1055 changes)

File diff suppressed because it is too large.
## default.nix (new file, +15)

```nix
let
  lockFile = builtins.fromJSON (builtins.readFile ./flake.lock);
  flake-compat-node = lockFile.nodes.${lockFile.nodes.root.inputs.flake-compat};
  flake-compat = builtins.fetchTarball {
    inherit (flake-compat-node.locked) url;
    sha256 = flake-compat-node.locked.narHash;
  };

  flake = (
    import flake-compat {
      src = ./.;
    }
  );
in
flake.defaultNix
```
## flake.lock (generated, +15)

```diff
@@ -21,6 +21,20 @@
         "type": "github"
       }
     },
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1751685974,
+        "narHash": "sha256-NKw96t+BgHIYzHUjkTK95FqYRVKB8DHpVhefWSz/kTw=",
+        "rev": "549f2762aebeff29a2e5ece7a7dc0f955281a1d1",
+        "type": "tarball",
+        "url": "https://git.lix.systems/api/v1/repos/lix-project/flake-compat/archive/549f2762aebeff29a2e5ece7a7dc0f955281a1d1.tar.gz"
+      },
+      "original": {
+        "type": "tarball",
+        "url": "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz"
+      }
+    },
     "nixpkgs": {
       "locked": {
         "lastModified": 1767116409,
@@ -40,6 +54,7 @@
     "root": {
       "inputs": {
        "fenix": "fenix",
+       "flake-compat": "flake-compat",
        "nixpkgs": "nixpkgs"
      }
    },
```
## flake.nix

```diff
@@ -3,6 +3,10 @@
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
     fenix.url = "github:nix-community/fenix";
     fenix.inputs.nixpkgs.follows = "nixpkgs";
+    flake-compat = {
+      url = "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz";
+      flake = false;
+    };
   };
   outputs = { nixpkgs, fenix, ... }:
     let
```
## Cargo.toml

```diff
@@ -28,12 +28,23 @@ deno_error = "0.7"
 sha2 = "0.10"
 hex = "0.4"
 
+# Fetcher dependencies
+reqwest = { version = "0.12", features = ["blocking", "rustls-tls"], default-features = false }
+tar = "0.4"
+flate2 = "1.0"
+xz2 = "0.1"
+bzip2 = "0.5"
+zip = "2.2"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+dirs = "5.0"
+tempfile = "3.24"
+
 rnix = "0.12"
 
 nix-js-macros = { path = "../nix-js-macros" }
 
 [dev-dependencies]
-tempfile = "3.24"
 criterion = { version = "0.5", features = ["html_reports"] }
 
 [[bench]]
```
## build.rs

```diff
@@ -14,6 +14,7 @@ fn main() {
     println!("cargo::rerun-if-changed=runtime-ts/src");
     println!("cargo::rerun-if-changed=runtime-ts/package.json");
     println!("cargo::rerun-if-changed=runtime-ts/tsconfig.json");
+    println!("cargo::rerun-if-changed=runtime-ts/build.mjs");
 
     if !runtime_ts_dir.join("node_modules").exists() {
         println!("Installing npm dependencies...");
```
## runtime-ts/build.mjs

```diff
@@ -4,5 +4,5 @@ await esbuild.build({
   entryPoints: ["src/index.ts"],
   outfile: "dist/runtime.js",
   bundle: true,
-  minify: true,
+  // minify: true,
 });
```
---

```diff
@@ -22,11 +22,15 @@ export const hasAttr =
 export const mapAttrs =
   (f: NixValue) =>
   (attrs: NixValue): NixAttrs => {
-    const new_attrs: NixAttrs = {};
     const forced_attrs = forceAttrs(attrs);
     const forced_f = forceFunction(f);
+    const new_attrs: NixAttrs = {};
     for (const key in forced_attrs) {
-      new_attrs[key] = forceFunction(forced_f(key))(forced_attrs[key]);
+      Object.defineProperty(new_attrs, key, {
+        get: () => forceFunction(forced_f(key))(forced_attrs[key]),
+        enumerable: true,
+        configurable: true,
+      });
     }
     return new_attrs;
   };
```
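The rewritten `mapAttrs` swaps eager computation for per-attribute getters. A minimal standalone sketch of the pattern, using plain records in place of the runtime's `forceAttrs`/`forceFunction`:

```typescript
// Each property is materialized only when read; note that because the
// getter result is not cached, f runs again on every access.
const attrs: Record<string, number> = { a: 1, b: 2 };
const f = (key: string) => (value: number) => {
  console.log(`computing ${key}`);
  return value * 10;
};

const mapped = {} as Record<string, number>;
for (const key in attrs) {
  Object.defineProperty(mapped, key, {
    get: () => f(key)(attrs[key]),
    enumerable: true,
    configurable: true,
  });
}

console.log(Object.keys(mapped)); // ["a", "b"], nothing computed yet
console.log(mapped.a);            // logs "computing a", then 10
console.log(mapped.a);            // logs "computing a" again (no caching)
```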
---

```diff
@@ -53,9 +53,7 @@ export const addDrvOutputDependencies = (value: NixValue): NixString => {
   const context = getStringContext(s);
 
   if (context.size !== 1) {
-    throw new Error(
-      `context of string '${strValue}' must have exactly one element, but has ${context.size}`,
-    );
+    throw new Error(`context of string '${strValue}' must have exactly one element, but has ${context.size}`);
   }
 
   const [encoded] = context;
```
---

```diff
@@ -5,23 +5,64 @@
 import type { NixValue, NixString } from "../types";
 import { isStringWithContext } from "../types";
 import { force } from "../thunk";
-import {
-  type NixStringContext,
-  mkStringWithContext,
-  addBuiltContext,
-} from "../string-context";
+import { type NixStringContext, mkStringWithContext, addBuiltContext } from "../string-context";
 import { forceFunction } from "../type-assert";
+import { nixValueToJson } from "../conversion";
 
-export const fromJSON = (e: NixValue): never => {
-  throw new Error("Not implemented: fromJSON");
+const convertJsonToNix = (json: unknown): NixValue => {
+  if (json === null) {
+    return null;
+  }
+  if (typeof json === "boolean") {
+    return json;
+  }
+  if (typeof json === "number") {
+    if (Number.isInteger(json)) {
+      return BigInt(json);
+    }
+    return json;
+  }
+  if (typeof json === "string") {
+    return json;
+  }
+  if (Array.isArray(json)) {
+    return json.map(convertJsonToNix);
+  }
+  if (typeof json === "object") {
+    const result: Record<string, NixValue> = {};
+    for (const [key, value] of Object.entries(json)) {
+      result[key] = convertJsonToNix(value);
+    }
+    return result;
+  }
+  throw new TypeError(`unsupported JSON value type: ${typeof json}`);
+};
+
+export const fromJSON = (e: NixValue): NixValue => {
+  const str = force(e);
+  if (typeof str !== "string" && !isStringWithContext(str)) {
+    throw new TypeError(`builtins.fromJSON: expected a string, got ${typeName(str)}`);
+  }
+  const jsonStr = isStringWithContext(str) ? str.value : str;
+  try {
+    const parsed = JSON.parse(jsonStr);
+    return convertJsonToNix(parsed);
+  } catch (err) {
+    throw new SyntaxError(`builtins.fromJSON: ${err instanceof Error ? err.message : String(err)}`);
+  }
 };
 
 export const fromTOML = (e: NixValue): never => {
   throw new Error("Not implemented: fromTOML");
 };
 
-export const toJSON = (e: NixValue): never => {
-  throw new Error("Not implemented: toJSON");
+export const toJSON = (e: NixValue): NixString => {
+  const context: Set<string> = new Set();
+  const string = JSON.stringify(nixValueToJson(e, new Set(), context));
+  if (context.size === 0) {
+    return string;
+  }
+  return mkStringWithContext(string, context);
 };
 
 export const toXML = (e: NixValue): never => {
```
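A note on the number handling in `convertJsonToNix`: `JSON.parse` produces only doubles, so the code promotes integral values to `BigInt` to model Nix integers. (One thing to verify separately: the new `fromJSON` body calls `typeName`, which is not among the imports shown in this hunk.) A standalone sketch of the consequence; this mirrors the diff's logic but is not taken from the repo:

```typescript
const toNixNumber = (n: number): bigint | number =>
  Number.isInteger(n) ? BigInt(n) : n;

console.log(toNixNumber(JSON.parse("42")));  // 42n  (Nix int)
console.log(toNixNumber(JSON.parse("4.2"))); // 4.2  (Nix float)
// Caveat: "1.0" parses to the double 1, which is integral, so it also
// becomes 1n; JSON.parse cannot preserve the ".0" spelling.
console.log(toNixNumber(JSON.parse("1.0"))); // 1n
```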
---

```diff
@@ -9,6 +9,7 @@ import {
   isStringWithContext,
   HAS_CONTEXT,
 } from "../string-context";
+import { nixValueToJson } from "../conversion";
 
 const forceAttrs = (value: NixValue): NixAttrs => {
   const forced = force(value);
@@ -80,61 +81,6 @@ const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] =>
   return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, false, outContext));
 };
 
-const nixValueToJson = (
-  value: NixValue,
-  seen = new Set<object>(),
-  outContext?: NixStringContext,
-): any => {
-  const v = force(value);
-
-  if (v === null) return null;
-  if (typeof v === "boolean") return v;
-  if (typeof v === "string") return v;
-  if (typeof v === "number") return v;
-
-  if (typeof v === "object" && HAS_CONTEXT in v && "context" in v) {
-    if (outContext) {
-      for (const elem of v.context) {
-        outContext.add(elem);
-      }
-    }
-    return v.value;
-  }
-
-  if (typeof v === "bigint") {
-    const num = Number(v);
-    if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {
-      console.warn(`derivation: integer ${v} exceeds safe range, precision may be lost in __structuredAttrs`);
-    }
-    return num;
-  }
-
-  if (typeof v === "object" && v !== null) {
-    if (seen.has(v)) {
-      throw new Error("derivation: circular reference detected in __structuredAttrs");
-    }
-    seen.add(v);
-  }
-
-  if (Array.isArray(v)) {
-    return v.map((item) => nixValueToJson(item, seen, outContext));
-  }
-
-  if (typeof v === "object") {
-    const result: Record<string, any> = {};
-    for (const [key, val] of Object.entries(v)) {
-      result[key] = nixValueToJson(val, seen, outContext);
-    }
-    return result;
-  }
-
-  if (typeof v === "function") {
-    throw new Error("derivation: cannot serialize function in __structuredAttrs");
-  }
-
-  throw new Error(`derivation: cannot serialize ${typeof v} to JSON`);
-};
-
 const extractEnv = (
   attrs: NixAttrs,
   structuredAttrs: boolean,
```
---

```diff
@@ -2,7 +2,7 @@
  * Functional programming builtin functions
  */
 
-import { CatchableError, type NixValue } from "../types";
+import { CatchableError, HAS_CONTEXT, type NixValue } from "../types";
 import { force } from "../thunk";
 import { forceString } from "../type-assert";
 
@@ -15,8 +15,18 @@ export const seq =
 
 export const deepSeq =
   (e1: NixValue) =>
-  (e2: NixValue): never => {
-    throw new Error("Not implemented: deepSeq");
+  (e2: NixValue): NixValue => {
+    const forced = force(e1);
+    if (Array.isArray(forced)) {
+      for (const val of forced) {
+        deepSeq(val);
+      }
+    } else if (typeof forced === "object" && forced !== null && !(HAS_CONTEXT in forced)) {
+      for (const [_, val] of Object.entries(forced)) {
+        deepSeq(val);
+      }
+    }
+    return e2;
   };
 
 export const abort = (s: NixValue): never => {
```
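One thing worth double-checking in the `deepSeq` hunk: `deepSeq` is curried (`(e1) => (e2) => ...`), so the recursive call `deepSeq(val)` only applies the first argument and returns a closure without forcing anything, unless a non-curried overload shadows it elsewhere. A minimal standalone sketch of the intended recursion, with hypothetical names standing in for the runtime's helpers:

```typescript
// Hypothetical standalone sketch; plain JS values stand in for NixValue
// and the runtime's force/HAS_CONTEXT machinery.
type Forced = null | boolean | bigint | number | string | Forced[] | { [k: string]: Forced };

const deepForce = (v: Forced): void => {
  if (Array.isArray(v)) {
    for (const item of v) deepForce(item); // recurse into list elements
  } else if (typeof v === "object" && v !== null) {
    for (const item of Object.values(v)) deepForce(item); // and attr values
  }
};

// deepSeq e1 e2: deeply force e1, then return e2.
// Using a direct helper avoids the curried self-call pitfall above.
const deepSeq = (e1: Forced) => (e2: Forced): Forced => {
  deepForce(e1);
  return e2;
};
```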
---

```diff
@@ -249,7 +249,7 @@ export const builtins: any = {
 
   builtins: createThunk(() => builtins),
   currentSystem: createThunk(() => {
-    throw new Error("Not implemented: currentSystem");
+    return "x86_64-linux";
   }),
   currentTime: createThunk(() => Date.now()),
 
```
---

```diff
@@ -3,8 +3,9 @@
  * Implemented via Rust ops exposed through deno_core
  */
 
-import { forceString } from "../type-assert";
-import type { NixValue } from "../types";
+import { forceAttrs, forceBool, forceString } from "../type-assert";
+import type { NixValue, NixAttrs } from "../types";
+import { force } from "../thunk";
 
 // Declare Deno.core.ops global (provided by deno_core runtime)
 
@@ -33,24 +34,188 @@ export const fetchClosure = (args: NixValue): never => {
   throw new Error("Not implemented: fetchClosure");
 };
 
-export const fetchMercurial = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchMercurial");
+interface FetchUrlResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchTarballResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchGitResult {
+  out_path: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+  last_modified: number;
+  last_modified_date: string;
+  submodules: boolean;
+  nar_hash: string | null;
+}
+
+interface FetchHgResult {
+  out_path: string;
+  branch: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+}
+
+const normalizeUrlInput = (
+  args: NixValue,
+): { url: string; hash?: string; name?: string; executable?: boolean } => {
+  const forced = force(args);
+  if (typeof forced === "string") {
+    return { url: forced };
+  }
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const hash =
+    "sha256" in attrs ? forceString(attrs.sha256) : "hash" in attrs ? forceString(attrs.hash) : undefined;
+  const name = "name" in attrs ? forceString(attrs.name) : undefined;
+  const executable = "executable" in attrs ? forceBool(attrs.executable) : false;
+  return { url, hash, name, executable };
 };
 
-export const fetchGit = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchGit");
+export const fetchurl = (args: NixValue): string => {
+  const { url, hash, name, executable } = normalizeUrlInput(args);
+  const result: FetchUrlResult = Deno.core.ops.op_fetch_url(
+    url,
+    hash ?? null,
+    name ?? null,
+    executable ?? false,
+  );
+  return result.store_path;
 };
 
-export const fetchTarball = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchTarball");
+export const fetchTarball = (args: NixValue): string => {
+  const { url, hash, name } = normalizeUrlInput(args);
+  const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, hash ?? null, name ?? null);
+  return result.store_path;
 };
 
-export const fetchTree = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchTree");
+export const fetchGit = (args: NixValue): NixAttrs => {
+  const forced = force(args);
+  if (typeof forced === "string") {
+    const result: FetchGitResult = Deno.core.ops.op_fetch_git(forced, null, null, false, false, false, null);
+    return {
+      outPath: result.out_path,
+      rev: result.rev,
+      shortRev: result.short_rev,
+      revCount: BigInt(result.rev_count),
+      lastModified: BigInt(result.last_modified),
+      lastModifiedDate: result.last_modified_date,
+      submodules: result.submodules,
+      narHash: result.nar_hash,
+    };
+  }
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const gitRef = "ref" in attrs ? forceString(attrs.ref) : null;
+  const rev = "rev" in attrs ? forceString(attrs.rev) : null;
+  const shallow = "shallow" in attrs ? forceBool(attrs.shallow) : false;
+  const submodules = "submodules" in attrs ? forceBool(attrs.submodules) : false;
+  const allRefs = "allRefs" in attrs ? forceBool(attrs.allRefs) : false;
+  const name = "name" in attrs ? forceString(attrs.name) : null;
+
+  const result: FetchGitResult = Deno.core.ops.op_fetch_git(
+    url,
+    gitRef,
+    rev,
+    shallow,
+    submodules,
+    allRefs,
+    name,
+  );
+
+  return {
+    outPath: result.out_path,
+    rev: result.rev,
+    shortRev: result.short_rev,
+    revCount: BigInt(result.rev_count),
+    lastModified: BigInt(result.last_modified),
+    lastModifiedDate: result.last_modified_date,
+    submodules: result.submodules,
+    narHash: result.nar_hash,
+  };
 };
 
-export const fetchurl = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchurl");
+export const fetchMercurial = (args: NixValue): NixAttrs => {
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const rev = "rev" in attrs ? forceString(attrs.rev) : null;
+  const name = "name" in attrs ? forceString(attrs.name) : null;
+
+  const result: FetchHgResult = Deno.core.ops.op_fetch_hg(url, rev, name);
+
+  return {
+    outPath: result.out_path,
+    branch: result.branch,
+    rev: result.rev,
+    shortRev: result.short_rev,
+    revCount: BigInt(result.rev_count),
+  };
+};
+
+export const fetchTree = (args: NixValue): NixAttrs => {
+  const attrs = forceAttrs(args);
+  const type = "type" in attrs ? forceString(attrs.type) : "auto";
+
+  switch (type) {
+    case "git":
+      return fetchGit(args);
+    case "hg":
+    case "mercurial":
+      return fetchMercurial(args);
+    case "tarball":
+      return { outPath: fetchTarball(args) };
+    case "file":
+      return { outPath: fetchurl(args) };
+    case "path": {
+      const path = forceString(attrs.path);
+      return { outPath: path };
+    }
+    case "github":
+    case "gitlab":
+    case "sourcehut":
+      return fetchGitForge(type, attrs);
+    case "auto":
+    default:
+      return autoDetectAndFetch(attrs);
+  }
+};
+
+const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => {
+  const owner = forceString(attrs.owner);
+  const repo = forceString(attrs.repo);
+  const rev = "rev" in attrs ? forceString(attrs.rev) : "ref" in attrs ? forceString(attrs.ref) : "HEAD";
+
+  const baseUrls: Record<string, string> = {
+    github: "https://github.com",
+    gitlab: "https://gitlab.com",
+    sourcehut: "https://git.sr.ht",
+  };
+
+  const url = `${baseUrls[forge]}/${owner}/${repo}`;
+  return fetchGit({ ...attrs, url, rev });
+};
+
+const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
+  const url = forceString(attrs.url);
+  if (url.endsWith(".git") || url.includes("github.com") || url.includes("gitlab.com")) {
+    return fetchGit(attrs);
+  }
+  if (
+    url.endsWith(".tar.gz") ||
+    url.endsWith(".tar.xz") ||
+    url.endsWith(".tar.bz2") ||
+    url.endsWith(".tgz")
+  ) {
+    return { outPath: fetchTarball(attrs) };
+  }
+  return { outPath: fetchurl(attrs) };
 };
 
 export const readDir = (path: NixValue): never => {
```
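All the URL-based fetchers above funnel through `normalizeUrlInput`, which accepts either a bare URL string or an attribute set. A standalone sketch of the two shapes, simplified to drop the thunk forcing and type assertions:

```typescript
type FetchArgs =
  | string
  | { url: string; sha256?: string; hash?: string; name?: string; executable?: boolean };

// sha256 wins over hash when both are present, matching the order of the
// checks in normalizeUrlInput.
const normalize = (args: FetchArgs) =>
  typeof args === "string"
    ? { url: args }
    : {
        url: args.url,
        hash: args.sha256 ?? args.hash,
        name: args.name,
        executable: args.executable ?? false,
      };

console.log(normalize("https://example.org/src.tar.gz"));
console.log(normalize({ url: "https://example.org/src.tar.gz", sha256: "sha256-..." }));
```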
---

```diff
@@ -4,7 +4,8 @@
 
 import { force } from "../thunk";
 import { CatchableError } from "../types";
-import type { NixBool, NixStrictValue, NixValue, NixString } from "../types";
+import type { NixBool, NixStrictValue, NixValue } from "../types";
+import { forceList, forceAttrs, forceFunction } from "../type-assert";
 import * as context from "./context";
 
 export const addErrorContext =
@@ -138,6 +139,34 @@ export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue
 
 export const zipAttrsWith =
   (f: NixValue) =>
-  (list: NixValue): never => {
-    throw new Error("Not implemented: zipAttrsWith");
+  (list: NixValue): NixValue => {
+    const listForced = forceList(list);
+
+    // Map to collect all values for each attribute name
+    const attrMap = new Map<string, NixValue[]>();
+
+    // Iterate through each attribute set in the list
+    for (const item of listForced) {
+      const attrs = forceAttrs(force(item) as NixValue);
+
+      // Collect all attribute names and their values
+      for (const [key, value] of Object.entries(attrs)) {
+        if (!attrMap.has(key)) {
+          attrMap.set(key, []);
+        }
+        attrMap.get(key)!.push(value);
+      }
+    }
+
+    // Build the result attribute set
+    const result: Record<string, NixValue> = {};
+
+    for (const [name, values] of attrMap.entries()) {
+      // Apply f to name and values list
+      // f is curried: f name values
+      const fWithName = forceFunction(f)(name);
+      result[name] = forceFunction(fWithName)(values);
+    }
+
+    return result;
   };
```
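A standalone sketch of the grouping step in `zipAttrsWith`, with plain records and an uncurried callback standing in for the runtime's `NixValue` machinery:

```typescript
const zipAttrsWith = <V, R>(
  f: (name: string, values: V[]) => R,
  list: Record<string, V>[],
): Record<string, R> => {
  // Phase 1: group every value under its attribute name, preserving order.
  const groups = new Map<string, V[]>();
  for (const item of list) {
    for (const [key, value] of Object.entries(item)) {
      if (!groups.has(key)) groups.set(key, []);
      groups.get(key)!.push(value);
    }
  }
  // Phase 2: fold each group with the callback.
  const result: Record<string, R> = {};
  for (const [name, values] of groups) result[name] = f(name, values);
  return result;
};

console.log(zipAttrsWith((_, vs) => vs, [{ a: 1 }, { a: 2, b: 3 }]));
// => { a: [1, 2], b: [3] }
```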
---

```diff
@@ -2,10 +2,10 @@
  * String operation builtin functions
  */
 
-import type { NixValue } from "../types";
+import type { NixInt, NixValue } from "../types";
 import { forceString, forceList, forceInt } from "../type-assert";
 
-export const stringLength = (e: NixValue): number => forceString(e).length;
+export const stringLength = (e: NixValue): NixInt => BigInt(forceString(e).length);
 
 export const substring =
   (start: NixValue) =>
```
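One caveat the `stringLength` change does not address (an observation about JS semantics, not something the diff claims): `String.prototype.length` counts UTF-16 code units, while C++ Nix's `builtins.stringLength` counts bytes, so the two disagree on non-ASCII input:

```typescript
const s = "ü";
console.log(BigInt(s.length));                            // 1n (UTF-16 code units)
console.log(BigInt(new TextEncoder().encode(s).length));  // 2n (UTF-8 bytes, Nix's answer)
```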
---

```diff
@@ -2,23 +2,23 @@
  * Type checking builtin functions
  */
 
-import type {
-  NixAttrs,
-  NixBool,
-  NixFloat,
-  NixFunction,
-  NixInt,
-  NixList,
-  NixNull,
-  NixStrictValue,
-  NixString,
-  NixValue,
+import {
+  HAS_CONTEXT,
+  type NixAttrs,
+  type NixBool,
+  type NixFloat,
+  type NixFunction,
+  type NixInt,
+  type NixList,
+  type NixNull,
+  type NixString,
+  type NixValue,
 } from "../types";
 import { force } from "../thunk";
 
 export const isAttrs = (e: NixValue): e is NixAttrs => {
   const val = force(e);
-  return typeof val === "object" && !Array.isArray(val) && val !== null;
+  return typeof val === "object" && !Array.isArray(val) && val !== null && !(HAS_CONTEXT in val);
 };
 
 export const isBool = (e: NixValue): e is NixBool => typeof force(e) === "boolean";
```
## nix-js/runtime-ts/src/conversion.ts (new file, +58)

```typescript
import { HAS_CONTEXT, NixStringContext } from "./string-context";
import { force } from "./thunk";
import type { NixValue } from "./types";

export const nixValueToJson = (
  value: NixValue,
  seen = new Set<object>(),
  outContext?: NixStringContext,
): any => {
  const v = force(value);

  if (v === null) return null;
  if (typeof v === "boolean") return v;
  if (typeof v === "string") return v;
  if (typeof v === "number") return v;

  if (typeof v === "object" && HAS_CONTEXT in v && "context" in v) {
    if (outContext) {
      for (const elem of v.context) {
        outContext.add(elem);
      }
    }
    return v.value;
  }

  if (typeof v === "bigint") {
    const num = Number(v);
    if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {
      console.warn(`derivation: integer ${v} exceeds safe range, precision may be lost in __structuredAttrs`);
    }
    return num;
  }

  if (typeof v === "object" && v !== null) {
    if (seen.has(v)) {
      throw new Error("derivation: circular reference detected in __structuredAttrs");
    }
    seen.add(v);
  }

  if (Array.isArray(v)) {
    return v.map((item) => nixValueToJson(item, seen, outContext));
  }

  if (typeof v === "object") {
    const result: Record<string, any> = {};
    for (const [key, val] of Object.entries(v)) {
      result[key] = nixValueToJson(val, seen, outContext);
    }
    return result;
  }

  if (typeof v === "function") {
    throw new Error("derivation: cannot serialize function in __structuredAttrs");
  }

  throw new Error(`derivation: cannot serialize ${typeof v} to JSON`);
};
```
---

```diff
@@ -3,10 +3,11 @@
  */
 
 import type { NixValue, NixAttrs, NixBool, NixString } from "./types";
-import { forceAttrs, forceString } from "./type-assert";
+import { forceAttrs, forceFunction, forceString, typeName } from "./type-assert";
 import { isAttrs } from "./builtins/type-check";
 import { coerceToString, StringCoercionMode } from "./builtins/conversion";
 import { type NixStringContext, mkStringWithContext } from "./string-context";
+import { force } from "./thunk";
 
 /**
  * Concatenate multiple values into a string with context
@@ -71,15 +72,15 @@ export const select = (obj: NixValue, key: NixValue): NixValue => {
  *
  * @param obj - Attribute set to select from
  * @param key - Key to select
- * @param default_val - Value to return if key not found
- * @returns obj[key] if exists, otherwise default_val
+ * @param default_val - Value to return if key not found (will be forced if it's a thunk)
+ * @returns obj[key] if exists, otherwise force(default_val)
  */
 export const selectWithDefault = (obj: NixValue, key: NixValue, default_val: NixValue): NixValue => {
   const attrs = forceAttrs(obj);
   const forced_key = forceString(key);
 
   if (!(forced_key in attrs)) {
-    return default_val;
+    return force(default_val);
   }
 
   return attrs[forced_key];
@@ -144,3 +145,20 @@ export const validateParams = (
 
   return forced_arg;
 };
+
+export const call = (func: NixValue, arg: NixValue): NixValue => {
+  const forcedFunc = force(func);
+  if (typeof forcedFunc === "function") {
+    return forcedFunc(arg);
+  }
+  if (
+    typeof forcedFunc === "object" &&
+    !Array.isArray(forcedFunc) &&
+    forcedFunc !== null &&
+    "__functor" in forcedFunc
+  ) {
+    const functor = forceFunction(forcedFunc.__functor);
+    return forceFunction(functor(forcedFunc))(arg);
+  }
+  throw new Error(`attempt to call something which is not a function but ${typeName(forcedFunc)}`);
+};
```
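The new `call` helper dispatches on what the callee forces to. A standalone sketch of the `__functor` branch, Nix's callable attribute sets, where the application is `f.__functor f arg`:

```typescript
type Fn = (arg: unknown) => unknown;
type Functor = { __functor: (self: Functor) => Fn };

const call = (f: Fn | Functor, arg: unknown): unknown =>
  typeof f === "function"
    ? f(arg)               // ordinary function: apply directly
    : f.__functor(f)(arg); // functor: pass the set itself, then the argument

const addOne: Functor = { __functor: (self) => (x) => (x as number) + 1 };
console.log(call(addOne, 41)); // 42
```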
---

```diff
@@ -4,8 +4,16 @@
  * All functionality is exported via the global `Nix` object
  */
 
-import { createThunk, force, isThunk, IS_THUNK } from "./thunk";
-import { select, selectWithDefault, validateParams, resolvePath, hasAttr, concatStringsWithContext } from "./helpers";
+import { createThunk, force, isThunk, IS_THUNK, DEBUG_THUNKS } from "./thunk";
+import {
+  select,
+  selectWithDefault,
+  validateParams,
+  resolvePath,
+  hasAttr,
+  concatStringsWithContext,
+  call,
+} from "./helpers";
 import { op } from "./operators";
 import { builtins, PRIMOP_METADATA } from "./builtins";
 import { coerceToString, StringCoercionMode } from "./builtins/conversion";
@@ -22,7 +30,9 @@ export const Nix = {
   isThunk,
   IS_THUNK,
   HAS_CONTEXT,
+  DEBUG_THUNKS,
 
+  call,
   hasAttr,
   select,
   selectWithDefault,
```
---

```diff
@@ -7,17 +7,22 @@ import type { NixValue, NixList, NixAttrs, NixString } from "./types";
 import { isStringWithContext } from "./types";
 import { force } from "./thunk";
 import { forceNumeric, forceList, forceAttrs, coerceNumeric } from "./type-assert";
-import {
-  getStringValue,
-  getStringContext,
-  mergeContexts,
-  mkStringWithContext,
-} from "./string-context";
+import { getStringValue, getStringContext, mergeContexts, mkStringWithContext } from "./string-context";
+import { coerceToString, StringCoercionMode } from "./builtins/conversion";
 
 const isNixString = (v: unknown): v is NixString => {
   return typeof v === "string" || isStringWithContext(v);
 };
 
+const canCoerceToString = (v: NixValue): boolean => {
+  const forced = force(v);
+  if (isNixString(forced)) return true;
+  if (typeof forced === "object" && forced !== null && !Array.isArray(forced)) {
+    if ("outPath" in forced || "__toString" in forced) return true;
+  }
+  return false;
+};
+
 /**
  * Operator object exported as Nix.op
  * All operators referenced by codegen (e.g., Nix.op.add, Nix.op.eq)
@@ -40,6 +45,12 @@ export const op = {
       return mkStringWithContext(strA + strB, mergeContexts(ctxA, ctxB));
     }
 
+    if (canCoerceToString(a) && canCoerceToString(b)) {
+      const strA = coerceToString(a, StringCoercionMode.Interpolation, false);
+      const strB = coerceToString(b, StringCoercionMode.Interpolation, false);
+      return strA + strB;
+    }
+
     const [numA, numB] = coerceNumeric(forceNumeric(a), forceNumeric(b));
     return (numA as any) + (numB as any);
   },
@@ -79,6 +90,34 @@ export const op = {
       return av === Number(bv);
     }
 
+    if (Array.isArray(av) && Array.isArray(bv)) {
+      if (av.length !== bv.length) return false;
+      for (let i = 0; i < av.length; i++) {
+        if (!op.eq(av[i], bv[i])) return false;
+      }
+      return true;
+    }
+
+    if (
+      typeof av === "object" &&
+      av !== null &&
+      !Array.isArray(av) &&
+      typeof bv === "object" &&
+      bv !== null &&
+      !Array.isArray(bv) &&
+      !isNixString(av) &&
+      !isNixString(bv)
+    ) {
+      const keysA = Object.keys(av);
+      const keysB = Object.keys(bv);
+      if (keysA.length !== keysB.length) return false;
+      for (const key of keysA) {
+        if (!(key in bv)) return false;
+        if (!op.eq((av as NixAttrs)[key], (bv as NixAttrs)[key])) return false;
+      }
+      return true;
+    }
+
     return av === bv;
   },
   neq: (a: NixValue, b: NixValue): boolean => {
```
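A standalone sketch of the structural equality the `op.eq` hunk introduces: lists compare element-wise and attribute sets key-wise, recursing through the same comparison instead of falling back to reference identity:

```typescript
const eq = (a: unknown, b: unknown): boolean => {
  if (Array.isArray(a) && Array.isArray(b)) {
    return a.length === b.length && a.every((x, i) => eq(x, b[i]));
  }
  if (
    typeof a === "object" && a !== null && !Array.isArray(a) &&
    typeof b === "object" && b !== null && !Array.isArray(b)
  ) {
    const ka = Object.keys(a);
    const kb = Object.keys(b);
    return (
      ka.length === kb.length &&
      ka.every((k) => k in b && eq((a as any)[k], (b as any)[k]))
    );
  }
  return a === b; // primitives and mixed types
};

console.log(eq([1, { x: 2 }], [1, { x: 2 }])); // true, where === gives false
```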
---

```diff
@@ -27,7 +27,9 @@ export interface StringWithContext {
 }
 
 export const isStringWithContext = (v: unknown): v is StringWithContext => {
-  return typeof v === "object" && v !== null && HAS_CONTEXT in v && (v as StringWithContext)[HAS_CONTEXT] === true;
+  return (
+    typeof v === "object" && v !== null && HAS_CONTEXT in v && (v as StringWithContext)[HAS_CONTEXT] === true
+  );
 };
 
 export const mkStringWithContext = (value: string, context: NixStringContext): StringWithContext => {
```
---

```diff
@@ -11,21 +11,43 @@ import type { NixValue, NixThunkInterface, NixStrictValue } from "./types";
  */
 export const IS_THUNK = Symbol("is_thunk");
 
+const forceStack: NixThunk[] = [];
+
+export const DEBUG_THUNKS = { enabled: false };
+
 /**
  * NixThunk class - represents a lazy, unevaluated expression
  *
  * A thunk wraps a function that produces a value when called.
  * Once evaluated, the result is cached to avoid recomputation.
+ *
+ * Thunk states:
+ * - Unevaluated: func is defined, result is undefined
+ * - Evaluating (blackhole): func is undefined, result is undefined
+ * - Evaluated: func is undefined, result is defined
  */
 export class NixThunk implements NixThunkInterface {
   [key: symbol]: any;
   readonly [IS_THUNK] = true as const;
   func: (() => NixValue) | undefined;
   result: NixStrictValue | undefined;
+  readonly label: string | undefined;
+  readonly creationStack: string | undefined;
 
-  constructor(func: () => NixValue) {
+  constructor(func: () => NixValue, label?: string) {
     this.func = func;
     this.result = undefined;
+    this.label = label;
+    if (DEBUG_THUNKS.enabled) {
+      this.creationStack = new Error().stack?.split("\n").slice(2).join("\n");
+    }
+  }
+
+  toString(): string {
+    if (this.label) {
+      return `«thunk ${this.label}»`;
+    }
+    return `«thunk»`;
   }
 }
@@ -43,32 +65,66 @@ export const isThunk = (value: unknown): value is NixThunkInterface => {
  * If the value is a thunk, evaluate it and cache the result
  * If already evaluated or not a thunk, return as-is
  *
+ * Uses "blackhole" detection to catch infinite recursion:
+ * - Before evaluating, set func to undefined (entering blackhole state)
+ * - If we encounter a thunk with func=undefined and result=undefined, it's a blackhole
+ *
  * @param value - Value to force (may be a thunk)
  * @returns The forced/evaluated value
+ * @throws Error if infinite recursion is detected
  */
 export const force = (value: NixValue): NixStrictValue => {
   if (!isThunk(value)) {
     return value;
   }
 
-  // Already evaluated - return cached result
   if (value.func === undefined) {
-    return value.result!;
+    if (value.result === undefined) {
+      const thunk = value as NixThunk;
+      let msg = `infinite recursion encountered (blackhole) at ${thunk}\n`;
+      msg += "Force chain (most recent first):\n";
+      for (let i = forceStack.length - 1; i >= 0; i--) {
+        const t = forceStack[i];
+        msg += `  ${i + 1}. ${t}`;
+        if (DEBUG_THUNKS.enabled && t.creationStack) {
+          msg += `\n     Created at:\n${t.creationStack
+            .split("\n")
+            .map((l) => "       " + l)
+            .join("\n")}`;
+        }
+        msg += "\n";
+      }
+      if (DEBUG_THUNKS.enabled && thunk.creationStack) {
+        msg += `\nBlackhole thunk created at:\n${thunk.creationStack
+          .split("\n")
+          .map((l) => "  " + l)
+          .join("\n")}`;
+      }
+      throw new Error(msg);
+    }
+    return value.result;
   }
 
-  // Evaluate and cache
-  const result = force(value.func());
-  value.result = result;
-  value.func = undefined;
+  const thunk = value as NixThunk;
+  const func = thunk.func!;
+  thunk.func = undefined;
+
+  forceStack.push(thunk);
+  try {
+    const result = force(func());
+    thunk.result = result;
     return result;
+  } finally {
+    forceStack.pop();
+  }
 };
 
 /**
  * Create a new thunk from a function
  * @param func - Function that produces a value when called
+ * @param label - Optional label for debugging
  * @returns A new NixThunk wrapping the function
  */
-export const createThunk = (func: () => NixValue): NixThunkInterface => {
-  return new NixThunk(func);
+export const createThunk = (func: () => NixValue, label?: string): NixThunkInterface => {
+  return new NixThunk(func, label);
 };
```
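A standalone sketch of the blackhole protocol the `force` rewrite implements: taking `func` out of the thunk before evaluating marks it as in-progress, so re-entering the same thunk is detected as infinite recursion instead of blowing the stack:

```typescript
class Thunk<T> {
  func?: () => T;
  result?: T;
  constructor(func: () => T) {
    this.func = func;
  }
}

const force = <T>(t: Thunk<T>): T => {
  if (t.func === undefined) {
    if (t.result === undefined) {
      // func gone but no result yet: we are inside this thunk's own evaluation.
      // (The real runtime's result type excludes undefined, so this test is safe there.)
      throw new Error("infinite recursion encountered (blackhole)");
    }
    return t.result; // already evaluated, return the cached value
  }
  const func = t.func;
  t.func = undefined; // enter the blackhole state *before* evaluating
  t.result = func();
  return t.result;
};

// Nix equivalent of `let x = x; in x`:
const x: Thunk<number> = new Thunk(() => force(x));
try {
  force(x);
} catch (e) {
  console.log((e as Error).message); // "infinite recursion encountered (blackhole)"
}
```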
---

```diff
@@ -3,12 +3,21 @@
  * These functions force evaluation and verify the type, throwing errors on mismatch
  */
 
-import type { NixValue, NixList, NixAttrs, NixFunction, NixInt, NixFloat, NixNumber, NixString } from "./types";
+import type {
+  NixValue,
+  NixList,
+  NixAttrs,
+  NixFunction,
+  NixInt,
+  NixFloat,
+  NixNumber,
+  NixString,
+} from "./types";
 import { isStringWithContext } from "./types";
 import { force } from "./thunk";
 import { getStringValue } from "./string-context";
 
-const typeName = (value: NixValue): string => {
+export const typeName = (value: NixValue): string => {
   const val = force(value);
 
   if (typeof val === "bigint") return "int";
```
## nix-js/runtime-ts/src/types/global.d.ts (vendored, +50)

```diff
@@ -1,5 +1,34 @@
 import type { NixRuntime } from "..";
 
+interface FetchUrlResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchTarballResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchGitResult {
+  out_path: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+  last_modified: number;
+  last_modified_date: string;
+  submodules: boolean;
+  nar_hash: string | null;
+}
+
+interface FetchHgResult {
+  out_path: string;
+  branch: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+}
+
 declare global {
   var Nix: NixRuntime;
   namespace Deno {
@@ -18,6 +47,27 @@ declare global {
         hash_mode: string,
         name: string,
       ): string;
+      function op_fetch_url(
+        url: string,
+        expected_hash: string | null,
+        name: string | null,
+        executable: boolean,
+      ): FetchUrlResult;
+      function op_fetch_tarball(
+        url: string,
+        expected_hash: string | null,
+        name: string | null,
+      ): FetchTarballResult;
+      function op_fetch_git(
+        url: string,
+        ref: string | null,
+        rev: string | null,
+        shallow: boolean,
+        submodules: boolean,
+        all_refs: boolean,
+        name: string | null,
+      ): FetchGitResult;
+      function op_fetch_hg(url: string, rev: string | null, name: string | null): FetchHgResult;
     }
   }
 }
```
---

```diff
@@ -11,16 +11,10 @@ pub(crate) trait CodegenContext {
     fn get_sym(&self, id: SymId) -> &str;
 }
 
-impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
-    fn compile(&self, ctx: &Ctx) -> String {
-        match self {
-            Ir::Int(int) => format!("{int}n"), // Generate BigInt literal
-            Ir::Float(float) => float.to_string(),
-            Ir::Str(s) => {
-                // Escape string for JavaScript
-                let mut escaped = String::with_capacity(s.val.len() + 2);
+fn escape_quote_string(s: &str) -> String {
+    let mut escaped = String::with_capacity(s.len() + 2);
     escaped.push('"');
-    for c in s.val.chars() {
+    for c in s.chars() {
         match c {
             '\\' => escaped.push_str("\\\\"),
             '\"' => escaped.push_str("\\\""),
@@ -33,6 +27,13 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
     escaped.push('"');
     escaped
 }
+
+impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
+    fn compile(&self, ctx: &Ctx) -> String {
+        match self {
+            Ir::Int(int) => format!("{int}n"), // Generate BigInt literal
+            Ir::Float(float) => float.to_string(),
+            Ir::Str(s) => escape_quote_string(&s.val),
             Ir::Path(p) => {
                 // Path needs runtime resolution for interpolated paths
                 let path_expr = ctx.get_ir(p.expr).compile(ctx);
@@ -49,17 +50,13 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
             Ir::Func(x) => x.compile(ctx),
             Ir::AttrSet(x) => x.compile(ctx),
             Ir::List(x) => x.compile(ctx),
-            &Ir::Call(Call { func, arg }) => {
-                let func = ctx.get_ir(func).compile(ctx);
-                let arg = ctx.get_ir(arg).compile(ctx);
-                format!("Nix.force({func})({arg})")
-            }
+            Ir::Call(x) => x.compile(ctx),
             Ir::Arg(x) => format!("arg{}", x.0),
             Ir::Let(x) => x.compile(ctx),
             Ir::Select(x) => x.compile(ctx),
             &Ir::Thunk(expr_id) => {
                 let inner = ctx.get_ir(expr_id).compile(ctx);
-                format!("Nix.createThunk(()=>({}))", inner)
+                format!("Nix.createThunk(()=>({}),\"expr{}\")", inner, expr_id.0)
             }
             &Ir::ExprRef(expr_id) => {
                 format!("expr{}", expr_id.0)
@@ -171,6 +168,14 @@ impl Func {
     }
 }
 
+impl<Ctx: CodegenContext> Compile<Ctx> for Call {
+    fn compile(&self, ctx: &Ctx) -> String {
+        let func = ctx.get_ir(self.func).compile(ctx);
+        let arg = ctx.get_ir(self.arg).compile(ctx);
+        format!("Nix.call({func}, {arg})")
+    }
+}
+
 impl<Ctx: CodegenContext> Compile<Ctx> for Let {
     fn compile(&self, ctx: &Ctx) -> String {
         let info = &self.binding_sccs;
@@ -189,11 +194,18 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Let {
             for &expr in scc_exprs {
                 let ir = ctx.get_ir(expr);
                 let value = if let Ir::Thunk(inner) = ir {
-                    ctx.get_ir(*inner).compile(ctx)
+                    let inner_ir = ctx.get_ir(*inner);
+                    // Don't unwrap Thunk if inner is a Let expression
+                    // to avoid generating IIFE that executes immediately
+                    if matches!(inner_ir, Ir::Let(_)) {
+                        ir.compile(ctx)
+                    } else {
+                        inner_ir.compile(ctx)
+                    }
                 } else {
                     ir.compile(ctx)
                 };
-                js_statements.push(format!("let expr{}={}", expr.0, value));
+                js_statements.push(format!("const expr{}={}", expr.0, value));
             }
         }
     }
@@ -255,7 +267,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
         for (&sym, &expr) in &self.stcs {
             let key = ctx.get_sym(sym);
             let value = ctx.get_ir(expr).compile(ctx);
-            attrs.push(format!("\"{}\": {}", key, value));
+            attrs.push(format!("{}: {}", escape_quote_string(key), value));
        }

        for (key_expr, value_expr) in &self.dyns {
```
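Tying the codegen hunks to the runtime changes: a Nix application `f x` previously compiled to `Nix.force(f)(x)`, which assumes the callee forces to a plain JS function; routing it through the new `Nix.call` (see the helpers.ts hunk above) presumably also covers attribute sets carrying `__functor`. A sketch of the generated-JS shapes, with an assumed `Nix` surface:

```typescript
declare const Nix: {
  call: (f: unknown, arg: unknown) => unknown;
  createThunk: (f: () => unknown, label?: string) => unknown;
};
declare const expr1: unknown, expr2: unknown;

// Before: Nix.force(expr1)(expr2) - only plain functions work as callees.
// After:  Nix.call(expr1, expr2)  - also supports __functor attribute sets.
const applied = Nix.call(expr1, expr2);

// Thunks additionally gain an "exprN" label for the debug force-chain output:
const labeled = Nix.createThunk(() => expr1, "expr7");
```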
---

```diff
@@ -189,7 +189,13 @@ impl Ctx {
             .downgrade_ctx()
             .downgrade(root.tree().expr().unwrap())?;
         let code = self.get_ir(root).compile(self);
-        let code = format!("Nix.force({})", code);
+
+        let debug_prefix = if std::env::var("NIX_JS_DEBUG_THUNKS").is_ok() {
+            "Nix.DEBUG_THUNKS.enabled=true,"
+        } else {
+            ""
+        };
+        let code = format!("({}Nix.force({}))", debug_prefix, code);
         #[cfg(debug_assertions)]
         eprintln!("[DEBUG] generated code: {}", &code);
         Ok(code)
```
---

```diff
@@ -15,6 +15,8 @@ struct DependencyTracker {
     graph: Graph<ExprId, (), Directed>,
     current_binding: Option<ExprId>,
     let_scope_exprs: HashSet<ExprId>,
+    // The outer binding that owns this tracker (for nested let scopes in function params)
+    owner_binding: Option<ExprId>,
 }
 
 enum Scope<'ctx> {
@@ -90,14 +92,44 @@ impl DowngradeContext for DowngradeCtx<'_> {
             }
             Scope::Let(let_scope) => {
                 if let Some(&expr) = let_scope.get(&sym) {
-                    if let Some(tracker) = self.dep_tracker_stack.last_mut()
-                        && let Some(current) = tracker.current_binding
-                        && tracker.let_scope_exprs.contains(&current)
-                        && tracker.let_scope_exprs.contains(&expr)
-                    {
-                        let from = tracker.expr_to_node[&current];
-                        let to = tracker.expr_to_node[&expr];
-                        tracker.graph.add_edge(from, to, ());
+                    // Find which tracker contains this expression
+                    let expr_tracker_idx = self
+                        .dep_tracker_stack
+                        .iter()
+                        .position(|t| t.let_scope_exprs.contains(&expr));
+
+                    // Find the innermost tracker with a current_binding
+                    let current_tracker_idx = self
+                        .dep_tracker_stack
+                        .iter()
+                        .rposition(|t| t.current_binding.is_some());
+
+                    // Record dependency if both exist
+                    if let (Some(expr_idx), Some(curr_idx)) = (expr_tracker_idx, current_tracker_idx) {
+                        let current_binding = self.dep_tracker_stack[curr_idx].current_binding.unwrap();
+                        let owner_binding = self.dep_tracker_stack[curr_idx].owner_binding;
+
+                        // If referencing from inner scope to outer scope
+                        if curr_idx >= expr_idx {
+                            let tracker = &mut self.dep_tracker_stack[expr_idx];
+                            if let (Some(&from_node), Some(&to_node)) = (
+                                tracker.expr_to_node.get(&current_binding),
+                                tracker.expr_to_node.get(&expr),
+                            ) {
+                                // Same-level reference: record directly
+                                tracker.graph.add_edge(from_node, to_node, ());
+                            } else if curr_idx > expr_idx {
+                                // Cross-scope reference: use owner_binding if available
+                                if let Some(owner) = owner_binding {
+                                    if let (Some(&from_node), Some(&to_node)) = (
+                                        tracker.expr_to_node.get(&owner),
+                                        tracker.expr_to_node.get(&expr),
+                                    ) {
+                                        tracker.graph.add_edge(from_node, to_node, ());
+                                    }
+                                }
+                            }
+                        }
                     }
 
                     return Ok(self.new_expr(Ir::ExprRef(expr)));
@@ -216,9 +248,34 @@ impl DowngradeContext for DowngradeCtx<'_> {
             graph,
             current_binding: None,
             let_scope_exprs,
+            owner_binding: None,
         });
     }
 
+    fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId) {
+        let mut graph = Graph::new();
+        let mut expr_to_node = HashMap::new();
+        let mut let_scope_exprs = HashSet::new();
+
+        for &expr in slots.iter() {
+            let node = graph.add_node(expr);
+            expr_to_node.insert(expr, node);
+            let_scope_exprs.insert(expr);
+        }
+
+        self.dep_tracker_stack.push(DependencyTracker {
+            expr_to_node,
+            graph,
+            current_binding: None,
+            let_scope_exprs,
+            owner_binding: Some(owner),
+        });
+    }
+
+    fn get_current_binding(&self) -> Option<ExprId> {
+        self.dep_tracker_stack.last().and_then(|t| t.current_binding)
+    }
+
     fn set_current_binding(&mut self, expr: Option<ExprId>) {
         if let Some(tracker) = self.dep_tracker_stack.last_mut() {
             tracker.current_binding = expr;
```
@@ -9,8 +9,14 @@ pub enum ErrorKind {
     ParseError(String),
     #[error("error occurred during downgrade stage: {0}")]
     DowngradeError(String),
-    #[error("error occurred during evaluation stage: {0}")]
-    EvalError(String),
+    #[error(
+        "error occurred during evaluation stage: {msg}{}",
+        backtrace.as_ref().map_or("".into(), |backtrace| format!("\nBacktrace: {backtrace}"))
+    )]
+    EvalError {
+        msg: String,
+        backtrace: Option<String>,
+    },
     #[error("internal error occurred: {0}")]
     InternalError(String),
     #[error("{0}")]
@@ -114,8 +120,11 @@ impl Error {
     pub fn downgrade_error(msg: String) -> Self {
         Self::new(ErrorKind::DowngradeError(msg))
     }
-    pub fn eval_error(msg: String) -> Self {
-        Self::new(ErrorKind::EvalError(msg))
+    pub fn eval_error(msg: String, backtrace: Option<String>) -> Self {
+        Self::new(ErrorKind::EvalError {
+            msg,
+            backtrace,
+        })
     }
     pub fn internal(msg: String) -> Self {
         Self::new(ErrorKind::InternalError(msg))
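For reference, a minimal standalone sketch of the display pattern the new EvalError variant relies on, assuming the derive in use is thiserror (which matches the #[error(...)] attribute syntax above); the variant and message below are illustrative, not part of this diff:

use thiserror::Error;

#[derive(Debug, Error)]
enum DemoError {
    #[error(
        "error occurred during evaluation stage: {msg}{}",
        backtrace.as_ref().map_or(String::new(), |b| format!("\nBacktrace: {b}"))
    )]
    Eval { msg: String, backtrace: Option<String> },
}

fn main() {
    let e = DemoError::Eval {
        msg: "undefined variable 'foo'".into(),
        backtrace: Some("at <eval>:1:1".into()),
    };
    // Prints the message, then the optional backtrace on its own line.
    println!("{e}");
}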
238
nix-js/src/fetcher.rs
Normal file
@@ -0,0 +1,238 @@
mod archive;
mod cache;
mod download;
mod git;
mod hg;
mod nar;

pub use cache::FetcherCache;
pub use download::Downloader;

use deno_core::op2;
use serde::Serialize;

use crate::runtime::NixError;

#[derive(Serialize)]
pub struct FetchUrlResult {
    pub store_path: String,
    pub hash: String,
}

#[derive(Serialize)]
pub struct FetchTarballResult {
    pub store_path: String,
    pub hash: String,
}

#[derive(Serialize)]
pub struct FetchGitResult {
    pub out_path: String,
    pub rev: String,
    pub short_rev: String,
    pub rev_count: u64,
    pub last_modified: u64,
    pub last_modified_date: String,
    pub submodules: bool,
    pub nar_hash: Option<String>,
}

#[derive(Serialize)]
pub struct FetchHgResult {
    pub out_path: String,
    pub branch: String,
    pub rev: String,
    pub short_rev: String,
    pub rev_count: u64,
}

#[op2]
#[serde]
pub fn op_fetch_url(
    #[string] url: String,
    #[string] expected_hash: Option<String>,
    #[string] name: Option<String>,
    executable: bool,
) -> Result<FetchUrlResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let downloader = Downloader::new();

    let file_name = name.unwrap_or_else(|| {
        url.rsplit('/')
            .next()
            .unwrap_or("download")
            .to_string()
    });

    if let Some(ref hash) = expected_hash {
        if let Some(cached) = cache.get_url(&url, hash) {
            return Ok(FetchUrlResult {
                store_path: cached.to_string_lossy().to_string(),
                hash: hash.clone(),
            });
        }
    }

    let data = downloader
        .download(&url)
        .map_err(|e| NixError::from(e.to_string()))?;

    let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));

    if let Some(ref expected) = expected_hash {
        let normalized_expected = normalize_hash(expected);
        if hash != normalized_expected {
            return Err(NixError::from(format!(
                "hash mismatch for '{}': expected {}, got {}",
                url, normalized_expected, hash
            )));
        }
    }

    let store_path = cache
        .put_url(&url, &hash, &data, &file_name, executable)
        .map_err(|e| NixError::from(e.to_string()))?;

    Ok(FetchUrlResult {
        store_path: store_path.to_string_lossy().to_string(),
        hash,
    })
}

#[op2]
#[serde]
pub fn op_fetch_tarball(
    #[string] url: String,
    #[string] expected_hash: Option<String>,
    #[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let downloader = Downloader::new();

    let dir_name = name.unwrap_or_else(|| "source".to_string());

    let is_nar_hash = expected_hash
        .as_ref()
        .map(|h| h.starts_with("sha256-"))
        .unwrap_or(false);

    if let Some(ref hash) = expected_hash {
        let normalized = normalize_hash(hash);
        if let Some(cached) = cache.get_tarball(&url, &normalized) {
            return Ok(FetchTarballResult {
                store_path: cached.to_string_lossy().to_string(),
                hash: normalized,
            });
        }
    }

    let data = downloader
        .download(&url)
        .map_err(|e| NixError::from(e.to_string()))?;

    let temp_dir = tempfile::tempdir().map_err(|e| NixError::from(e.to_string()))?;
    let extracted_path = archive::extract_archive(&data, &temp_dir.path().to_path_buf())
        .map_err(|e| NixError::from(e.to_string()))?;

    let nar_hash = nar::compute_nar_hash(&extracted_path)
        .map_err(|e| NixError::from(e.to_string()))?;
    if let Some(ref expected) = expected_hash {
        let normalized_expected = normalize_hash(expected);
        // NOTE: only the NAR hash is computed here, so both branches
        // currently compare against it; `is_nar_hash` is presumably kept
        // for a future separately computed flat-file hash.
        let hash_to_compare = if is_nar_hash { &nar_hash } else { &nar_hash };

        if *hash_to_compare != normalized_expected {
            return Err(NixError::from(format!(
                "hash mismatch for '{}': expected {}, got {}",
                url, normalized_expected, hash_to_compare
            )));
        }
    }
    let store_path = cache
        .put_tarball_from_extracted(&url, &nar_hash, &extracted_path, &dir_name)
        .map_err(|e| NixError::from(e.to_string()))?;

    Ok(FetchTarballResult {
        store_path: store_path.to_string_lossy().to_string(),
        hash: nar_hash,
    })
}

#[op2]
#[serde]
pub fn op_fetch_git(
    #[string] url: String,
    #[string] git_ref: Option<String>,
    #[string] rev: Option<String>,
    shallow: bool,
    submodules: bool,
    all_refs: bool,
    #[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let dir_name = name.unwrap_or_else(|| "source".to_string());

    git::fetch_git(&cache, &url, git_ref.as_deref(), rev.as_deref(), shallow, submodules, all_refs, &dir_name)
        .map_err(|e| NixError::from(e.to_string()))
}

#[op2]
#[serde]
pub fn op_fetch_hg(
    #[string] url: String,
    #[string] rev: Option<String>,
    #[string] name: Option<String>,
) -> Result<FetchHgResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let dir_name = name.unwrap_or_else(|| "source".to_string());

    hg::fetch_hg(&cache, &url, rev.as_deref(), &dir_name)
        .map_err(|e| NixError::from(e.to_string()))
}

fn normalize_hash(hash: &str) -> String {
    if hash.starts_with("sha256-") {
        if let Some(b64) = hash.strip_prefix("sha256-") {
            if let Ok(bytes) = base64_decode(b64) {
                return hex::encode(bytes);
            }
        }
    }
    hash.to_string()
}

fn base64_decode(input: &str) -> Result<Vec<u8>, String> {
    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

    let input = input.trim_end_matches('=');
    let mut output = Vec::with_capacity(input.len() * 3 / 4);

    let mut buffer = 0u32;
    let mut bits = 0;

    for c in input.bytes() {
        let value = ALPHABET.iter().position(|&x| x == c)
            .ok_or_else(|| format!("Invalid base64 character: {}", c as char))?;

        buffer = (buffer << 6) | (value as u32);
        bits += 6;

        if bits >= 8 {
            bits -= 8;
            output.push((buffer >> bits) as u8);
            buffer &= (1 << bits) - 1;
        }
    }

    Ok(output)
}

pub fn register_ops() -> Vec<deno_core::OpDecl> {
    vec![
        op_fetch_url(),
        op_fetch_tarball(),
        op_fetch_git(),
        op_fetch_hg(),
    ]
}
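As a quick, self-contained illustration of what normalize_hash above does to an SRI-style hash ("sha256-<base64>" becomes lowercase hex), the sketch below inlines the same decoding loop; the input value is made up for the example:

fn main() {
    // base64 of the 4 bytes [0xde, 0xad, 0xbe, 0xef] is "3q2+7w=="
    let sri = "sha256-3q2+7w==";
    let b64 = sri.strip_prefix("sha256-").unwrap().trim_end_matches('=');
    const ALPHABET: &[u8] =
        b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let (mut buf, mut bits, mut out) = (0u32, 0u32, Vec::new());
    for c in b64.bytes() {
        let v = ALPHABET.iter().position(|&x| x == c).unwrap() as u32;
        buf = (buf << 6) | v;
        bits += 6;
        if bits >= 8 {
            bits -= 8;
            out.push((buf >> bits) as u8);
            buf &= (1 << bits) - 1;
        }
    }
    let hex: String = out.iter().map(|b| format!("{b:02x}")).collect();
    assert_eq!(hex, "deadbeef");
}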
214
nix-js/src/fetcher/archive.rs
Normal file
@@ -0,0 +1,214 @@
use std::fs::{self, File};
use std::io::Cursor;
use std::path::PathBuf;

use flate2::read::GzDecoder;

#[derive(Debug, Clone, Copy)]
pub enum ArchiveFormat {
    TarGz,
    TarXz,
    TarBz2,
    Tar,
    Zip,
}

impl ArchiveFormat {
    pub fn detect(url: &str, data: &[u8]) -> Self {
        if url.ends_with(".tar.gz") || url.ends_with(".tgz") {
            return ArchiveFormat::TarGz;
        }
        if url.ends_with(".tar.xz") || url.ends_with(".txz") {
            return ArchiveFormat::TarXz;
        }
        if url.ends_with(".tar.bz2") || url.ends_with(".tbz2") {
            return ArchiveFormat::TarBz2;
        }
        if url.ends_with(".tar") {
            return ArchiveFormat::Tar;
        }
        if url.ends_with(".zip") {
            return ArchiveFormat::Zip;
        }

        if data.len() >= 2 && data[0] == 0x1f && data[1] == 0x8b {
            return ArchiveFormat::TarGz;
        }
        if data.len() >= 6 && &data[0..6] == b"\xfd7zXZ\x00" {
            return ArchiveFormat::TarXz;
        }
        if data.len() >= 3 && &data[0..3] == b"BZh" {
            return ArchiveFormat::TarBz2;
        }
        if data.len() >= 4 && &data[0..4] == b"PK\x03\x04" {
            return ArchiveFormat::Zip;
        }

        ArchiveFormat::TarGz
    }
}

pub fn extract_archive(data: &[u8], dest: &PathBuf) -> Result<PathBuf, ArchiveError> {
    let format = ArchiveFormat::detect("", data);

    let temp_dir = dest.join("_extract_temp");
    fs::create_dir_all(&temp_dir)?;

    match format {
        ArchiveFormat::TarGz => extract_tar_gz(data, &temp_dir)?,
        ArchiveFormat::TarXz => extract_tar_xz(data, &temp_dir)?,
        ArchiveFormat::TarBz2 => extract_tar_bz2(data, &temp_dir)?,
        ArchiveFormat::Tar => extract_tar(data, &temp_dir)?,
        ArchiveFormat::Zip => extract_zip(data, &temp_dir)?,
    }

    strip_single_toplevel(&temp_dir, dest)
}

fn extract_tar_gz(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = GzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar_xz(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = xz2::read::XzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar_bz2(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = bzip2::read::BzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let mut archive = tar::Archive::new(Cursor::new(data));
    archive.unpack(dest)?;
    Ok(())
}

fn extract_zip(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let cursor = Cursor::new(data);
    let mut archive = zip::ZipArchive::new(cursor)?;

    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = dest.join(file.mangled_name());

        if file.is_dir() {
            fs::create_dir_all(&outpath)?;
        } else {
            if let Some(parent) = outpath.parent() {
                fs::create_dir_all(parent)?;
            }
            let mut outfile = File::create(&outpath)?;
            std::io::copy(&mut file, &mut outfile)?;
        }

        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            if let Some(mode) = file.unix_mode() {
                fs::set_permissions(&outpath, fs::Permissions::from_mode(mode))?;
            }
        }
    }

    Ok(())
}

fn strip_single_toplevel(temp_dir: &PathBuf, dest: &PathBuf) -> Result<PathBuf, ArchiveError> {
    let entries: Vec<_> = fs::read_dir(temp_dir)?
        .filter_map(|e| e.ok())
        .filter(|e| !e.file_name().to_string_lossy().starts_with('.'))
        .collect();

    let source_dir = if entries.len() == 1 && entries[0].file_type()?.is_dir() {
        entries[0].path()
    } else {
        temp_dir.clone()
    };

    let final_dest = dest.join("content");
    if final_dest.exists() {
        fs::remove_dir_all(&final_dest)?;
    }

    if source_dir == *temp_dir {
        fs::rename(temp_dir, &final_dest)?;
    } else {
        copy_dir_recursive(&source_dir, &final_dest)?;
        fs::remove_dir_all(temp_dir)?;
    }

    Ok(final_dest)
}

fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> Result<(), std::io::Error> {
    fs::create_dir_all(dst)?;

    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let path = entry.path();
        let dest_path = dst.join(entry.file_name());
        let metadata = fs::symlink_metadata(&path)?;

        if metadata.is_symlink() {
            let target = fs::read_link(&path)?;
            #[cfg(unix)]
            {
                std::os::unix::fs::symlink(&target, &dest_path)?;
            }
            #[cfg(windows)]
            {
                if target.is_dir() {
                    std::os::windows::fs::symlink_dir(&target, &dest_path)?;
                } else {
                    std::os::windows::fs::symlink_file(&target, &dest_path)?;
                }
            }
        } else if metadata.is_dir() {
            copy_dir_recursive(&path, &dest_path)?;
        } else {
            fs::copy(&path, &dest_path)?;
        }
    }

    Ok(())
}

#[derive(Debug)]
pub enum ArchiveError {
    IoError(std::io::Error),
    ZipError(zip::result::ZipError),
    UnsupportedFormat(String),
}

impl std::fmt::Display for ArchiveError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ArchiveError::IoError(e) => write!(f, "I/O error: {}", e),
            ArchiveError::ZipError(e) => write!(f, "ZIP error: {}", e),
            ArchiveError::UnsupportedFormat(fmt) => write!(f, "Unsupported archive format: {}", fmt),
        }
    }
}

impl std::error::Error for ArchiveError {}

impl From<std::io::Error> for ArchiveError {
    fn from(e: std::io::Error) -> Self {
        ArchiveError::IoError(e)
    }
}

impl From<zip::result::ZipError> for ArchiveError {
    fn from(e: zip::result::ZipError) -> Self {
        ArchiveError::ZipError(e)
    }
}
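A standalone sanity check of the magic-byte sniffing used by ArchiveFormat::detect above (gzip, xz, bzip2, and zip signatures); the sniff helper is a simplified stand-in for this sketch:

fn sniff(data: &[u8]) -> &'static str {
    if data.len() >= 2 && data[0] == 0x1f && data[1] == 0x8b { return "tar.gz"; }
    if data.len() >= 6 && &data[0..6] == b"\xfd7zXZ\x00" { return "tar.xz"; }
    if data.len() >= 3 && &data[0..3] == b"BZh" { return "tar.bz2"; }
    if data.len() >= 4 && &data[0..4] == b"PK\x03\x04" { return "zip"; }
    "unknown"
}

fn main() {
    assert_eq!(sniff(&[0x1f, 0x8b, 0x08]), "tar.gz");
    assert_eq!(sniff(b"PK\x03\x04rest"), "zip");
    assert_eq!(sniff(b"BZh91AY"), "tar.bz2");
}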
277
nix-js/src/fetcher/cache.rs
Normal file
@@ -0,0 +1,277 @@
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;

use serde::{Deserialize, Serialize};

use super::archive::ArchiveError;

#[derive(Debug)]
pub enum CacheError {
    Io(std::io::Error),
    Archive(ArchiveError),
    Json(serde_json::Error),
}

impl std::fmt::Display for CacheError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CacheError::Io(e) => write!(f, "I/O error: {}", e),
            CacheError::Archive(e) => write!(f, "Archive error: {}", e),
            CacheError::Json(e) => write!(f, "JSON error: {}", e),
        }
    }
}

impl std::error::Error for CacheError {}

impl From<std::io::Error> for CacheError {
    fn from(e: std::io::Error) -> Self {
        CacheError::Io(e)
    }
}

impl From<ArchiveError> for CacheError {
    fn from(e: ArchiveError) -> Self {
        CacheError::Archive(e)
    }
}

impl From<serde_json::Error> for CacheError {
    fn from(e: serde_json::Error) -> Self {
        CacheError::Json(e)
    }
}

#[derive(Debug)]
pub struct FetcherCache {
    base_dir: PathBuf,
}

#[derive(Serialize, Deserialize)]
struct CacheMetadata {
    url: String,
    hash: String,
    name: String,
}

impl FetcherCache {
    pub fn new() -> Result<Self, std::io::Error> {
        let base_dir = dirs::cache_dir()
            .unwrap_or_else(|| PathBuf::from("/tmp"))
            .join("nix-js")
            .join("fetchers");

        fs::create_dir_all(&base_dir)?;

        Ok(Self { base_dir })
    }

    fn url_cache_dir(&self) -> PathBuf {
        self.base_dir.join("url")
    }

    fn tarball_cache_dir(&self) -> PathBuf {
        self.base_dir.join("tarball")
    }

    fn git_cache_dir(&self) -> PathBuf {
        self.base_dir.join("git")
    }

    fn hg_cache_dir(&self) -> PathBuf {
        self.base_dir.join("hg")
    }

    fn hash_key(url: &str) -> String {
        crate::nix_hash::sha256_hex(url)
    }

    pub fn get_url(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
        let cache_dir = self.url_cache_dir();
        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(format!("{}.meta", key));
        let data_path = cache_dir.join(format!("{}.data", key));

        if !meta_path.exists() || !data_path.exists() {
            return None;
        }

        let meta: CacheMetadata = serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;

        if meta.hash == expected_hash {
            Some(data_path)
        } else {
            None
        }
    }

    pub fn put_url(
        &self,
        url: &str,
        hash: &str,
        data: &[u8],
        name: &str,
        executable: bool,
    ) -> Result<PathBuf, std::io::Error> {
        let cache_dir = self.url_cache_dir();
        fs::create_dir_all(&cache_dir)?;

        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(format!("{}.meta", key));
        let data_path = cache_dir.join(format!("{}.data", key));

        let mut file = File::create(&data_path)?;
        file.write_all(data)?;

        #[cfg(unix)]
        if executable {
            use std::os::unix::fs::PermissionsExt;
            let mut perms = fs::metadata(&data_path)?.permissions();
            perms.set_mode(0o755);
            fs::set_permissions(&data_path, perms)?;
        }

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(&meta_path, serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            fs::copy(&data_path, &store_path)?;

            #[cfg(unix)]
            if executable {
                use std::os::unix::fs::PermissionsExt;
                let mut perms = fs::metadata(&store_path)?.permissions();
                perms.set_mode(0o755);
                fs::set_permissions(&store_path, perms)?;
            }
        }

        Ok(store_path)
    }

    pub fn get_tarball(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(&key).join(".meta");
        let data_dir = cache_dir.join(&key);

        if !meta_path.exists() || !data_dir.exists() {
            return None;
        }

        let meta: CacheMetadata = serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;

        if meta.hash == expected_hash {
            Some(self.make_store_path(&meta.hash, &meta.name))
        } else {
            None
        }
    }

    pub fn put_tarball(
        &self,
        url: &str,
        hash: &str,
        data: &[u8],
        name: &str,
    ) -> Result<PathBuf, CacheError> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let extract_dir = cache_dir.join(&key);

        fs::create_dir_all(&extract_dir)?;

        let extracted_path = super::archive::extract_archive(data, &extract_dir)?;

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(extract_dir.join(".meta"), serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            copy_dir_recursive(&extracted_path, &store_path)?;
        }

        Ok(store_path)
    }

    pub fn put_tarball_from_extracted(
        &self,
        url: &str,
        hash: &str,
        extracted_path: &PathBuf,
        name: &str,
    ) -> Result<PathBuf, CacheError> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let cache_entry_dir = cache_dir.join(&key);

        fs::create_dir_all(&cache_entry_dir)?;

        let cached_content = cache_entry_dir.join("content");
        if !cached_content.exists() {
            copy_dir_recursive(extracted_path, &cached_content)?;
        }

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            copy_dir_recursive(extracted_path, &store_path)?;
        }

        Ok(store_path)
    }

    pub fn get_git_bare(&self, url: &str) -> PathBuf {
        let key = Self::hash_key(url);
        self.git_cache_dir().join(key)
    }

    pub fn get_hg_bare(&self, url: &str) -> PathBuf {
        let key = Self::hash_key(url);
        self.hg_cache_dir().join(key)
    }

    pub fn make_store_path(&self, hash: &str, name: &str) -> PathBuf {
        let short_hash = &hash[..32.min(hash.len())];
        self.base_dir
            .join("store")
            .join(format!("{}-{}", short_hash, name))
    }
}

fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> Result<(), std::io::Error> {
    fs::create_dir_all(dst)?;

    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let path = entry.path();
        let dest_path = dst.join(entry.file_name());

        if path.is_dir() {
            copy_dir_recursive(&path, &dest_path)?;
        } else {
            fs::copy(&path, &dest_path)?;
        }
    }

    Ok(())
}
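An illustration of the store-path layout produced by make_store_path above, "<base>/store/<first 32 chars of the hash>-<name>", as a self-contained sketch (the base directory is a hypothetical example, not read from the real cache):

use std::path::PathBuf;

fn make_store_path(base: &PathBuf, hash: &str, name: &str) -> PathBuf {
    let short_hash = &hash[..32.min(hash.len())];
    base.join("store").join(format!("{}-{}", short_hash, name))
}

fn main() {
    let base = PathBuf::from("/home/user/.cache/nix-js/fetchers"); // hypothetical base dir
    let hash = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
    let p = make_store_path(&base, hash, "source");
    assert!(p.ends_with("store/0123456789abcdef0123456789abcdef-source"));
}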
63
nix-js/src/fetcher/download.rs
Normal file
@@ -0,0 +1,63 @@
use reqwest::blocking::Client;
use std::time::Duration;

pub struct Downloader {
    client: Client,
}

impl Downloader {
    pub fn new() -> Self {
        let client = Client::builder()
            .timeout(Duration::from_secs(300))
            .user_agent("nix-js/0.1")
            .build()
            .expect("Failed to create HTTP client");

        Self { client }
    }

    pub fn download(&self, url: &str) -> Result<Vec<u8>, DownloadError> {
        let response = self
            .client
            .get(url)
            .send()
            .map_err(|e| DownloadError::NetworkError(e.to_string()))?;

        if !response.status().is_success() {
            return Err(DownloadError::HttpError {
                url: url.to_string(),
                status: response.status().as_u16(),
            });
        }

        response
            .bytes()
            .map(|b| b.to_vec())
            .map_err(|e| DownloadError::NetworkError(e.to_string()))
    }
}

impl Default for Downloader {
    fn default() -> Self {
        Self::new()
    }
}

#[derive(Debug)]
pub enum DownloadError {
    NetworkError(String),
    HttpError { url: String, status: u16 },
}

impl std::fmt::Display for DownloadError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DownloadError::NetworkError(msg) => write!(f, "Network error: {}", msg),
            DownloadError::HttpError { url, status } => {
                write!(f, "HTTP error {} for URL: {}", status, url)
            }
        }
    }
}

impl std::error::Error for DownloadError {}
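A minimal standalone sketch of the blocking-download pattern used above, runnable with the same reqwest features declared in Cargo.toml; the URL is illustrative:

use std::time::Duration;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::blocking::Client::builder()
        .timeout(Duration::from_secs(300))
        .user_agent("nix-js/0.1")
        .build()?;
    let resp = client.get("https://example.com/archive.tar.gz").send()?;
    if !resp.status().is_success() {
        return Err(format!("HTTP error {}", resp.status().as_u16()).into());
    }
    let bytes = resp.bytes()?.to_vec();
    println!("downloaded {} bytes", bytes.len());
    Ok(())
}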
303
nix-js/src/fetcher/git.rs
Normal file
@@ -0,0 +1,303 @@
use std::fs;
use std::path::PathBuf;
use std::process::Command;

use super::cache::FetcherCache;
use super::FetchGitResult;

pub fn fetch_git(
    cache: &FetcherCache,
    url: &str,
    git_ref: Option<&str>,
    rev: Option<&str>,
    _shallow: bool,
    submodules: bool,
    all_refs: bool,
    name: &str,
) -> Result<FetchGitResult, GitError> {
    let bare_repo = cache.get_git_bare(url);

    if !bare_repo.exists() {
        clone_bare(url, &bare_repo)?;
    } else {
        fetch_repo(&bare_repo, all_refs)?;
    }

    let target_rev = resolve_rev(&bare_repo, git_ref, rev)?;
    let checkout_dir = checkout_rev(&bare_repo, &target_rev, submodules, name, cache)?;

    let rev_count = get_rev_count(&bare_repo, &target_rev)?;
    let last_modified = get_last_modified(&bare_repo, &target_rev)?;
    let last_modified_date = format_timestamp(last_modified);

    let short_rev = if target_rev.len() >= 7 {
        target_rev[..7].to_string()
    } else {
        target_rev.clone()
    };

    Ok(FetchGitResult {
        out_path: checkout_dir.to_string_lossy().to_string(),
        rev: target_rev,
        short_rev,
        rev_count,
        last_modified,
        last_modified_date,
        submodules,
        nar_hash: None,
    })
}

fn clone_bare(url: &str, dest: &PathBuf) -> Result<(), GitError> {
    fs::create_dir_all(dest.parent().unwrap_or(dest))?;

    let output = Command::new("git")
        .args(["clone", "--bare", url])
        .arg(dest)
        .output()?;

    if !output.status.success() {
        return Err(GitError::CommandFailed {
            operation: "clone".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn fetch_repo(repo: &PathBuf, all_refs: bool) -> Result<(), GitError> {
    let mut args = vec!["fetch", "--prune"];
    if all_refs {
        args.push("--all");
    }

    let output = Command::new("git")
        .args(args)
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Err(GitError::CommandFailed {
            operation: "fetch".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn resolve_rev(repo: &PathBuf, git_ref: Option<&str>, rev: Option<&str>) -> Result<String, GitError> {
    if let Some(rev) = rev {
        return Ok(rev.to_string());
    }

    let ref_to_resolve = git_ref.unwrap_or("HEAD");

    let output = Command::new("git")
        .args(["rev-parse", ref_to_resolve])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        let output = Command::new("git")
            .args(["rev-parse", &format!("refs/heads/{}", ref_to_resolve)])
            .current_dir(repo)
            .output()?;

        if !output.status.success() {
            let output = Command::new("git")
                .args(["rev-parse", &format!("refs/tags/{}", ref_to_resolve)])
                .current_dir(repo)
                .output()?;

            if !output.status.success() {
                return Err(GitError::CommandFailed {
                    operation: "rev-parse".to_string(),
                    message: format!("Could not resolve ref: {}", ref_to_resolve),
                });
            }
            return Ok(String::from_utf8_lossy(&output.stdout).trim().to_string());
        }
        return Ok(String::from_utf8_lossy(&output.stdout).trim().to_string());
    }

    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn checkout_rev(
    bare_repo: &PathBuf,
    rev: &str,
    submodules: bool,
    name: &str,
    cache: &FetcherCache,
) -> Result<PathBuf, GitError> {
    let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
    let checkout_dir = cache.make_store_path(&hash, name);

    if checkout_dir.exists() {
        return Ok(checkout_dir);
    }

    fs::create_dir_all(&checkout_dir)?;

    let output = Command::new("git")
        .args(["--work-tree", checkout_dir.to_str().unwrap_or(".")])
        .arg("checkout")
        .arg(rev)
        .arg("--")
        .arg(".")
        .current_dir(bare_repo)
        .output()?;

    if !output.status.success() {
        fs::remove_dir_all(&checkout_dir)?;
        return Err(GitError::CommandFailed {
            operation: "checkout".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    if submodules {
        let output = Command::new("git")
            .args(["submodule", "update", "--init", "--recursive"])
            .current_dir(&checkout_dir)
            .output()?;

        if !output.status.success() {
            eprintln!(
                "Warning: failed to initialize submodules: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
    }

    let git_dir = checkout_dir.join(".git");
    if git_dir.exists() {
        fs::remove_dir_all(&git_dir)?;
    }

    Ok(checkout_dir)
}

fn get_rev_count(repo: &PathBuf, rev: &str) -> Result<u64, GitError> {
    let output = Command::new("git")
        .args(["rev-list", "--count", rev])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    let count_str = String::from_utf8_lossy(&output.stdout);
    count_str.trim().parse().unwrap_or(0).pipe(Ok)
}

fn get_last_modified(repo: &PathBuf, rev: &str) -> Result<u64, GitError> {
    let output = Command::new("git")
        .args(["log", "-1", "--format=%ct", rev])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    let ts_str = String::from_utf8_lossy(&output.stdout);
    ts_str.trim().parse().unwrap_or(0).pipe(Ok)
}

fn format_timestamp(ts: u64) -> String {
    use std::time::{Duration, UNIX_EPOCH};

    let datetime = UNIX_EPOCH + Duration::from_secs(ts);
    let secs = datetime
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);

    let days_since_epoch = secs / 86400;
    let remaining_secs = secs % 86400;
    let hours = remaining_secs / 3600;
    let minutes = (remaining_secs % 3600) / 60;
    let seconds = remaining_secs % 60;

    let (year, month, day) = days_to_ymd(days_since_epoch);

    format!(
        "{:04}{:02}{:02}{:02}{:02}{:02}",
        year, month, day, hours, minutes, seconds
    )
}

fn days_to_ymd(days: u64) -> (u64, u64, u64) {
    let mut y = 1970;
    let mut remaining = days as i64;

    loop {
        let days_in_year = if is_leap_year(y) { 366 } else { 365 };
        if remaining < days_in_year {
            break;
        }
        remaining -= days_in_year;
        y += 1;
    }

    let days_in_months: [i64; 12] = if is_leap_year(y) {
        [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    } else {
        [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    };

    let mut m = 1;
    for days_in_month in days_in_months.iter() {
        if remaining < *days_in_month {
            break;
        }
        remaining -= *days_in_month;
        m += 1;
    }

    (y, m, (remaining + 1) as u64)
}

fn is_leap_year(y: u64) -> bool {
    (y % 4 == 0 && y % 100 != 0) || (y % 400 == 0)
}

trait Pipe: Sized {
    fn pipe<F, R>(self, f: F) -> R
    where
        F: FnOnce(Self) -> R,
    {
        f(self)
    }
}

impl<T> Pipe for T {}

#[derive(Debug)]
pub enum GitError {
    IoError(std::io::Error),
    CommandFailed { operation: String, message: String },
}

impl std::fmt::Display for GitError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            GitError::IoError(e) => write!(f, "I/O error: {}", e),
            GitError::CommandFailed { operation, message } => {
                write!(f, "Git {} failed: {}", operation, message)
            }
        }
    }
}

impl std::error::Error for GitError {}

impl From<std::io::Error> for GitError {
    fn from(e: std::io::Error) -> Self {
        GitError::IoError(e)
    }
}
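A standalone check of the epoch-days to (year, month, day) conversion behind format_timestamp above, copied under the same leap-year rules; the timestamp is a known reference value:

fn is_leap_year(y: u64) -> bool {
    (y % 4 == 0 && y % 100 != 0) || (y % 400 == 0)
}

fn days_to_ymd(days: u64) -> (u64, u64, u64) {
    let mut y = 1970u64;
    let mut remaining = days as i64;
    loop {
        let days_in_year = if is_leap_year(y) { 366 } else { 365 };
        if remaining < days_in_year {
            break;
        }
        remaining -= days_in_year;
        y += 1;
    }
    let months: [i64; 12] = if is_leap_year(y) {
        [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    } else {
        [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    };
    let mut m = 1;
    for dim in months.iter() {
        if remaining < *dim {
            break;
        }
        remaining -= *dim;
        m += 1;
    }
    (y, m, (remaining + 1) as u64)
}

fn main() {
    // 2021-01-01T00:00:00Z is 1609459200 seconds after the epoch
    let secs = 1_609_459_200u64;
    assert_eq!(days_to_ymd(secs / 86400), (2021, 1, 1));
}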
196
nix-js/src/fetcher/hg.rs
Normal file
@@ -0,0 +1,196 @@
use std::fs;
use std::path::PathBuf;
use std::process::Command;

use super::cache::FetcherCache;
use super::FetchHgResult;

pub fn fetch_hg(
    cache: &FetcherCache,
    url: &str,
    rev: Option<&str>,
    name: &str,
) -> Result<FetchHgResult, HgError> {
    let bare_repo = cache.get_hg_bare(url);

    if !bare_repo.exists() {
        clone_repo(url, &bare_repo)?;
    } else {
        pull_repo(&bare_repo)?;
    }

    let target_rev = rev.unwrap_or("tip").to_string();
    let resolved_rev = resolve_rev(&bare_repo, &target_rev)?;
    let branch = get_branch(&bare_repo, &resolved_rev)?;

    let checkout_dir = checkout_rev(&bare_repo, &resolved_rev, name, cache)?;

    let rev_count = get_rev_count(&bare_repo, &resolved_rev)?;

    let short_rev = if resolved_rev.len() >= 12 {
        resolved_rev[..12].to_string()
    } else {
        resolved_rev.clone()
    };

    Ok(FetchHgResult {
        out_path: checkout_dir.to_string_lossy().to_string(),
        branch,
        rev: resolved_rev,
        short_rev,
        rev_count,
    })
}

fn clone_repo(url: &str, dest: &PathBuf) -> Result<(), HgError> {
    fs::create_dir_all(dest.parent().unwrap_or(dest))?;

    let output = Command::new("hg")
        .args(["clone", "-U", url])
        .arg(dest)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "clone".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn pull_repo(repo: &PathBuf) -> Result<(), HgError> {
    let output = Command::new("hg")
        .args(["pull"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "pull".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn resolve_rev(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", rev, "--template", "{node}"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "log".to_string(),
            message: format!(
                "Could not resolve rev '{}': {}",
                rev,
                String::from_utf8_lossy(&output.stderr)
            ),
        });
    }

    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn get_branch(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", rev, "--template", "{branch}"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Ok("default".to_string());
    }

    let branch = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if branch.is_empty() {
        Ok("default".to_string())
    } else {
        Ok(branch)
    }
}

fn checkout_rev(
    bare_repo: &PathBuf,
    rev: &str,
    name: &str,
    cache: &FetcherCache,
) -> Result<PathBuf, HgError> {
    let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
    let checkout_dir = cache.make_store_path(&hash, name);

    if checkout_dir.exists() {
        return Ok(checkout_dir);
    }

    fs::create_dir_all(&checkout_dir)?;

    let output = Command::new("hg")
        .args(["archive", "-r", rev])
        .arg(&checkout_dir)
        .current_dir(bare_repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        fs::remove_dir_all(&checkout_dir)?;
        return Err(HgError::CommandFailed {
            operation: "archive".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    let hg_archival = checkout_dir.join(".hg_archival.txt");
    if hg_archival.exists() {
        fs::remove_file(&hg_archival)?;
    }

    Ok(checkout_dir)
}

fn get_rev_count(repo: &PathBuf, rev: &str) -> Result<u64, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", &format!("0::{}", rev), "--template", "x"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    Ok(output.stdout.len() as u64)
}

#[derive(Debug)]
pub enum HgError {
    IoError(std::io::Error),
    CommandFailed { operation: String, message: String },
}

impl std::fmt::Display for HgError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HgError::IoError(e) => write!(f, "I/O error: {}", e),
            HgError::CommandFailed { operation, message } => {
                write!(f, "Mercurial {} failed: {}", operation, message)
            }
        }
    }
}

impl std::error::Error for HgError {}

impl From<std::io::Error> for HgError {
    fn from(e: std::io::Error) -> Self {
        HgError::IoError(e)
    }
}
126
nix-js/src/fetcher/nar.rs
Normal file
@@ -0,0 +1,126 @@
use sha2::{Digest, Sha256};
use std::fs;
use std::io::{self, Write};
use std::path::Path;

pub fn compute_nar_hash(path: &Path) -> Result<String, io::Error> {
    let mut hasher = Sha256::new();
    dump_path(&mut hasher, path)?;
    Ok(hex::encode(hasher.finalize()))
}

fn dump_path<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    write_string(sink, "nix-archive-1")?;
    write_string(sink, "(")?;
    dump_entry(sink, path)?;
    write_string(sink, ")")?;
    Ok(())
}

fn dump_entry<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    let metadata = fs::symlink_metadata(path)?;

    if metadata.is_symlink() {
        let target = fs::read_link(path)?;
        write_string(sink, "type")?;
        write_string(sink, "symlink")?;
        write_string(sink, "target")?;
        write_string(sink, &target.to_string_lossy())?;
    } else if metadata.is_file() {
        write_string(sink, "type")?;
        write_string(sink, "regular")?;

        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            if metadata.permissions().mode() & 0o111 != 0 {
                write_string(sink, "executable")?;
                write_string(sink, "")?;
            }
        }

        let contents = fs::read(path)?;
        write_string(sink, "contents")?;
        write_contents(sink, &contents)?;
    } else if metadata.is_dir() {
        write_string(sink, "type")?;
        write_string(sink, "directory")?;

        let mut entries: Vec<_> = fs::read_dir(path)?
            .filter_map(|e| e.ok())
            .map(|e| e.file_name().to_string_lossy().to_string())
            .collect();
        entries.sort();

        for name in entries {
            write_string(sink, "entry")?;
            write_string(sink, "(")?;
            write_string(sink, "name")?;
            write_string(sink, &name)?;
            write_string(sink, "node")?;
            write_string(sink, "(")?;
            dump_entry(sink, &path.join(&name))?;
            write_string(sink, ")")?;
            write_string(sink, ")")?;
        }
    }

    Ok(())
}

fn write_string<W: Write>(sink: &mut W, s: &str) -> io::Result<()> {
    let bytes = s.as_bytes();
    let len = bytes.len() as u64;

    sink.write_all(&len.to_le_bytes())?;
    sink.write_all(bytes)?;

    let padding = (8 - (len % 8)) % 8;
    for _ in 0..padding {
        sink.write_all(&[0])?;
    }

    Ok(())
}

fn write_contents<W: Write>(sink: &mut W, contents: &[u8]) -> io::Result<()> {
    let len = contents.len() as u64;

    sink.write_all(&len.to_le_bytes())?;
    sink.write_all(contents)?;

    let padding = (8 - (len % 8)) % 8;
    for _ in 0..padding {
        sink.write_all(&[0])?;
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_simple_file() {
        let temp = TempDir::new().unwrap();
        let file_path = temp.path().join("test.txt");
        fs::write(&file_path, "hello").unwrap();

        let hash = compute_nar_hash(&file_path).unwrap();
        assert!(!hash.is_empty());
        assert_eq!(hash.len(), 64);
    }

    #[test]
    fn test_directory() {
        let temp = TempDir::new().unwrap();
        fs::write(temp.path().join("a.txt"), "aaa").unwrap();
        fs::write(temp.path().join("b.txt"), "bbb").unwrap();

        let hash = compute_nar_hash(temp.path()).unwrap();
        assert!(!hash.is_empty());
        assert_eq!(hash.len(), 64);
    }
}
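A standalone check of the NAR string framing used above: each string is written as a little-endian u64 length, the raw bytes, then zero padding up to an 8-byte boundary:

fn write_string(out: &mut Vec<u8>, s: &str) {
    let bytes = s.as_bytes();
    out.extend_from_slice(&(bytes.len() as u64).to_le_bytes());
    out.extend_from_slice(bytes);
    let padding = (8 - (bytes.len() as u64 % 8)) % 8;
    out.extend(std::iter::repeat(0u8).take(padding as usize));
}

fn main() {
    let mut buf = Vec::new();
    write_string(&mut buf, "nix-archive-1");
    // 8-byte length + 13 bytes of payload + 3 bytes of padding = 24 bytes
    assert_eq!(buf.len(), 24);
    assert_eq!(&buf[..8], &13u64.to_le_bytes());
}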
@@ -41,6 +41,8 @@ pub trait DowngradeContext {
     fn get_current_dir(&self) -> std::path::PathBuf;
 
     fn push_dep_tracker(&mut self, slots: &[ExprId]);
+    fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId);
+    fn get_current_binding(&self) -> Option<ExprId>;
     fn set_current_binding(&mut self, expr: Option<ExprId>);
     fn pop_dep_tracker(&mut self) -> Result<SccInfo>;
 }
@@ -2,11 +2,28 @@
 #![allow(clippy::unwrap_used)]
 
 use rnix::ast::{self, Expr, HasEntry};
+use std::path::{Component, Path as StdPath, PathBuf};
 
 use crate::error::{Error, Result};
 
 use super::*;
 
+fn normalize_path(path: &StdPath) -> String {
+    let mut normalized = PathBuf::new();
+    for component in path.components() {
+        match component {
+            Component::Prefix(p) => normalized.push(p.as_os_str()),
+            Component::RootDir => normalized.push("/"),
+            Component::CurDir => {}
+            Component::ParentDir => {
+                normalized.pop();
+            }
+            Component::Normal(c) => normalized.push(c),
+        }
+    }
+    normalized.to_string_lossy().to_string()
+}
+
 pub trait Downgrade<Ctx: DowngradeContext> {
     fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
 }
@@ -74,21 +91,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
             .collect();
 
         let resolved_path = if path_str.starts_with('/') {
-            path_str
+            normalize_path(&std::path::PathBuf::from(&path_str))
         } else {
             let current_dir = ctx.get_current_dir();
-            current_dir
-                .join(&path_str)
-                .canonicalize()
-                .map_err(|e| {
-                    crate::error::Error::downgrade_error(format!(
-                        "Failed to resolve path {}: {}",
-                        path_str, e
-                    ))
-                })?
-                .to_string_lossy()
-                .to_string()
+            normalize_path(&current_dir.join(&path_str))
         };
 
         vec![ctx.new_expr(Str { val: resolved_path }.to_ir())]
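A quick check of the lexical path normalization introduced above: "." is dropped and ".." pops a component, without touching the filesystem (unlike canonicalize, the path does not need to exist):

use std::path::{Component, Path, PathBuf};

fn normalize_path(path: &Path) -> String {
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::Prefix(p) => normalized.push(p.as_os_str()),
            Component::RootDir => normalized.push("/"),
            Component::CurDir => {}
            Component::ParentDir => {
                normalized.pop();
            }
            Component::Normal(c) => normalized.push(c),
        }
    }
    normalized.to_string_lossy().to_string()
}

fn main() {
    assert_eq!(normalize_path(Path::new("/a/b/../c/./d.nix")), "/a/c/d.nix");
}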
@@ -239,7 +245,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
         let expr = self.expr().unwrap().downgrade(ctx)?;
         let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
         let default = if let Some(default) = self.default_expr() {
-            Some(default.downgrade(ctx)?)
+            let default_expr = default.downgrade(ctx)?;
+            Some(ctx.new_expr(Ir::Thunk(default_expr)))
         } else {
             None
         };
@@ -121,20 +121,21 @@ pub fn downgrade_inherit(
         }
     };
     let expr = if let Some(expr) = from {
-        ctx.new_expr(
+        let select_expr = ctx.new_expr(
             Select {
                 expr,
                 attrpath: vec![Attr::Str(ident)],
                 default: None,
             }
            .to_ir(),
-        )
+        );
+        ctx.new_expr(Ir::Thunk(select_expr))
     } else {
         ctx.lookup(ident)?
     };
     match stcs.entry(ident) {
         Entry::Occupied(occupied) => {
-            return Err(Error::eval_error(format!(
+            return Err(Error::downgrade_error(format!(
                 "attribute '{}' already defined",
                 format_symbol(ctx.get_sym(*occupied.key()))
             )));
@@ -282,7 +283,10 @@ where
         Some(param_syms.iter().copied().collect())
     };
 
-    let (scc_info, body) = downgrade_bindings_generic(
+    // Get the owner from outer tracker's current_binding
+    let owner = ctx.get_current_binding();
+
+    let (scc_info, body) = downgrade_bindings_generic_with_owner(
         ctx,
         binding_keys,
         |ctx, sym_to_slot| {
@@ -317,6 +321,7 @@ where
             Ok(bindings)
         },
         body_fn,
+        owner, // Pass the owner to track cross-scope dependencies
     )?;
 
     Ok(PatternBindings {
@@ -342,6 +347,21 @@ pub fn downgrade_bindings_generic<Ctx, B, F>(
     compute_bindings_fn: B,
     body_fn: F,
 ) -> Result<(SccInfo, ExprId)>
+where
+    Ctx: DowngradeContext,
+    B: FnOnce(&mut Ctx, &HashMap<SymId, ExprId>) -> Result<HashMap<SymId, ExprId>>,
+    F: FnOnce(&mut Ctx, &[SymId]) -> Result<ExprId>,
+{
+    downgrade_bindings_generic_with_owner(ctx, binding_keys, compute_bindings_fn, body_fn, None)
+}
+
+pub fn downgrade_bindings_generic_with_owner<Ctx, B, F>(
+    ctx: &mut Ctx,
+    binding_keys: Vec<SymId>,
+    compute_bindings_fn: B,
+    body_fn: F,
+    owner: Option<ExprId>,
+) -> Result<(SccInfo, ExprId)>
 where
     Ctx: DowngradeContext,
     B: FnOnce(&mut Ctx, &HashMap<SymId, ExprId>) -> Result<HashMap<SymId, ExprId>>,
@@ -354,7 +374,11 @@ where
         .zip(slots.iter().copied())
         .collect();
 
-    ctx.push_dep_tracker(&slots);
+    if let Some(owner_binding) = owner {
+        ctx.push_dep_tracker_with_owner(&slots, owner_binding);
+    } else {
+        ctx.push_dep_tracker(&slots);
+    }
 
     ctx.with_let_scope(let_bindings.clone(), |ctx| {
         let bindings = compute_bindings_fn(ctx, &let_bindings)?;
@@ -413,9 +437,12 @@ where
             }
             ast::Entry::AttrpathValue(value) => {
                 let attrpath = value.attrpath().unwrap();
-                if let Some(first_attr) = attrpath.attrs().next()
-                    && let ast::Attr::Ident(ident) = first_attr
-                {
+                let attrs_vec: Vec<_> = attrpath.attrs().collect();
+                // Only check for duplicate definitions if this is a top-level binding (path length == 1)
+                // For nested paths (e.g., types.a, types.b), they will be merged into the same attrset
+                if attrs_vec.len() == 1 {
+                    if let Some(ast::Attr::Ident(ident)) = attrs_vec.first() {
                     let sym = ctx.new_sym(ident.to_string());
                     if !binding_syms.insert(sym) {
                         return Err(Error::downgrade_error(format!(
@@ -424,6 +451,13 @@ where
                         )));
                     }
                 }
+                } else if attrs_vec.len() > 1 {
+                    // For nested paths, just record the first-level name without checking duplicates
+                    if let Some(ast::Attr::Ident(ident)) = attrs_vec.first() {
+                        let sym = ctx.new_sym(ident.to_string());
+                        binding_syms.insert(sym);
+                    }
+                }
             }
         }
     }
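Note: to make the length-1 rule concrete, here is a minimal self-contained model (illustration only, not the crate's code): single-segment attrpaths must be unique, while longer paths such as `types.a` and `types.b` just record their head symbol and merge.

```rust
use std::collections::HashSet;

/// Toy model of the duplicate-definition rule in the hunk above.
fn check_bindings(paths: &[&[&str]]) -> Result<(), String> {
    let mut top_level: HashSet<&str> = HashSet::new();
    for path in paths {
        if path.len() == 1 {
            // Top-level binding: defining it twice is an error.
            if !top_level.insert(path[0]) {
                return Err(format!("attribute '{}' already defined", path[0]));
            }
        } else if path.len() > 1 {
            // Nested path: record the head without a duplicate check,
            // since `types.a` and `types.b` merge into one attrset.
            top_level.insert(path[0]);
        }
    }
    Ok(())
}

fn main() {
    assert!(check_bindings(&[&["a"], &["b"]]).is_ok());
    assert!(check_bindings(&[&["a"], &["a"]]).is_err()); // { a = 1; a = 2; }
    assert!(check_bindings(&[&["types", "a"], &["types", "b"]]).is_ok());
}
```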
@@ -3,6 +3,7 @@
 mod codegen;
 pub mod context;
 pub mod error;
+mod fetcher;
 mod ir;
 mod nix_hash;
 mod runtime;
@@ -23,7 +23,7 @@ pub(crate) trait RuntimeCtx: 'static {
 fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
     const ESM: &[ExtensionFileSource] =
         &deno_core::include_js_files!(nix_runtime dir "runtime-ts/dist", "runtime.js");
-    let ops = vec![
+    let mut ops = vec![
         op_import::<Ctx>(),
         op_read_file(),
         op_path_exists(),
@@ -33,6 +33,7 @@ fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
         op_output_path_name(),
         op_make_fixed_output_path(),
     ];
+    ops.extend(crate::fetcher::register_ops());
 
     Extension {
         name: "nix_runtime",
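Note: `crate::fetcher::register_ops()` is not shown in this diff, but the `ops.extend(...)` call site implies it yields `OpDecl`s. A hedged sketch in the `#[deno_core::op2]` style this file already uses (the op name and body are invented placeholders):

```rust
use deno_core::OpDecl;

// Placeholder op; the real fetcher ops live in the new fetcher module.
#[deno_core::op2]
#[string]
fn op_fetch_placeholder(#[string] url: String) -> String {
    // A real implementation would download and unpack `url` (reqwest/tar/etc.).
    format!("fetched: {url}")
}

pub(crate) fn register_ops() -> Vec<OpDecl> {
    vec![op_fetch_placeholder()]
}
```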
@@ -69,7 +70,7 @@ mod private {
         }
     }
 }
-use private::NixError;
+pub(crate) use private::NixError;
 
 #[deno_core::op2]
 #[string]
@@ -91,6 +92,8 @@ fn op_import<Ctx: RuntimeCtx>(
     let content = std::fs::read_to_string(&absolute_path)
         .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
 
+    #[cfg(debug_assertions)]
+    eprintln!("[DEBUG] compiling file: {}", absolute_path.display());
     let mut guard = ctx.push_path_stack(absolute_path);
     let ctx = guard.deref_mut();
 
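Note: `#[cfg(debug_assertions)]` applies only to the statement immediately after it, so the `eprintln!` is compiled out of release builds entirely. For multi-statement debug logic, a block or `cfg!` works:

```rust
fn main() {
    // Removed from release builds at compile time; next statement only.
    #[cfg(debug_assertions)]
    eprintln!("debug-only line");

    // Alternative: cfg! expands to a compile-time boolean, so the dead
    // branch is trivially optimized away in release builds.
    if cfg!(debug_assertions) {
        eprintln!("also debug-only");
    }
}
```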
@@ -220,7 +223,7 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
         let global_value = self
             .js_runtime
             .execute_script("<eval>", script)
-            .map_err(|e| Error::eval_error(format!("{}", e.get_message())))?;
+            .map_err(|e| Error::eval_error(format!("{}", e.get_message()), e.stack))?;
 
         // Retrieve scope from JsRuntime
         deno_core::scope!(scope, self.js_runtime);
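Note: the extra `e.stack` argument implies `Error::eval_error` now takes an optional JS stack trace alongside the message (`deno_core`'s `JsError` exposes `stack: Option<String>`). The crate's error module is not in this diff; a minimal sketch of the widened constructor:

```rust
// Assumed shape; the real Error type lives in the crate's error module.
#[derive(Debug)]
enum Error {
    Eval {
        message: String,
        /// JavaScript stack trace captured from the thrown JsError, if any.
        stack: Option<String>,
    },
}

impl Error {
    fn eval_error(message: impl Into<String>, stack: Option<String>) -> Self {
        Error::Eval { message: message.into(), stack }
    }
}

fn main() {
    let err = Error::eval_error("undefined variable 'x'", Some("at <eval>:1:1".into()));
    println!("{err:?}");
}
```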
@@ -99,3 +99,21 @@ fn logical_not() {
     assert_eq!(eval("!true"), Value::Bool(false));
     assert_eq!(eval("!false"), Value::Bool(true));
 }
+
+#[test]
+fn select_with_default_lazy_evaluation() {
+    assert_eq!(eval("{ a = 1; }.a or (1 / 0)"), Value::Int(1));
+}
+
+#[test]
+fn select_with_default_nested_lazy() {
+    assert_eq!(
+        eval("{ a.b = 42; }.a.b or (builtins.abort \"should not evaluate\")"),
+        Value::Int(42)
+    );
+}
+
+#[test]
+fn select_with_default_fallback() {
+    assert_eq!(eval("{ a = 1; }.b or 999"), Value::Int(999));
+}
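Note: these tests pin down the laziness of `or`: the default expression must be evaluated only when the attribute path is missing, otherwise `1 / 0` and `builtins.abort` would fail the happy path. A small self-contained model of that evaluation order (not the crate's actual IR):

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum Value {
    Int(i64),
    Attrs(HashMap<String, Value>),
}

/// Model of `e.a.b or default`: walk the path, calling `default` only on a miss.
fn select_or(mut v: &Value, path: &[&str], default: impl FnOnce() -> Value) -> Value {
    for key in path {
        match v {
            Value::Attrs(map) => match map.get(*key) {
                Some(next) => v = next,
                None => return default(), // lazy: evaluated only here
            },
            _ => return default(),
        }
    }
    v.clone()
}

fn main() {
    let mut inner = HashMap::new();
    inner.insert("b".to_string(), Value::Int(42));
    let mut outer = HashMap::new();
    outer.insert("a".to_string(), Value::Attrs(inner));
    let e = Value::Attrs(outer);

    // Mirrors the `builtins.abort` test: the default closure would panic,
    // but it is never called on the hit path.
    assert_eq!(
        select_or(&e, &["a", "b"], || panic!("should not evaluate")),
        Value::Int(42)
    );
    assert_eq!(select_or(&e, &["a", "c"], || Value::Int(999)), Value::Int(999));
}
```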